query (string, 9–9.05k chars) | document (string, 10–222k chars) | negatives (list of 19–20 items) | metadata (dict)
---|---|---|---
pylag.mlfit.MLCrossSpectrum.process_fit_results(fit_result, params) Process a scipy.optimize fit result to calculate the best-fitting cross spectrum, lag spectrum and errors from the model.
|
def process_fit_results(self, fit_result, params):
    # the diagonal of the (approximate) inverse Hessian gives the parameter
    # variances; some scipy minimizers return hess_inv as a callable
    # LinearOperator rather than a dense array, hence the callable check
    hess = fit_result.hess_inv(fit_result.x) if callable(fit_result.hess_inv) else np.diag(fit_result.hess_inv)
    self.cpsd = self.get_cpsd()
    if self.cpsd_model is None:
        self.cpsd_error = hess[:len(self.fbins)] ** 0.5
    else:
        return NotImplemented
    # # calculate the error on each PSD point from the error on each parameter
    # psd_deriv = self.model.eval_gradient(params, self.fbins.bin_cent)
    # self.psd_error = np.sum([e * psd_deriv[..., i] for i, e in enumerate(self.param_error)], axis=0) / self.psd
    # if np.any(np.isnan(self.psd_error)):
    #     self.psd_error = None
    self.lag = self.get_lag()
    if self.cpsd_model is None:
        self.lag_error = hess[len(self.fbins):] ** 0.5 / (2. * np.pi * self.fbins.bin_cent)
    else:
        return NotImplemented
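
For orientation, a minimal sketch of the same errors-from-inverse-Hessian idea on a toy scipy.optimize problem; the objective and all names below are illustrative, not part of pylag:

import numpy as np
from scipy.optimize import minimize

# hypothetical quadratic objective with its minimum at (1, 2)
def objective(x):
    return (x[0] - 1.0) ** 2 + 2.0 * (x[1] - 2.0) ** 2

fit_result = minimize(objective, x0=np.zeros(2), method='BFGS')

# BFGS exposes hess_inv as a dense array (L-BFGS-B returns a callable
# LinearOperator instead, hence the callable check in the method above).
# The diagonal approximates the parameter variances, so its square root
# gives rough 1-sigma errors, up to the usual normalisation convention
# for the likelihood or chi-squared surface being minimized.
param_error = np.diag(fit_result.hess_inv) ** 0.5
print(fit_result.x, param_error)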
|
[
"def process_fit_results(self, fit_result, params):\n self.psd = self.get_psd()\n if self.model is None:\n self.psd_error = self.param_error\n else:\n # calculate the error on each PSD point from the error on each parameter\n psd_deriv = self.model.eval_gradient(params, self.fbins.bin_cent)\n self.psd_error = np.sum([e * psd_deriv[..., i] for i, e in enumerate(self.param_error)], axis=0) / self.psd\n if np.any(np.isnan(self.psd_error)):\n self.psd_error = None",
"def fit_report(fit_result, modelpars=None, show_correl=True, min_correl=0.1,\n sort_pars=True, _larch=None, **kws):\n result = getattr(fit_result, 'fit_details', fit_result)\n if isinstance(result, MinimizerResult):\n return lmfit.fit_report(result, modelpars=modelpars,\n show_correl=show_correl,\n min_correl=min_correl, sort_pars=sort_pars)\n elif isinstance(result, ModelResult):\n return result.fit_report(modelpars=modelpars,\n show_correl=show_correl,\n min_correl=min_correl, sort_pars=sort_pars)\n else:\n result = getattr(fit_result, 'params', fit_result)\n if isinstance(result, Parameters):\n return lmfit.fit_report(result, modelpars=modelpars,\n show_correl=show_correl,\n min_correl=min_correl, sort_pars=sort_pars)\n else:\n try:\n result = group2params(fit_result, _larch=_larch)\n return lmfit.fit_report(result, modelpars=modelpars,\n show_correl=show_correl,\n min_correl=min_correl, sort_pars=sort_pars)\n except (ValueError, AttributeError):\n pass\n return \"Cannot make fit report with %s\" % repr(fit_result)",
"def from_fit(result):\n params = result.params\n return {name : mp.gummy(param.value,param.stderr) for name,param in params.items()}",
"def get_fit_components(fit_result: lmfit.model.ModelResult) -> list[ProfileComponent]:\n # collect parameters for each component\n component_parameters: dict[str, dict[str, float]] = defaultdict(dict)\n for name, val in fit_result.params.valuesdict().items():\n # name contains component and parameter name\n cname, pname = name.split(\"_\", maxsplit=1)\n component_parameters[cname][pname] = val\n\n components = [\n ProfileComponent(\n name,\n height=params[\"height\"],\n sigma=params[\"sigma\"],\n fwhm=params[\"fwhm\"],\n center=params[\"center\"],\n )\n for name, params in component_parameters.items()\n ]\n\n return components",
"def predefined_cross_validation(self, param_grid, fit_params, folds=None, n_jobs=-1):\n if self.is_random_search:\n # If it is random search, creates 6 random combinations of\n # the parameters grid/distribution for each fold\n paramGrid = ParameterSampler(param_grid, 6)\n else:\n # Regular GridSearch, obtains a combination of all possible parameters\n paramGrid = ParameterGrid(param_grid)\n print(self.estimator)\n\n # Find optimal threshold\n if self.estimator.algorithm_name == 'modSAR':\n internal_samples_sim = self.data_split.get_sim_matrix_internal_samples(self.split_number)\n _, threshold = GraphUtils.find_optimal_threshold(internal_samples_sim)\n\n fit_params['threshold'] = threshold\n\n \"\"\" Creats parallel tasks for the cross-validation.\n This is the same function used in the source code of GridSearchCV in sklearn.\n Parallel function will take care of all for loops defined here and will correctly\n allocate more computational resources when each for loop complete.\n Each for loop runs the function _fit_and_score defined above \"\"\"\n cross_validation_results = \\\n Parallel(n_jobs=n_jobs, verbose=True, pre_dispatch='n_jobs') \\\n (delayed(self._fit_and_score)(deepcopy(self.estimator), fold, params, fit_params)\n for fold in range(1, self.n_splits + 1) if folds is None or (folds is not None and fold in folds)\n for params in paramGrid)\n\n # After cross-validation, gather results and picks best model\n (results, cv_models) = zip(*cross_validation_results)\n results = pd.concat(results, ignore_index=True)\n\n bestFold = results[\"test_mae\"].idxmin()\n # Shows parameters of the best fold\n print(\"Metrics for best model in cross-validation:\")\n print(results.iloc[bestFold])\n best_model = cv_models[bestFold]\n\n # External Validation\n external_X = self.data_split.get_external_samples(self.split_number)\n external_y = self.data_split.get_external_Y(self.split_number)\n\n if self.estimator.algorithm_name == \"modSAR\":\n id_external_samples = self.data_split.get_id_external_samples(self.split_number)\n externalX_smiles = self.data_split.qsar_dataset.X_smiles.loc[id_external_samples]\n\n pred = best_model.predict(external_X, externalX_smiles)\n else:\n pred = best_model.predict(external_X)\n\n mae_external = mean_absolute_error(external_y, pred)\n rmse_external = mean_squared_error(external_y, pred) ** 0.5\n\n if best_model.algorithm_name in [\"OplraRegularised\", \"OplraFeatureSelection\"]:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': results.iloc[bestFold]['beta'],\n 'lambda': results.iloc[bestFold]['lambda'],\n 'no_regions': results.iloc[bestFold]['no_regions'],\n 'no_features': results.iloc[bestFold]['no_features']},\n index=np.arange(1))\n elif best_model.algorithm_name in [\"OplraEnsemble\"]:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 
'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': results.iloc[bestFold]['beta'],\n 'lambda': results.iloc[bestFold]['lambda'],\n 'no_repeats': results.iloc[bestFold]['no_repeats'],\n 'resampling': results.iloc[bestFold]['resampling'],\n 'avg_no_regions': results.iloc[bestFold]['avg_no_regions'],\n 'no_features': results.iloc[bestFold]['no_features']},\n index=np.arange(1))\n elif best_model.algorithm_name in [\"modSAR\"]:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'no_modules': results.iloc[bestFold]['no_modules'],\n 'no_classes': results.iloc[bestFold]['no_classes'],\n 'threshold': results.iloc[bestFold]['threshold'],\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': results.iloc[bestFold]['beta'],\n 'lambda': results.iloc[bestFold]['lambda']},\n index=np.arange(1))\n else:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'no_modules': None,\n 'no_classes': None,\n 'threshold': None,\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': None,\n 'lambda': None},\n index=np.arange(1))\n\n results = pd.concat([results, external_results], ignore_index=True)\n\n return results, best_model",
"def multiple_fits(self):\n self.subtract_background()\n k = 1\n for key in self.fit_names:\n #get params for this fit\n #with new lmfit might not need to do this\n self.params = copy.deepcopy(self.all_params[key])\n\n results = minimize(self.fit_dict[self.fit_type], self.params,\n args = ())\n self.params = results.params\n\n #then if k > num_fits copy result values to params dictionary and fit\n if k < self.num_fits:\n #update parameters\n next_key = self.fit_names[k]\n for i in self.all_params[next_key].keys():\n self.all_params[next_key][i].value = self.params[i].value\n\n #move to next iteration\n k = k + 1\n\n self.fit_results = results",
"def _process_scipy_result(res: OptimizeResult, options: Dict[str, Any]) -> None:\n if \"success\" not in res.keys() or \"status\" not in res.keys():\n with warnings.catch_warnings():\n warnings.simplefilter(\"always\", category=OptimizationWarning)\n warnings.warn(\n \"Optimization failed within `scipy.optimize.minimize` with no \"\n \"status returned to `res.`\",\n OptimizationWarning,\n )\n elif not res.success:\n if (\n \"ITERATIONS REACHED LIMIT\" in res.message\n or \"Iteration limit reached\" in res.message\n ):\n logger.info(\n \"`scipy.minimize` exited by reaching the iteration limit of \"\n f\"`maxiter: {options.get('maxiter')}`.\"\n )\n elif \"EVALUATIONS EXCEEDS LIMIT\" in res.message:\n logger.info(\n \"`scipy.minimize` exited by reaching the function evaluation limit of \"\n f\"`maxfun: {options.get('maxfun')}`.\"\n )\n elif \"Optimization timed out after\" in res.message:\n logger.info(res.message)\n else:\n with warnings.catch_warnings():\n warnings.simplefilter(\"always\", category=OptimizationWarning)\n warnings.warn(\n f\"Optimization failed within `scipy.optimize.minimize` with status \"\n f\"{res.status} and message {res.message}.\",\n OptimizationWarning,\n )",
"def rearrange_lmfit_2obj(result):\n arr = np.array([result.params['flux_a'].value,result.params['hlr_a'].value,result.params['e1_a'].value,result.params['e2_a'].value,result.params['x0_a'].value,result.params['y0_a'].value,\n result.params['flux_b'].value,result.params['hlr_b'].value,result.params['e1_b'].value,result.params['e2_b'].value,result.params['x0_b'].value,result.params['y0_b'].value])\n arr = pd.Series(arr,index=['flux_a','hlr_a','e1_a','e2_a','x0_a','y0_a',\n 'flux_b','hlr_b','e1_b','e2_b','x0_b','y0_b'])\n return arr",
"def fit(self, X, Xerr):\n \n if type(X) == pd.core.frame.DataFrame:\n if type(X.columns) == pd.indexes.base.Index:\n self.labels = np.array(X.columns)\n X = X.values\n \n if self.method=='astroML':\n self.GMM.n_components=self.n_components\n self.GMM.n_iter=self.n_iter\n self.GMM.fit(X, Xerr)\n \n self.V=self.GMM.V\n self.mu=self.GMM.mu\n self.weights=self.GMM.alpha\n \n if self.method=='Bovy':\n \"\"\"\n Bovy extreme_deconvolution only imports if the method is\n 'Bovy' (this is because installation is somewhat more\n complicated than astroML, and we don't want it to be\n required)\n \n As with the astroML method, initialize with a few steps of\n the scikit-learn GMM\n \"\"\"\n from extreme_deconvolution import extreme_deconvolution\\\n as bovyXD\n \n tmp_gmm = skl_GMM(self.n_components, max_iter=10,\n covariance_type='full',\n random_state=self.random_state)\n tmp_gmm.fit(X)\n self.mu = tmp_gmm.means_\n self.weights = tmp_gmm.weights_\n self.V = tmp_gmm.covariances_\n \n logl=bovyXD(X,Xerr,self.weights,self.mu,self.V,\n tol=self.tol,maxiter=self.n_iter,w=self.w)\n self.GMM.V = self.V\n self.GMM.mu = self.mu\n self.GMM.alpha = self.weights\n \n return self",
"def _sherpa_to_fitresult(shmodel, covar, efilter, fitresult):\n\n from . import SpectrumFitResult\n\n # Translate sherpa model to GP model\n # Units will be transformed to TeV, s, and m to avoid numerical issues\n # e.g. a flux error of O(-13) results in a covariance entry of O(-45) due\n # to the sqrt and unit keV which kills the uncertainties package\n amplfact = SpectrumFit.FLUX_FACTOR\n pardict = dict(gamma=['index', u.Unit('')],\n ref=['reference',\n (1 * u.keV).to('TeV')],\n ampl=['amplitude',\n (amplfact * u.Unit('cm-2 s-1 keV-1')).to('m-2 s-1 TeV-1')],\n cutoff=['lambda_',\n u.Unit('TeV-1')])\n kwargs = dict()\n\n for par in shmodel.pars:\n name = par.name\n kwargs[pardict[name][0]] = par.val * pardict[name][1]\n\n if 'powlaw1d' in shmodel.name:\n model = models.PowerLaw(**kwargs)\n elif 'ecpl' in shmodel.name:\n model = models.ExponentialCutoffPowerLaw(**kwargs)\n else:\n raise NotImplementedError(str(shmodel))\n\n # Adjust parameters in covariance matrix\n covariance = copy.deepcopy(covar.extra_output)\n covar_axis = list()\n for idx, par in enumerate(covar.parnames):\n name = par.split('.')[-1]\n covar_axis.append(pardict[name][0])\n temp = covariance[idx] * pardict[name][1]\n covariance[idx] = temp\n temp2 = covariance[:,idx] * pardict[name][1]\n covariance[:,idx] = temp2\n\n # Efilter sometimes contains ','\n if ':' in efilter:\n temp = efilter.split(':')\n else:\n temp = efilter.split(',')\n \n # Special case only one noticed bin\n if len(temp) == 1:\n fit_range = ([float(temp[0]), float(temp[0])] * u.keV).to('TeV')\n else:\n fit_range = ([float(temp[0]), float(temp[1])] * u.keV).to('TeV')\n\n npred = shmodel(1)\n statname = fitresult.statname\n statval = fitresult.statval\n\n # TODO: Calc Flux@1TeV + Error\n return SpectrumFitResult(model=model,\n covariance=covariance,\n covar_axis=covar_axis,\n fit_range=fit_range,\n statname=statname,\n statval=statval,\n npred=npred\n )",
"def fit(self):\n self.procedure_id = uuid4().hex\n self.procedure_date = str(datetime.today())\n t = perf_counter()\n self.__check_data()\n if self.error_free:\n max_iter = self.parameters[\"max iterations\"]\n conv_criteria = self.parameters[\"convergence level\"]\n\n if self.matrix.is_omx():\n self.output = AequilibraeMatrix()\n self.output.create_from_omx(self.output.random_name(), self.matrix.file_path,\n cores=self.matrix.view_names)\n self.output.computational_view()\n else:\n self.output = self.matrix.copy(self.output_name)\n if self.nan_as_zero:\n self.output.matrix_view[:, :] = np.nan_to_num(self.output.matrix_view)[:, :]\n\n rows = self.rows.data[self.row_field]\n columns = self.columns.data[self.column_field]\n tot_matrix = np.nansum(self.output.matrix_view[:, :])\n\n # Reporting\n self.report.append(\"Target convergence criteria: \" + str(conv_criteria))\n self.report.append(\"Maximum iterations: \" + str(max_iter))\n self.report.append(\"\")\n self.report.append(\"Rows:\" + str(self.rows.entries))\n self.report.append(\"Columns: \" + str(self.columns.entries))\n\n self.report.append(\"Total of seed matrix: \" + \"{:28,.4f}\".format(float(tot_matrix)))\n self.report.append(\"Total of target vectors: \" + \"{:25,.4f}\".format(float(np.nansum(rows))))\n self.report.append(\"\")\n self.report.append(\"Iteration, Convergence\")\n self.gap = conv_criteria + 1\n\n iter = 0\n while self.gap > conv_criteria and iter < max_iter:\n iter += 1\n # computes factors for zones\n marg_rows = self.__tot_rows(self.output.matrix_view[:, :])\n row_factor = self.__factor(marg_rows, rows)\n # applies factor\n self.output.matrix_view[:, :] = np.transpose(\n np.transpose(self.output.matrix_view[:, :]) * np.transpose(row_factor)\n )[:, :]\n\n # computes factors for columns\n marg_cols = self.__tot_columns(self.output.matrix_view[:, :])\n column_factor = self.__factor(marg_cols, columns)\n\n # applies factor\n self.output.matrix_view[:, :] = self.output.matrix_view[:, :] * column_factor\n\n # increments iterarions and computes errors\n self.gap = max(\n abs(1 - np.min(row_factor)),\n abs(np.max(row_factor) - 1),\n abs(1 - np.min(column_factor)),\n abs(np.max(column_factor) - 1),\n )\n\n self.report.append(str(iter) + \" , \" + str(\"{:4,.10f}\".format(float(np.nansum(self.gap)))))\n\n self.report.append(\"\")\n self.report.append(\"Running time: \" + str(\"{:4,.3f}\".format(perf_counter() - t)) + \"s\")",
"def fit(self, data, fit='quantiles'):\n if fit == 'MLE':\n self.setParamsMLE(data)\n self.setDistObj()\n isConverged = True # assume stats.fit will always return a distribution\n else:\n dataMoments = np.array([np.mean(data), np.std(data, ddof=1), moment(data, 3)])\n\n def objFunc(X):\n [self.shape, self.loc, self.scale] = X\n if self.fixedAtZero:\n self.loc = 0\n self.setDistObj()\n if fit == 'quantiles':\n obj = probPlotSqrErr(data, self, self.type, showPlots=False)[0]\n elif fit == 'MOM':\n distMoments = self.moments()\n weights = [1, 1,\n 0.1] # scale the influence of each moment # set last entry to remove skewness from the assessment\n # scale each moment error relative to the data moment value, but replace the data moment with a constant if it is close to zero\n obj = np.sum([abs(dataMoments[i] - distMoments[i]) / max(dataMoments[i], 1E-6) * weights[i] for i in\n range(\n self.nParams)]) # only use the number of moments needed to specify the distribution to match the data # np.sum((distMoments-dataMoments)**2) # np.sum([abs( (dataMoments[i]-distMoments[i])**(1/(i+1)) ) for i in range(3)]) #np.sum((dist.moments()-dataMoments)**2)\n return obj\n\n X = [self.shape, self.loc, self.scale]\n\n res = minimize(objFunc, X, method='SLSQP', options={'disp': True, 'maxiter': 600,\n 'ftol': 1e-8}) # , bounds=bnds, constraints=cons, # options={'maxiter': 500, 'gtol': 1e-6, 'disp': True}\n # method='SLSQP' 'TNC' 'L-BFGS-B' 'COBYLA' #\n # seems to ignore the constraint if bounds not included with method='SLSQP'\n isConverged = res.success\n if isConverged:\n [self.shape, self.loc, self.scale] = res.x\n else:\n [self.shape, self.loc, self.scale] = X # revert to previous values\n\n if self.fixedAtZero:\n self.loc = 0\n\n self.setDistObj()\n return isConverged",
"def monte_carlo(func,x,popt,raw_y_vals,iterations=200,red_chi_sqrs=False,yerrs=None):\n\n fit_y_vals = func(x,*popt)\n #plt.plot(x,fit_y_vals)\n std = sum(np.abs(raw_y_vals-fit_y_vals))/len(x)\n fit_params = []\n\n if red_chi_sqrs and yerrs is not None:\n red_chis = []\n if red_chi_sqrs and yerrs is None:\n print(\"You need to have the errors in y to calculate chis\", sys.exc_info()[0])\n raise\n\n for _ in range(iterations):\n t,sim_y_vals = gen_data(fit_y_vals,sigma=std)\n popt,pcov = curve_fit(func,x,sim_y_vals,popt)\n #plt.plot(x,sim_y_vals,\"o\")\n fit_params.append(popt)\n\n # Calucalate chi_squares\n if red_chi_sqrs:\n red_chis.append(redchi(func,x,popt,raw_y_vals,yerrs))\n\n\n if red_chi_sqrs: \n return np.array(fit_params), np.array(red_chis)\n\n else:\n return np.array(fit_params)",
"def fitting(fitfunc, X, Y, start_parm, correlated=True, verbose=True):\n errfunc = lambda p, x, y, error: np.dot(error, (y-fitfunc(p,x)).T)\n \n # compute inverse, cholesky decomposed covariance matrix\n if not correlated:\n cov = np.diag(np.diagonal(np.cov(Y.T)))\n else:\n cov = np.cov(Y.T)\n cov = (np.linalg.cholesky(np.linalg.inv(cov))).T\n\n # degrees of freedom\n dof = float(Y.shape[1]-len(start_parm)) \n # create results arrays\n res = np.zeros((Y.shape[0], len(start_parm)))\n res_cov = np.zeros((len(start_parm), len(start_parm)))\n chisquare = np.zeros(Y.shape[0])\n # The FIT to the boostrap samples\n for b in range(0, Y.shape[0]):\n p,cov1,infodict,mesg,ier = leastsq(errfunc, start_parm, \n args=(X, Y[b,:], cov), full_output=1, factor=0.1)\n chisquare[b] = float(sum(infodict['fvec']**2.))\n res[b] = np.array(p)\n if b==0:\n # print(cov1)\n res_cov = cov1*chisquare[b]/dof\n # print(res_cov)\t\n # calculate mean and standard deviation\n res_mean, res_std = af.calc_error(res)\n # chi2 = np.median(chisquare)\n # p-value calculated\n pvals_originfit = 1. - scipy.stats.chi2.cdf(chisquare[0], dof)\n \n # The fit to the mean value\n y = np.mean(Y, axis=0)\n p,cov1,infodict,mesg,ier = leastsq(errfunc, start_parm, \\\n args=(X, y, cov), full_output=1)\n chisquare_meanfit = float(sum(infodict['fvec']**2.))\n pvals_meanfit = 1. - scipy.stats.chi2.cdf(chisquare_meanfit, dof)\n # writing results to screen\n if verbose:\n if correlated:\n print(\"fit results for a correlated fit:\")\n else:\n print(\"fit results for an uncorrelated fit:\")\n print(\"degrees of freedom: %f\\n\" % dof)\n \n print(\"bootstrap fit:\")\n for rm, rs in zip(res_mean, res_std):\n print(\" %.6e +/- %.6e\" % (rm, rs))\n #print(\"Chi^2/dof: %.6e +/- %.6e\\n\" % (chi2/dof, np.std(chisquare)/dof))\n\n print(\"mean value fit:\")\n for rm, rs in zip(p, res_std):\n print(\" %.6e +/- %.6e\" % (rm, rs))\n print(\" Chi^2/dof: %.6e \" % (chisquare_meanfit / dof))\n print(\" p-value: %lf\" % pvals_meanfit) \n\n print(\"original data fit:\")\n for rm, rs in zip(res[0], res_std):\n print(\" %.6e +/- %.6e\" % (rm, rs))\n print(\" Chi^2/dof: %.6e \" % (chisquare[0]/dof))\n print(\" p-value: %lf\" % pvals_originfit) \n return res, res_cov.flatten(), chisquare[0]/dof, pvals_originfit",
"def _single_shot_batch_result(results):\r\n\r\n grads = []\r\n start = 1 if c0 is not None and f0 is None else 0\r\n r0 = f0 or results[0]\r\n\r\n output_dims = []\r\n # TODO: Update shape for CV variables\r\n for m in tape.measurements:\r\n if isinstance(m, ProbabilityMP):\r\n output_dims.append(2 ** len(m.wires))\r\n else:\r\n output_dims.append(1)\r\n\r\n for s in shapes:\r\n if s == 0:\r\n # parameter has zero gradient\r\n if not isinstance(results[0], tuple):\r\n g = qml.math.zeros_like(results[0])\r\n else:\r\n g = []\r\n for i in output_dims:\r\n zero = qml.math.squeeze(qml.math.zeros(i))\r\n g.append(zero)\r\n\r\n grads.append(g)\r\n continue\r\n\r\n res = results[start : start + s]\r\n start = start + s\r\n\r\n # compute the linear combination of results\r\n # and coefficients\r\n\r\n pre_grads = []\r\n\r\n if len(tape.measurements) == 1:\r\n res = qml.math.stack(res)\r\n c = qml.math.convert_like(coeffs, res)\r\n lin_comb = qml.math.tensordot(res, c, [[0], [0]])\r\n pre_grads.append(lin_comb)\r\n else:\r\n for i in range(len(tape.measurements)):\r\n r = qml.math.stack([r[i] for r in res])\r\n c = qml.math.convert_like(coeffs, r)\r\n lin_comb = qml.math.tensordot(r, c, [[0], [0]])\r\n pre_grads.append(lin_comb)\r\n\r\n # Add on the unshifted term\r\n if c0 is not None:\r\n if len(tape.measurements) == 1:\r\n c = qml.math.convert_like(c0, r0)\r\n pre_grads = [pre_grads[0] + r0 * c]\r\n else:\r\n for i in range(len(tape.measurements)):\r\n r_i = r0[i]\r\n c = qml.math.convert_like(c0, r_i)\r\n pre_grads[i] = pre_grads[i] + r_i * c\r\n\r\n coeff_div = qml.math.cast_like(\r\n qml.math.convert_like(1 / h**n, pre_grads[0]), pre_grads[0]\r\n )\r\n\r\n if len(tape.measurements) > 1:\r\n pre_grads = tuple(\r\n qml.math.convert_like(i * coeff_div, coeff_div) for i in pre_grads\r\n )\r\n else:\r\n pre_grads = qml.math.convert_like(pre_grads[0] * coeff_div, coeff_div)\r\n\r\n grads.append(pre_grads)\r\n # Single measurement\r\n if len(tape.measurements) == 1:\r\n if len(tape.trainable_params) == 1:\r\n return grads[0]\r\n return tuple(grads)\r\n\r\n # Reordering to match the right shape for multiple measurements\r\n grads_reorder = [[0] * len(tape.trainable_params) for _ in range(len(tape.measurements))]\r\n for i in range(len(tape.measurements)):\r\n for j in range(len(tape.trainable_params)):\r\n grads_reorder[i][j] = grads[j][i]\r\n\r\n # To tuple\r\n if len(tape.trainable_params) == 1:\r\n return tuple(elem[0] for elem in grads_reorder)\r\n return tuple(tuple(elem) for elem in grads_reorder)",
"def make_results(self):\n fitted = self.fitted\n self.results = OrderedDict()\n ## fitting results\n self.results.update(\n nfev = fitted.nfev,\n ndata = fitted.ndata,\n nvarys = fitted.nvarys, # number of varible paramters\n nfree = fitted.nfree, # degree of freem\n chisqr = fitted.chisqr,\n redchi = fitted.redchi,\n aic = fitted.aic,\n bic = fitted.bic)\n params = fitted.params\n pnames = list(params.keys())\n pvalues = OrderedDict()\n for pn in pnames:\n par = params.get(pn)\n pvalues[pn] = [par.value, par.min, par.max, par.vary]\n self.results[\"params\"] = pvalues\n ## confidence intervals\n if hasattr(self, \"ci\") and self.ci is not None:\n ci = self.ci\n ci_values = OrderedDict()\n ci_sigmas = [ \"ci%02d\" % (v[0]*100) for v in ci.get(pnames[0]) ]\n ci_names = sorted(list(set(ci_sigmas)))\n ci_idx = { k: [] for k in ci_names }\n for cn, idx in zip(ci_sigmas, range(len(ci_sigmas))):\n ci_idx[cn].append(idx)\n # parameters ci\n for pn in pnames:\n ci_pv = OrderedDict()\n pv = [ v[1] for v in ci.get(pn) ]\n # best\n pv_best = pv[ ci_idx[\"ci00\"][0] ]\n ci_pv[\"best\"] = pv_best\n # ci of each sigma\n pv2 = [ v-pv_best for v in pv ]\n for cn in ci_names[1:]:\n ci_pv[cn] = [ pv2[idx] for idx in ci_idx[cn] ]\n ci_values[pn] = ci_pv\n self.results[\"ci\"] = ci_values",
"def write_results(js, fit_rms, fit_err, hold_rms_=None, hold_err_=None, \\\n sigma2_=None):\n _bcs.f90wrap_write_results(js=js, fit_rms=fit_rms, fit_err=fit_err, \\\n hold_rms_=hold_rms_, hold_err_=hold_err_, sigma2_=sigma2_)",
"def estimate_calibration_params(model_results, timing_results):\n\n params = set(_FMM_STAGE_TO_CALIBRATION_PARAMETER.values())\n\n nresults = len(model_results)\n\n if nresults != len(timing_results):\n raise ValueError(\"must have same number of model and timing results\")\n\n uncalibrated_times = {}\n actual_times = {}\n\n for param in params:\n uncalibrated_times[param] = np.zeros(nresults)\n actual_times[param] = np.zeros(nresults)\n\n from pymbolic import evaluate\n\n for i, model_result in enumerate(model_results):\n context = model_result.params.copy()\n for param in params:\n context[param] = var(param)\n\n # Represents the total modeled cost, but leaves the calibration\n # parameters symbolic.\n total_modeled_cost = evaluate(\n sum(model_result.raw_costs.values()),\n context=context)\n\n collected_times = _collect(total_modeled_cost, params)\n\n for param, time in collected_times.items():\n uncalibrated_times[param][i] = time\n\n for i, timing_result in enumerate(timing_results):\n for param, time in timing_result.items():\n calibration_param = (\n _FMM_STAGE_TO_CALIBRATION_PARAMETER[param])\n actual_times[calibration_param][i] = time[\"process_elapsed\"]\n\n result = {}\n\n for param in params:\n uncalibrated = uncalibrated_times[param]\n actual = actual_times[param]\n\n if np.allclose(uncalibrated, 0):\n result[param] = float(\"NaN\")\n continue\n\n result[param] = (\n actual.dot(uncalibrated) / uncalibrated.dot(uncalibrated))\n\n return result",
"def fit_image(self):\n self.params = self.all_params['Fit 0']\n self.fit_results = minimize(self.fit_dict[self.fit_type], self.params,\n args = ())\n #report_fit(self.fit_results)\n sel.fparams = self.fit_results.params"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
loglike, grad = pylag.mlfit.StackedMLCrossSpectrum.log_likelihood(params, eval_gradient=True) Evaluate the log(marginal likelihood), as well as its gradient, for the covariance matrix defined by some set of input parameters. The log-likelihood for the stack of light curve pairs is the sum of the log-likelihoods evaluated for each pair of light curves.
|
def log_likelihood(self, params, eval_gradient=True):
    if eval_gradient:
        segment_loglike = [c.log_likelihood(params, eval_gradient) for c in self.mlcross_spec]
        # separate and sum the likelihoods and the gradients
        like = np.array([l[0] for l in segment_loglike])
        grad = np.array([l[1] for l in segment_loglike])
        if np.all(np.isfinite(like)):
            return np.sum(like), grad.sum(axis=0)
        else:
            return (-1e6, np.zeros(len([p for p in params if params[p].vary])) - 1e6)
    else:
        return np.sum([c.log_likelihood(params, eval_gradient) for c in self.mlcross_spec])
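
A minimal, self-contained sketch of the stacking pattern used above; the per-segment likelihood here is a stand-in, not the pylag implementation. Each segment returns (loglike, grad); the stack sums the scalar likelihoods and sums the gradient vectors axis-wise:

import numpy as np

# stand-in per-segment likelihood: a Gaussian log-density and its gradient
def segment_loglike(params, eval_gradient=True):
    ll = -0.5 * np.sum(params ** 2)
    return (ll, -params) if eval_gradient else ll

segments = [segment_loglike] * 3
params = np.array([0.1, -0.2])

results = [seg(params, True) for seg in segments]
like = np.array([r[0] for r in results])
grad = np.array([r[1] for r in results])

if np.all(np.isfinite(like)):
    total = (like.sum(), grad.sum(axis=0))
else:
    # mirror the fallback above: a large negative penalty if any segment fails
    total = (-1e6, np.zeros_like(params) - 1e6)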
|
[
"def log_likelihood(self, params, eval_gradient=True):\n c = self.cov_matrix(params)\n\n # add white noise along the leading diagonal\n # this should be the Poisson noise term when calculating a PSD\n if self.noise is not None:\n c += np.diag(self.noise)\n\n try:\n L = cho_factor(c, lower=True, check_finite=False)[0]\n except np.linalg.LinAlgError:\n try:\n # try doubling the noise first\n L = cho_factor(c + np.diag(self.noise), lower=True, check_finite=False)[0]\n except np.linalg.LinAlgError:\n #printmsg(2, \"WARNING: Couldn't invert covariance matrix with parameters \" + param2array(params))\n return (-1e6, np.zeros(len([p for p in params if params[p].vary])) - 1e6) if eval_gradient else -1e6\n except ValueError:\n return (np.inf, np.zeros(len([p for p in params if params[p].vary]))) if eval_gradient else -np.inf\n\n alpha = cho_solve((L, True), self.data, check_finite=False)\n\n log_likelihood_dims = -0.5 * np.einsum(\"ik,ik->k\", self.data, alpha)\n log_likelihood_dims -= np.log(np.diag(L)).sum()\n log_likelihood_dims -= c.shape[0] / 2 * np.log(2 * np.pi)\n log_likelihood = log_likelihood_dims.sum(-1)\n\n if eval_gradient:\n c_gradient = self.cov_matrix_deriv(params)\n tmp = np.einsum(\"ik,jk->ijk\", alpha, alpha)\n tmp -= cho_solve((L, True), np.eye(c.shape[0]))[:, :, np.newaxis]\n gradient_dims = 0.5 * np.einsum(\"ijl,ijk->kl\", tmp, c_gradient)\n gradient = gradient_dims.sum(-1)\n\n # note we return -log_likelihood, so we can minimize it!\n return (log_likelihood, gradient) if eval_gradient else log_likelihood",
"def compute_log_likelihood(X, params):\n m, n, _ = X.shape\n likelihood = 0.\n for i in range(m):\n p_y_0 = p_y(0, params)\n p_y_1 = p_y(1, params)\n for j in range(n):\n x = X[i,j]\n p_y_0 += log_sum_exp(p_x_z(x,0,params) + p_z_y(0,0,params), p_x_z(x,1,params) + p_z_y(1,0,params))\n p_y_1 += log_sum_exp(p_x_z(x,0,params) + p_z_y(0,1,params), p_x_z(x,1,params) + p_z_y(1,1,params))\n likelihood += log_sum_exp(p_y_0, p_y_1)\n\n return likelihood",
"def GSM_log_likelihood(X, model):\n sum = 0\n for i in range(len(model.mix)):\n sum += logsumexp(np.log(model.mix[i]) + multivariate_normal.logpdf(X.T, cov=model.cov[i]))\n return sum",
"def grad_log_likelihood(kc, cb, eval_request, eval_result, model_params):\n if eval_request.type != KN_RC_EVALGA:\n print(\"*** grad_log_likelihood incorrectly called with eval type %d\" %\n eval_request.type)\n return -1\n params = eval_request.x\n\n np.savetxt(\"current_pars_k.txt\", params)\n\n mus_and_maybe_grad = model_params.mus_and_maybe_grad\n bases_surplus = model_params.bases_surplus\n observed_matching = model_params.observed_matching\n\n ncat_men, ncat_women = bases_surplus.shape[:-1]\n n_prod_categories = ncat_men * ncat_women\n\n mus, _, dmus = mus_and_maybe_grad(params, model_params, gr=True)\n\n grad_loglik = grad_loglik_all_mus(observed_matching, mus)\n\n gradN = grad_loglik[-1]\n gradxy = grad_loglik[:n_prod_categories].reshape(\n (ncat_men, ncat_women)) + gradN\n gradx0 = grad_loglik[n_prod_categories:(\n n_prod_categories + ncat_men)] + gradN\n grad0y = grad_loglik[(n_prod_categories + ncat_men):-1] + gradN\n\n der_muxy = np.einsum('ij,ijk->k', gradxy, dmus.muxy)\n der_mux0 = np.einsum('i,ik->k', gradx0, dmus.mux0)\n der_mu0y = np.einsum('i,ik->k', grad0y, dmus.mu0y)\n\n eval_result.objGrad = -(der_muxy + der_mux0 + der_mu0y)\n\n return 0",
"def grad_loglik_all_mus(observed_matching, simulated_matching):\n\n muxy_sim, mux0_sim, mu0y_sim = simulated_matching.unpack()\n n_households_sim = np.sum(muxy_sim) + np.sum(mux0_sim) + np.sum(mu0y_sim)\n\n muxy_obs, mux0_obs, mu0y_obs = observed_matching.unpack()\n n_households_obs = np.sum(muxy_obs) + np.sum(mux0_obs) + np.sum(mu0y_obs)\n\n der_x0 = mux0_obs * der_nplog(mux0_sim)\n der_0y = mu0y_obs * der_nplog(mu0y_sim)\n der_xy = muxy_obs * der_nplog(muxy_sim)\n n_prod_categories, ncat_men, ncat_women = \\\n muxy_obs.size, mux0_obs.size, mu0y_obs.size\n grad_loglik = np.zeros(n_prod_categories + ncat_men + ncat_women + 1)\n grad_loglik[:n_prod_categories] = der_xy.reshape(n_prod_categories)\n grad_loglik[n_prod_categories:(n_prod_categories + ncat_men)] \\\n = der_x0\n grad_loglik[(n_prod_categories + ncat_men):-1] \\\n = der_0y\n grad_loglik[-1] = \\\n -n_households_obs * der_bslog(n_households_sim)\n\n return grad_loglik",
"def log_marginal_likelihood(self, X):\n pass",
"def ICA_log_likelihood(X, model):\n\n sum = 0\n S = model.P.T.dot(X)\n for i in range(len(model.mix)):\n sum += GSM_log_likelihood(S[i].reshape(1, -1), model.models[i])\n return sum\n # TODO: YOUR CODE HERE",
"def logL(self, X, Xerr):\n if self.V is None or self.mu is None or self.weights is None:\n raise Exception(\"Model parameters not set.\")\n \n return self.GMM.logL(X,Xerr)",
"def log_likelihood_grad(X, Y, w, C=0.1):\n \n # d is dimensionality of a sample.\n d = len(w)\n # N is #training samples.\n N = len(X)\n s = np.zeros(d)\n \n for i in range(N):\n s += Y[i] * X[i] * logistic(-Y[i] * np.dot(X[i], w))\n \n s -= C*w\n return s",
"def log_likelihood_function(self, instance):\r\n\r\n \"\"\"\r\n In the previous tutorial the instance was a single `Gaussian` profile, meaning we could create the model data \r\n using the line:\r\n\r\n model_data = instance.gaussian.model_data_1d_via_xvalues_from(xvalues=self.data.xvalues)\r\n\r\n In this tutorial our instance is comprised of multiple 1D Gaussians, because we will use a `Collection` to\r\n compose the model:\r\n\r\n model = Collection(gaussian_0=Gaussian, gaussian_1=Gaussian).\r\n\r\n By using a Collection, this means the instance parameter input into the fit function is a\r\n dictionary where individual profiles (and their parameters) can be accessed as followed:\r\n\r\n print(instance.gaussian_0)\r\n print(instance.gaussian_1)\r\n print(instance.gaussian_0.centre)\r\n\r\n In this tutorial, the `model_data` is therefore the summed `model_data` of all individual Gaussians in the \r\n model. The function `model_data_from_instance` performs this summation. \r\n \"\"\"\r\n model_data = self.model_data_from_instance(instance=instance)\r\n\r\n residual_map = self.data - model_data\r\n chi_squared_map = (residual_map / self.noise_map) ** 2.0\r\n chi_squared = sum(chi_squared_map)\r\n noise_normalization = np.sum(np.log(2 * np.pi * noise_map**2.0))\r\n log_likelihood = -0.5 * (chi_squared + noise_normalization)\r\n\r\n return log_likelihood",
"def log_likelihood(self, *args, context=None):\n\n if self.owner is None:\n raise ValueError(\n \"Cannot compute a log-likelihood without being assigned as the function of an \"\n \"OptimizationControlMechanism. See the documentation for the \"\n \"ParameterEstimationControlMechanism for more information.\"\n )\n\n # Make sure we have instantiated the log-likelihood function.\n if self._ll_func is None:\n self._ll_func = self._make_objective_func(context=context)\n\n context.execution_phase = ContextFlags.PROCESSING\n ll, sim_data = self._ll_func(*args)\n context.remove_flag(ContextFlags.PROCESSING)\n\n return ll, sim_data",
"def log_marginal_likelihood(self, X):\n theta, beta = self.get_model()\n N,_ = X.shape \n logmarg_lk = 0.0\n for i in range(N):\n logmarg_lk += self.util_logmarginal_ind(X[i], theta, beta)\n return logmarg_lk\n # raise NotImplementedError",
"def get_lhm_log_model_prob_fn(\n sigma_z: float) -> Callable[[base.Output, hk.Params, base.Index], float]:\n\n def log_prob_fn(out: base.Output, params: hk.Params, index: base.Index):\n del out # Here we compute the log prob from params and index directly.\n predicate = lambda module_name, name, value: name == 'w'\n weight_matrices = hk.data_structures.filter(predicate, params)\n weight_matrices, _ = jax.tree_flatten(weight_matrices)\n weight_matrix = jnp.concatenate(weight_matrices, axis=1)\n weight_matrix_sq = jnp.matmul(weight_matrix, weight_matrix.T)\n _, log_det_w_sq = jnp.linalg.slogdet(weight_matrix_sq + 1e-6 *\n jnp.eye(weight_matrix_sq.shape[0]))\n log_det_w = 0.5 * log_det_w_sq\n index_l2_sq = jnp.sum(jnp.square(index))\n return -0.5 * index_l2_sq / sigma_z**2 - log_det_w\n\n return log_prob_fn",
"def forward(log_emlik, log_startprob, log_transmat):",
"def log_likelihood(test_series, cov, maps, residues):\n # try:\n # This makes heavy use of the matrix inversion lemma\n #test_series = np.concatenate(test_series, axis=0)\n n_samples = test_series.shape[0]\n white_test_series = test_series / residues\n residues_fit = np.sum(white_test_series ** 2)\n white_test_series /= residues\n white_projection = np.dot(white_test_series, maps)\n del white_test_series\n prec_maps = linalg.inv(cov)\n prec_maps += np.dot(maps.T / residues ** 2, maps)\n residues_fit -= np.trace(\n np.dot(np.dot(white_projection.T, white_projection),\n linalg.inv(prec_maps)))\n del white_projection\n white_maps = maps / residues[:, np.newaxis]\n prec_maps += np.dot(white_maps.T, white_maps)\n del white_maps\n det = fast_logdet(prec_maps)\n del prec_maps\n return (-residues_fit / n_samples - fast_logdet(cov)\n - det - 2 * np.sum(np.log(residues)))\n #except linalg.LinAlgError:\n # return -np.inf",
"def gradFunc(param):\n\n return np.array(\n GeneralizedExtremeValueDistribution.computeNegLogLikelihoodGrad(\n param[0], param[1], param[2], data\n ))",
"def gradient_log_linear(weights):\n\n pooling_pooled, pooling_reg_const = log_linear_pooling(P, weights)\n log_pooling = np.log(pooling_pooled)\n res = np.zeros(nviews)\n for i in np.arange(nviews):\n res[i] = np.sum(weights[i] * log_pooling / P[i])\n return res",
"def forward(log_emlik, log_startprob, log_transmat):\n num_state = log_startprob.shape[0] - 1\n num_frame = log_emlik.shape[0]\n \n logalpha = np.zeros((num_frame,num_state))\n\n for j in range(num_state):\n logalpha[0][j] = log_startprob[j] + log_emlik[0][j]\n\n for i in range(1,num_frame):\n for j in range(num_state):\n logalpha[i][j] = logsumexp(logalpha[i-1,:]+log_transmat[:,j]) +log_emlik[i][j]\n\n return logalpha",
"def gradient(self, z: np.ndarray, ado_ids: typing.List[str] = None, tag: str = mantrap.constants.TAG_OPTIMIZATION\n ) -> np.ndarray:\n ado_ids = ado_ids if ado_ids is not None else self.env.ado_ids\n ego_trajectory, grad_wrt = self.z_to_ego_trajectory(z, return_leaf=True)\n gradient = [m.gradient(ego_trajectory, grad_wrt=grad_wrt, tag=tag, ado_ids=ado_ids) for m in self.modules]\n gradient = np.sum(gradient, axis=0)\n\n self.logger.log_append(grad_overall=np.linalg.norm(gradient), tag=tag)\n module_log = {f\"{mantrap.constants.LT_GRADIENT}_{key}\": mod.grad_current(tag=tag)\n for key, mod in self.module_dict.items()}\n self.logger.log_append(**module_log, tag=tag)\n return gradient"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Searches a list for a given item and returns the processing time needed to find the item and a boolean indicating whether it is present.
|
import time

def sequential_search(a_list, item):
    strt_time = time.time()
    pos = 0
    found = False
    while pos < len(a_list) and not found:
        if a_list[pos] == item:
            found = True
        else:
            pos = pos + 1
    end_time = time.time()
    run_time = end_time - strt_time
    return (run_time, found)
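
A quick usage sketch; searching for a value known to be absent exercises the worst case, which scans the whole list:

if __name__ == '__main__':
    run_time, found = sequential_search(list(range(10000)), -1)
    print('found=%s in %.7f seconds' % (found, run_time))  # found=False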
|
[
"def search(self, item):\n current = self.head\n found = False\n while current is not None and not found:\n if current.get_data() is item:\n found = True\n else:\n current = current.get_next()\n return found",
"def search(self, item):\n found = False\n stop = False\n current = self.head\n while current is not None and not found and not stop:\n if current.get_data() == item:\n found = True\n elif current.get_data() > item:\n stop = True\n else:\n current = current.get_next()\n\n return found",
"def search(self, item):\n temp = self.head\n\n while(temp):\n if temp.data == item:\n return True\n temp = temp.next\n\n return False",
"def itemSearch(\n itemList, name, desiredNum=\"#1\", typeList=[], sortList=False\n): # noqa: C901\n logPrefix = \"sea: \"\n myitem = None\n debugItemSearch = False\n\n # strip out anything that's not a digit (i.e. number signs)\n desiredNum = int(re.sub(\"[^0-9]\", \"\", str(desiredNum)))\n\n if sortList:\n itemList = itemSort(itemList)\n\n dLog(\n logPrefix\n + \"Trying to search for item \"\n + name\n + \" #\"\n + str(desiredNum)\n + \" in \"\n + str([x.describe() for x in itemList]),\n debugItemSearch,\n )\n\n cnt = 0\n for oneitem in itemList:\n dLog(\n logPrefix + \"Checking item name \" + oneitem.getName() + \"...\",\n debugItemSearch,\n )\n if re.match(\"^\" + name.lower(), oneitem.getName().lower()): # fuzzy\n cnt += 1\n dLog(\n logPrefix\n + \"item name \"\n + oneitem.getName()\n + \" matched. Checking type...\",\n debugItemSearch,\n )\n if len(typeList) > 0:\n if oneitem.getType().lower() not in typeList:\n dLog(\n logPrefix\n + \"skipping item \"\n + oneitem.getName()\n + \" because it doesn't match type \"\n + str(typeList),\n debugItemSearch,\n )\n continue # skip if not the desired type\n else:\n dLog(\n logPrefix + \"skipping typecheck for item \" + oneitem.getName(),\n debugItemSearch,\n )\n dLog(\n logPrefix\n + \"Checking number for item name \"\n + oneitem.getName()\n + \" . Looking for #\"\n + str(desiredNum),\n debugItemSearch,\n )\n if cnt == desiredNum: # skip if not desired number\n dLog(\n logPrefix\n + \"Found item \"\n + oneitem.getName()\n + \" matching number \"\n + str(cnt),\n debugItemSearch,\n )\n myitem = oneitem\n break\n else:\n dLog(\n logPrefix\n + \"Could not find \"\n + oneitem.getName()\n + \" with matching number \"\n + str(desiredNum),\n debugItemSearch,\n )\n else:\n dLog(\n logPrefix + \"Item \" + oneitem.getName() + \" did not match.\",\n debugItemSearch,\n )\n if myitem:\n dLog(logPrefix + \"Found item \" + myitem.getName(), debugItemSearch)\n\n return myitem",
"def has(cls, item):\n return item in cls.list()",
"def search(self, item):\n current = self.head\n while current:\n if current.value == item:\n return True\n elif current.value < item:\n current = current.next\n else:\n break\n return False",
"def find(thelist, item=True, ret_all=False):\n if ret_all:\n return [i for i in xrange(len(thelist)) if thelist[i] == item]\n else:\n return next((i for i in xrange(len(thelist)) if thelist[i] == item), \\\n None)",
"def __contains__(self, item):\n\n # check for slycat path\n self.check_fs_path()\n\n # create hash from item\n digest = self.digest_hash(item)\n\n # get the item from the cache\n if digest in self._loaded:\n value = self._loaded[digest]\n\n # item was not in memory, check file system\n else:\n try:\n value = self._load(digest, item)\n\n except CacheError:\n # item was not in the cache or the file system\n return False\n\n # check if it has expired\n if value.expired():\n\n # cherrypy.log.error(\"[CACHE] value is expired for %s.\" % str(item))\n\n # contents were expired so we should delete them and return false\n self.expire(digest)\n return False\n\n return True",
"def linear_search(ls: list, item: object) -> bool:\n assert pythonic_is_sorted(ls)\n for index, e in enumerate(ls):\n if e == item:\n return index\n return -1",
"def contains(self, item: 'void *') -> \"SbBool\":\n return _coin.SbFifo_contains(self, item)",
"def better_linear_search(l, item):\n for i in range(len(l)):\n if l[i]==item:\n return i\n return False",
"def exists_in_queue(self, item):\r\n self.lock.acquire()\r\n count = self.queue.count(item)\r\n self.lock.release()\r\n return count != 0",
"def __contains__(self, item):\n first_hash = self._hash(item)\n have_wrapped = False\n if self._data[first_hash] == item:\n return True\n else:\n current_index = first_hash\n while self._data[current_index] is not None:\n if self._data[current_index] == item:\n # horay we found it\n return True\n if (current_index == first_hash) and have_wrapped:\n # back to original hash and didn't find item\n # phew - the hashtable is full!\n return False\n if current_index == (self.n_slots-1):\n # wrap back to start of hash table\n current_index = 0\n have_wrapped = True\n else:\n current_index += 1",
"def _wait_for_item(self, func_list, item_name,\n status=None, status_field=None,\n timeout=30, wait=0.5, negate=False):\n start = time.time()\n while(time.time() - start < timeout):\n LOG.debug(\"Waiting for item: {} since {:4.2f}s\"\n .format(item_name, time.time() - start))\n if negate:\n # check if item is gone\n if item_name not in [it.get(\"name\")\n for it in func_list()]:\n return item_name\n else:\n for it in func_list():\n if \"name\" in it:\n if item_name in it.get(\"name\"):\n # found: stop and return\n LOG.debug(\"Found item: {}\".format(it.get(\"name\")))\n if status is not None and status_field is not None:\n LOG.debug(\"Status is '{}' should '{}'\"\n .format(status_field(it),\n status))\n # also check status\n if status in status_field(it):\n return it.get(\"name\")\n if \"fail\" in status_field(it):\n raise OsmDeploymentException(\n \"{} failed\".format(item_name))\n else:\n return it.get(\"name\")\n time.sleep(wait)\n raise OsmTimeoutException(\"Item '{}' was not found in time\"\n .format(item_name))",
"def seqsearch(data, item):\n\tindex = 0\n\tfound = False\n\twhile index < len(data) and data[index] <= item and not found:\n\t\tif data[index] == item:\n\t\t\tfound = True\n\t\telse:\n\t\t\tindex += 1\n\tif found:\n\t\treturn index\n\telse:\n\t\treturn \"item not in list\"",
"def __contains__(self, item):\n return self.has_matching_inventory(item)",
"def contains(self, item : int) -> bool:\n return self.start <= item and item < self.end",
"def __contains__(self, value):\n # start at the beginning of the list\n # check every item\n # if you find a match, return True\n # if we get to the end, return False\n\n current = self.front\n while current is not None:\n if current.value == value:\n return True\n current = current.next_\n return False",
"def exists(self, key, item): # noqa\n return self.execute_command(CF_EXISTS, key, item)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests the 4 different search algorithms by generating 100 test lists at each of three different sizes, then calculates the average processing time for each algorithm and prints the results.
|
def main():
    samp_size = [500, 1000, 10000]
    for smpl in samp_size:
        # reset the totals for each sample size so the averages are not
        # polluted by the runs at the previous size
        tests = {'Sequential': 0,
                 'Ordered': 0,
                 'Bin Iterative': 0,
                 'Bin Recursive': 0}
        counter = 0
        while counter < 100:
            test_list = list_gen(smpl)
            tests['Sequential'] += sequential_search(test_list, -1)[0]
            tests['Ordered'] += ordered_sequential_search(test_list, -1)[0]
            tests['Bin Iterative'] += binary_search_iterative(test_list, -1)[0]
            tests['Bin Recursive'] += binary_search_recursive(test_list, -1)[0]
            counter += 1
        print('For sample size %s:' % smpl)
        for tst in tests:
            print('%s Search took %10.7f seconds to run, '
                  'on average.' % (tst, tests[tst] / counter))
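
list_gen is not shown in this row; a plausible minimal sketch, assuming it should produce a sorted list of random positive integers (sorted input keeps the ordered and binary searches valid, and searching for -1 is then a guaranteed miss):

import random

def list_gen(size):
    # sorted so ordered_sequential_search and the binary searches behave
    # correctly; values are >= 1, so the searched-for -1 never appears
    return sorted(random.randint(1, size * 10) for _ in range(size))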
|
[
"def multi_results(benchmark):\n # Read in results\n tensat_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n taso_root = os.path.join(os.path.dirname(tensat_root), \"TASO\")\n\n taso_benchmark_name = benchmark\n if benchmark == 'nasneta':\n taso_benchmark_name = 'nasnet_a'\n elif benchmark == 'vgg':\n taso_benchmark_name = 'vgg19-7'\n taso_runtime_file = os.path.join(taso_root, \"examples/{}_time.txt\".format(taso_benchmark_name))\n\n with open(taso_runtime_file, 'r') as f:\n content = f.readlines()\n\n orig_runtimes = []\n for line in content[-5:]:\n times = line.split('\\t')\n orig_runtimes.append(float(times[0]))\n orig_mean = np.mean(orig_runtimes)\n\n\n # iter=0\n mean_iter_0, mean_sat_iter_0, mean_ext_iter_0, mean_nodes_iter_0 = get_iter_stats(benchmark, tensat_root, iter=0)\n\n # iter=1\n mean_iter_1, mean_sat_iter_1, mean_ext_iter_1, mean_nodes_iter_1 = get_iter_stats(benchmark, tensat_root, iter=1)\n\n # iter=2\n mean_iter_2, mean_sat_iter_2, mean_ext_iter_2, mean_nodes_iter_2 = get_iter_stats(benchmark, tensat_root, iter=2)\n\n # iter=3\n mean_iter_3, mean_sat_iter_3, mean_ext_iter_3, mean_nodes_iter_3 = get_iter_stats(benchmark, tensat_root, iter=3)\n\n # Plot runtime & optimizer time v.s. iter\n speedup = [orig_mean/mean_iter_0, orig_mean/mean_iter_1, orig_mean/mean_iter_2]\n optimizer_time = [mean_sat_iter_0+mean_ext_iter_0, mean_sat_iter_1+mean_ext_iter_1, mean_sat_iter_2+mean_ext_iter_2]\n if mean_iter_3 > 0:\n speedup.append(orig_mean/mean_iter_3)\n optimizer_time.append(mean_sat_iter_3+mean_ext_iter_3)\n\n speedup = [(i-1)*100 for i in speedup]\n\n nodes = [mean_nodes_iter_0, mean_nodes_iter_1, mean_nodes_iter_2, mean_nodes_iter_3]\n\n result = {}\n result['speedup'] = speedup\n result['optimizer'] = optimizer_time\n result['nodes'] = nodes\n\n return result",
"def main():\n list_size = [500, 1000, 10000]\n sort_result = {'insertion':0, 'shell':0, 'python':0}\n for i in list_size:\n list_count = 0\n while list_count < 100:\n random_number_list = get_me_random_list(i)\n sort_result['insertion']+=insertion_sort(random_number_list)\n sort_result['shell'] += shell_sort(random_number_list)\n sort_result['python'] += python_sort(random_number_list)\n list_count+=1\n\n for key, val in sort_result.items():\n print('%s sort took %10.7f seconds to run, on average'%(key, val/100))",
"def analyse_time(size_to_test, no_of_trials):\n \n if sys.version_info < (3, 3):\n get_time = time.clock\n else:\n get_time = time.perf_counter\n REZ = time.get_clock_info('perf_counter').resolution \n\n total_time = 0 \n for trial in range(no_of_trials):\n list_to_test = generate_random_array(size_to_test)\n start = get_time()\n sol = giftwrap_e(list_to_test)\n end = get_time()\n total_time += (end - start)\n time_taken_per_locate = (1.0*total_time) / no_of_trials\n print('finish timing for array with {} random points'.format(size_to_test))\n \n #Uncomment if want graph\n #draw_graph(list_to_test, sol)\n \n print(size_to_test)\n #print(time_taken_per_locate)\n return time_taken_per_locate",
"def test_MultiRun():\n\n # For each optimizer.\n for optimizer in [PSO, MOL, DE, LUS, PS]:\n # For different search-space dimensionalities.\n for dim in [2, 47]:\n # For different display intervals.\n for display_interval in [0, 11, 167]:\n # For different number of fitness evaluations.\n for max_evaluations in [53, 10391]:\n # For different fitness-trace-lengths.\n for trace_len in [0, 101]:\n # For different number of optimization runs.\n for num_runs in [1, 5]:\n # For parallel and non-parallel.\n for parallel in [True, False]:\n # Take a benchmark problem at random.\n problem_class = random.choice(Problem.all_benchmark_problems)\n problem = problem_class(dim=dim)\n\n # Run the test using this configuration.\n yield _do_test_MultiRun, optimizer, problem, dim, max_evaluations, display_interval, trace_len, parallel, num_runs",
"def evaluate_existing_algorithms(algorithm, data, fast=False):\n acc = None\n if algorithm == \"ICU\":\n if data == \"SAFT Thai\":\n file = Path.joinpath(Path(__file__).parent.parent.absolute(), 'Data/SAFT/test.txt')\n acc = compute_accuracy(file, \"icu\")\n elif data == \"BEST\":\n if fast:\n acc = compute_accuracy_best(starting_text=40, ending_text=45, algorithm=\"icu\", exclusive=False)\n else:\n acc = compute_accuracy_best(starting_text=40, ending_text=60, algorithm=\"icu\", exclusive=False)\n elif data == \"exclusive BEST\":\n if fast:\n acc = compute_accuracy_best(starting_text=40, ending_text=45, algorithm=\"icu\", exclusive=True)\n else:\n acc = compute_accuracy_best(starting_text=40, ending_text=60, algorithm=\"icu\", exclusive=True)\n elif data == \"SAFT Burmese\":\n file = Path.joinpath(Path(__file__).parent.parent.absolute(), 'Data/SAFT_burmese_test_limited.txt')\n acc = compute_accuracy(file, \"icu\")\n elif data == \"my\":\n file = Path.joinpath(Path(__file__).parent.parent.absolute(), 'Data/my_test_segmented.txt')\n acc = compute_accuracy(file, \"icu\")\n elif data == \"exclusive my\":\n file = Path.joinpath(Path(__file__).parent.parent.absolute(), 'Data/my_test_segmented_exclusive.txt')\n acc = compute_accuracy(file, \"icu\")\n\n if algorithm == \"Deepcut\":\n if data == \"SAFT Thai\":\n file = Path.joinpath(Path(__file__).parent.parent.absolute(), 'Data/SAFT/test.txt')\n acc = compute_accuracy(file, \"deep\")\n elif data == \"BEST\":\n if fast:\n acc = compute_accuracy_best(starting_text=40, ending_text=45, algorithm=\"deep\", exclusive=False)\n else:\n acc = compute_accuracy_best(starting_text=40, ending_text=60, algorithm=\"deep\", exclusive=False)\n elif data == \"exclusive BEST\":\n if fast:\n acc = compute_accuracy_best(starting_text=40, ending_text=45, algorithm=\"deep\", exclusive=True)\n else:\n acc = compute_accuracy_best(starting_text=40, ending_text=60, algorithm=\"deep\", exclusive=True)\n\n if acc is None:\n print(\"Warning: the evaluation for this combination of data and algorithm is not supported.\")\n else:\n print(\n \"{} accuracy on {} data set: BIES accuracy = {}, F1-score = {}\".format(algorithm, data,\n acc.get_bies_accuracy(),\n acc.get_f1_score()))\n return acc",
"def average_search_efficiency(config):\n \n #get parameters of the distributions depending on the chosen model\n if config['model'] == 'powerlaw':\n parameters = [config['beta']]\n #get policy from benchmark model\n policy = get_policy_from_dist(n_max = config['time_ep'], \n func = pdf_powerlaw,\n beta = config['beta']\n )\n \n elif config['model'] == 'double_exp':\n parameters = [config['d_int'], config['d_ext'], config['p']]\n #get policy from benchmark model\n policy = get_policy_from_dist(n_max=config['time_ep'],\n func = pdf_multimode,\n lambdas = np.array(parameters[:2]),\n probs = np.array([parameters[2], 1-parameters[2]])\n )\n \n \n #run the walks in parallel\n efficiencies = walk_from_policy(policy=policy,\n time_ep=config['time_ep'],\n n=config['n'],\n L=config['L'],\n Nt=config['Nt'],\n r=config['r'],\n lc=config['lc'])\n \n #get the mean search efficiency over the walks\n mean_eff = np.mean(efficiencies) \n tune.report(mean_eff = mean_eff)\n \n #save results\n if config['results_path']:\n np.save(config['results_path']+'efficiencies_'+ str([np.round(p, 10) for p in parameters])+'.npy', efficiencies)",
"def testOPFBenchmarks(self):\n # Check for benchmark misspellings\n for bm in self.listOfBenchmarks:\n if not bm in self.allBenchmarks:\n raise Exception(\"Unknown benchmark %s\" % bm)\n\n # Set up FIFO queue for handling the different directories that are created\n # for the tests\n fifodirs = deque()\n baseoutdir = self.outdir\n iterations = self.iterations\n exportDicts = self.setUpExportDicts()\n for iter in range(iterations):\n for exports in exportDicts:\n if len(exportDicts)>1:\n prependDict = exports\n else:\n prependDict = dict()\n if self.iterations > 1:\n prependDict[\"iteration\"] = iter\n prepend = self.generatePrependPath(prependDict)\n self.outdir = os.path.join(baseoutdir, prepend)\n if(\"sine\" in self.listOfBenchmarks):\n tmpsine = self.benchmarkSine()\n fifodirs.append(tmpsine)\n if(\"hotgym\" in self.listOfBenchmarks):\n tmphotgym = self.benchmarkHotGym()\n fifodirs.append(tmphotgym)\n if(\"twovars\" in self.listOfBenchmarks):\n tmptwovars = self.benchmarkTwoVars()\n fifodirs.append(tmptwovars)\n if(\"twovars2\" in self.listOfBenchmarks):\n tmptwovars2 = self.benchmarkTwoVarsSquare()\n fifodirs.append(tmptwovars2)\n if(\"threevars\" in self.listOfBenchmarks):\n tmpthreevars = self.benchmarkThreeVars()\n fifodirs.append(tmpthreevars)\n if(\"fourvars\" in self.listOfBenchmarks):\n tmpfourvars = self.benchmarkFourVars()\n fifodirs.append(tmpfourvars)\n if(\"categories\" in self.listOfBenchmarks):\n tmpcategories = self.benchmarkCategories()\n fifodirs.append(tmpcategories)\n if(\"sawtooth\" in self.listOfBenchmarks):\n tmpcategories = self.benchmarkSawtooth()\n fifodirs.append(tmpcategories)\n if(\"hotgymsc\" in self.listOfBenchmarks):\n tmphotgymsc = self.benchmarkHotGymSC()\n fifodirs.append(tmphotgymsc)\n self.outdir = baseoutdir\n self.syncFiles()\n if self.filesOnly:\n return\n if(self.maxConcurrentJobs==1):\n self.runBenchmarks = self.runBenchmarksSerial\n else:\n self.runBenchmarks = self.runBenchmarksParallel\n for iter in range(iterations):\n for exports in exportDicts:\n if(\"sine\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"sine\", exports))\n if(\"hotgym\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"hotgym\", exports))\n if(\"twovars\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"twovars\", exports))\n if(\"twovars2\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"twovars2\", exports))\n if(\"threevars\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"threevars\", exports))\n if(\"fourvars\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"fourvars\", exports))\n if(\"categories\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"categories\", exports))\n if(\"sawtooth\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"sawtooth\", exports))\n if(\"hotgymsc\" in self.listOfBenchmarks):\n assert(self.runBenchmarks(fifodirs.popleft(), \"hotgymsc\", exports))\n\n # Poll processes until they all finish.\n self.runJobs(self.maxConcurrentJobs)\n # Disabled removing the temporary directory\n if self.__trainFraction < 1.0:\n self.runProductionWorkers()\n self.waitForProductionWorkers()\n self.printResults()\n self.assertResults()",
"def run_search(outFolder, federateNumber, messageNumber, bytesNumber, updateInterval, simTime, logLevel, logFiles, uninterruptible, coreType, coreTick, coreTimeout, simulationTimeout, coSimPlatform, experimentType):\n\n df = pd.DataFrame(columns=['experiment type','co-simulation platform','core type','status','federates','messages','bytes','initialization time (cpu)','execution time (cpu)','closing time (cpu)','initialization time (wall)','execution time (wall)','closing time (wall)'])\n \n if coSimPlatform == 'FNCS':\n coSimType = 0\n elif coSimPlatform == 'HELICS':\n coSimType = 1\n else:\n print(\"ERROR: unknown Co-Simulation platform specified\")\n return df\n\n if experimentType == 'ManyToOne':\n expType = 0\n elif experimentType == 'Meshed':\n expType = 1\n else:\n print(\"ERROR: unknown Co-Simulation experiment type specified\")\n return df\n\n count = 1\n totalTestNum = len(federateNumber) * len(messageNumber) * len(bytesNumber) * len(coreType)\n for fedNum in federateNumber:\n for messNum in messageNumber:\n for bytesNum in bytesNumber:\n for coreNum in coreType: \n tempFolder = outFolder / coreNum / str('test_f_' + str(fedNum) + '_m_' + str(messNum) + '_b_' + str(bytesNum))\n if coSimType == 0:\n if expType == 0:\n create_many_to_one_experiment_fncs(tempFolder, fedNum, messNum, bytesNum, updateInterval, simTime, logLevel, logFiles)\n else:\n create_meshed_experiment_fncs(tempFolder, fedNum, fedNum-1, messNum, bytesNum, updateInterval, simTime, logLevel, logFiles)\n else:\n if expType == 0:\n create_many_to_one_experiment_helics(tempFolder, fedNum, messNum, bytesNum, updateInterval, simTime, logLevel, logFiles, uninterruptible, coreNum, coreTick, coreTimeout)\n else:\n create_meshed_experiment_helics(tempFolder, fedNum, fedNum-1, messNum, bytesNum, updateInterval, simTime, logLevel, logFiles, uninterruptible, coreNum, coreTick, coreTimeout) \n print(\"running\", coSimPlatform, \"test\", count, \"of\", totalTestNum, \"with federates=\" + str(fedNum), \"messages=\" + str(messNum), \"bytes=\" + str(bytesNum), \"core=\" + str(coreNum), \"status=\", end='', flush=True)\n result, initTime, execTime, closeTime , initTimeWall, execTimeWall, closeTimeWall = run_experiment(tempFolder, simulationTimeout) \n if result == 0:\n simStatus = 'success'\n print(colored(simStatus, 'green'), flush=True)\n elif result == 1:\n simStatus = 'failure'\n print(colored(simStatus, 'red'), flush=True)\n else:\n simStatus = 'timeout'\n print(colored(simStatus, 'yellow'), flush=True)\n\n df = df.append(pd.DataFrame([[experimentType, coSimPlatform, coreNum, simStatus, fedNum, messNum, bytesNum, initTime, execTime, closeTime, initTimeWall, execTimeWall, closeTimeWall]], columns=['experiment type','co-simulation platform','core type','status','federates','messages','bytes','initialization time (cpu)','execution time (cpu)','closing time (cpu)','initialization time (wall)','execution time (wall)','closing time (wall)']), ignore_index=True)\n count += 1\n\n # save the data from the runs\n df.to_csv(outFolder / 'data.csv', index_label='experiment')\n\n return df",
"def run_avg_results():\n\n # List of logs to be measured (tested)\n items = [\"logs_2017-06-23_14-16-00\",\n \"logs_2017-06-23_14-16-59\",\n \"logs_2017-06-23_14-17-58\",\n \"logs_2017-06-23_14-18-48\",\n \"logs_2017-06-23_14-19-39\"]\n\n results = []\n game = \"2048\"\n evals = 1000\n for item in items:\n prefix = \"C:/Users/Jan/Documents/GitHub/general-ai/Experiments/best_models_repeats/2048/MLP+ES/\"\n postfix = \"/best/best_0.json\"\n file_name = prefix + item + postfix\n logdir = prefix + item\n\n # SELECT PROPER MODEL\n model = MLP.load_from_file(file_name, game)\n # model = EchoState.load_from_file(file_name, game)\n\n # RUN MODEL\n # 2048\n result = run_2048_extended(model, evals)\n\n # MARIO\n # result = eval_mario_winrate(model=model, evals=evals, level=\"spikes\", vis_on=False)\n\n # ALHAMBRA\n # First element is result of our model (rest are original models from previous work)\n # result = eval_alhambra_avg_score(model, evals)[0]\n\n # TORCS\n # For reinforcement learning, please run model separately (tensorflow needs to be restarted)\n results.append(result)\n\n results = np.array(results)\n file_name = \"{}_stats_{}.txt\".format(game, utils.miscellaneous.get_pretty_time())\n with open(file_name, \"w\") as f:\n f.write(\"--GAME {} STATISTICS-- {} trainings of the same model\".format(game.upper(), len(items)))\n f.write(os.linesep)\n f.write(\"Model: {}\".format(model.get_name()))\n f.write(os.linesep)\n f.write(\"Total games: {} (for each model)\".format(evals))\n f.write(os.linesep)\n f.write(\"MAX TEST: {}\".format(np.max(results)))\n f.write(os.linesep)\n f.write(\"AVG TEST: {}\".format(np.mean(results)))\n f.write(os.linesep)\n f.write(\"MIN TEST: {}\".format(np.min(results)))",
"def main(argv):\n import getopt\n from random import randint\n\n separator = \"@\"\n rand_repeats = 10\n random_benchmark = False\n try:\n opts, args = getopt.getopt(argv, \"ht:r:s:z\", [\"testdir=\", \"reference=\", \"separator=\", \"random\"])\n except getopt.GetoptError:\n print(\"Usage: ./evaluate_WSD.py -t <testdir> -r <reffile> [-s <separator>] [-z]\")\n sys.exit(2)\n for opt, arg in opts:\n if opt == '-h':\n print(\"Usage: ./evaluate_WSD.py -t <testdir> -r <reffile> [-s <separator>] [-z]\")\n sys.exit()\n elif opt in (\"-t\", \"--testdir\"):\n test_dir = arg\n elif opt in (\"-r\", \"--reference\"):\n ref_file = arg\n elif opt in (\"-s\", \"--separator\"):\n separator = arg\n elif opt in (\"-z\", \"--random\"):\n random_benchmark = True\n\n ari_list = []\n vscore_list = []\n fscore_list = []\n wsd_score_list = []\n eval_files = []\n true_answers = read_answers(ref_file, separator)\n\n # compare reference to random benchmark\n if random_benchmark:\n fscore = 0\n for repeat in range(rand_repeats):\n predictions = {}\n for word in true_answers.keys():\n num_senses = len(set(true_answers[word]))\n predictions[word] = [randint(1, num_senses) for i in range(len(true_answers[word]))]\n scores = compute_metrics(true_answers, predictions)\n fscore += scores[2]\n print(\"\\nAverage fscore after {} random benchmarks: {}\\n\".format(rand_repeats, fscore / rand_repeats))\n # compare reference to test\n else:\n for test_file in os.listdir(test_dir):\n print(\"Evaluating: {}\".format(test_file))\n eval_files.append(test_file)\n predictions = read_answers(test_dir + \"/\" + test_file, separator)\n ari, vscore, fscore, punished_fscore = compute_metrics(true_answers, predictions)\n ari_list.append(ari)\n vscore_list.append(vscore)\n fscore_list.append(fscore)\n wsd_score_list.append(punished_fscore)\n print('\\n')\n\n max_ari = max(ari_list)\n ari_indexes = [i for i, j in enumerate(ari_list) if j == max_ari]\n print(\"Best ari: {} in files {}\\n\".format(max_ari, [eval_files[i] for i in ari_indexes]))\n max_vscore = max(vscore_list)\n vscore_indexes = [i for i, j in enumerate(vscore_list) if j == max_vscore]\n print(\"Best vscore: {} in files {}\\n\".format(max_vscore, [eval_files[i] for i in vscore_indexes]))\n max_fscore = max(fscore_list)\n fscore_indexes = [i for i, j in enumerate(fscore_list) if j == max_fscore]\n print(\"Best fscore: {} in files {}\\n\".format(max_fscore, [eval_files[i] for i in fscore_indexes]))\n max_wsd_score = max(wsd_score_list)\n wsd_score_indexes = [i for i, j in enumerate(wsd_score_list) if j == max_wsd_score]\n print(\"Best WSD score: {} in files {}\\n\".format(max_wsd_score, [eval_files[i] for i in wsd_score_indexes]))",
"def evaluate_on_all(w):\n if isinstance(w, dict):\n w = Embedding.from_dict(w)\n\n # Calculate results on similarity\n logger.info(\"Calculating similarity benchmarks\")\n similarity_tasks = {\n \"MEN\": fetch_MEN(),\n \"WS353\": fetch_WS353(),\n \"WS353R\": fetch_WS353(which=\"relatedness\"),\n \"WS353S\": fetch_WS353(which=\"similarity\"),\n \"SimLex999\": fetch_SimLex999(),\n \"RW\": fetch_RW(),\n \"RG65\": fetch_RG65(),\n \"MTurk\": fetch_MTurk(),\n \"TR9856\": fetch_TR9856(),\n }\n\n similarity_results = {}\n\n for name, data in iteritems(similarity_tasks):\n similarity_results[name] = evaluate_similarity(w, data.X, data.y)\n logger.info(\"Spearman correlation of scores on {} {}\".format(name, similarity_results[name]))\n\n # Calculate results on analogy\n logger.info(\"Calculating analogy benchmarks\")\n analogy_tasks = {\n \"Google\": fetch_google_analogy(),\n \"MSR\": fetch_msr_analogy()\n }\n\n analogy_results = {}\n\n for name, data in iteritems(analogy_tasks):\n analogy_results[name] = evaluate_analogy(w, data.X, data.y)\n logger.info(\"Analogy prediction accuracy on {} {}\".format(name, analogy_results[name]))\n\n analogy_results[\"SemEval2012_2\"] = evaluate_on_semeval_2012_2(w)['all']\n logger.info(\"Analogy prediction accuracy on {} {}\".format(\"SemEval2012\", analogy_results[\"SemEval2012_2\"]))\n\n # Calculate results on categorization\n logger.info(\"Calculating categorization benchmarks\")\n categorization_tasks = {\n \"AP\": fetch_AP(),\n \"BLESS\": fetch_BLESS(),\n \"Battig\": fetch_battig(),\n \"ESSLI_2c\": fetch_ESSLI_2c(),\n \"ESSLI_2b\": fetch_ESSLI_2b(),\n \"ESSLI_1a\": fetch_ESSLI_1a()\n }\n\n categorization_results = {}\n\n # Calculate results using helper function\n for name, data in iteritems(categorization_tasks):\n categorization_results[name] = evaluate_categorization(w, data.X, data.y)\n logger.info(\"Cluster purity on {} {}\".format(name, categorization_results[name]))\n\n # Construct pd table\n cat = pd.DataFrame([categorization_results])\n analogy = pd.DataFrame([analogy_results])\n sim = pd.DataFrame([similarity_results])\n results = cat.join(sim).join(analogy)\n\n return results",
"def compare_search(sizes=[1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7]):\n\tres = []\n\tfor k in sizes:\n\t\tlist = []\n\t\tn = k\n\t\twhile n > 0:\n\t\t\ta = 0\n\t\t\tlist.append(a)\n\t\t\ta = a+1\n\t\t\tn = n-1\n\t\t\tprint(list)\n\t\tl_st=time_search(linear_search,list,-1)\n\t\tb_st=time_search(binary_search,list,-1)\n\t\tres.append((k,l_st,b_st))\n\t\tlist =[]\n\n\n\n\treturn res",
"def performance_tester(home, work_dir, use_old_opt):\n build_dir = 'out'\n test_dir = 'test'\n\n work_dir = os.path.abspath(work_dir)\n\n #find the mechanisms to test\n mechanism_list = {}\n if not os.path.exists(work_dir):\n print ('Error: work directory {} for '.format(work_dir) +\n 'performance testing not found, exiting...')\n sys.exit(-1)\n for name in os.listdir(work_dir):\n if os.path.isdir(os.path.join(work_dir, name)):\n #check for cti\n files = [f for f in os.listdir(os.path.join(work_dir, name)) if\n os.path.isfile(os.path.join(work_dir, name, f))]\n for f in files:\n if f.endswith('.cti'):\n mechanism_list[name] = {}\n mechanism_list[name]['mech'] = f\n mechanism_list[name]['chemkin'] = f.replace('.cti', '.dat')\n gas = ct.Solution(os.path.join(work_dir, name, f))\n mechanism_list[name]['ns'] = gas.n_species\n\n thermo = next((tf for tf in files if 'therm' in tf), None)\n if thermo is not None:\n mechanism_list[name]['thermo'] = thermo\n\n if len(mechanism_list) == 0:\n print('No mechanisms found for performance testing in '\n '{}, exiting...'.format(work_dir)\n )\n sys.exit(-1)\n\n if os.getenv('TCHEM_HOME'):\n tchem_home = os.getenv('TCHEM_HOME')\n else:\n raise SystemError('TCHEM_HOME environment variable not set.')\n\n cpu_repeats = 10\n gpu_repeats = 10\n\n def false_factory():\n return False\n\n import multiprocessing #for cpu count\n max_cpu = multiprocessing.cpu_count()\n num_threads = [1]\n while num_threads < max_cpu:\n num_threads.append(min(max_cpu, num_threads[-1] * 2))\n c_params = {'lang' : 'c',\n 'cache_opt' : [False],\n 'finite_diffs' : [False, True],\n 'num_threads' : num_threads\n }\n\n #check that nvcc installed\n cuda_params = {}\n try:\n subprocess.check_call(['nvcc', '--version'])\n #if we have NVCC, assume we can execute CUDA\n cuda_params = {'lang' : 'cuda',\n 'cache_opt' : [False],\n 'shared' : [False, True],\n 'finite_diffs' : [False, True]\n }\n except OSError:\n #otherwise simply skip cuda\n pass\n #tchem seems not to be openmp parallelizable, nor do we care\n tchem_params = {'lang' : 'tchem',\n 'num_threads' : [1]}\n\n for mech_name, mech_info in sorted(mechanism_list.items(),\n key=lambda x:x[1]['ns']\n ):\n #get the cantera object\n gas = ct.Solution(os.path.join(work_dir, mech_name, mech_info['mech']))\n pmod = any([is_pdep(rxn) for rxn in gas.reactions()])\n\n #ensure directory structure is valid\n os.chdir(os.path.join(work_dir, mech_name))\n subprocess.check_call(['mkdir', '-p', build_dir])\n subprocess.check_call(['mkdir', '-p', test_dir])\n\n num_conditions = 0\n npy_files = [f for f in os.listdir(os.path.join(work_dir, mech_name))\n if f.endswith('.npy')\n and os.path.isfile(f)]\n data = None\n with open('data.bin', 'wb') as file:\n #load PaSR data for different pressures/conditions,\n # and save to binary C file\n for npy in sorted(npy_files):\n state_data = np.load(npy)\n state_data = state_data.reshape(state_data.shape[0] *\n state_data.shape[1],\n state_data.shape[2]\n )\n if data is None:\n data = state_data\n else:\n data = np.vstack((data, state_data))\n num_conditions += state_data.shape[0]\n print(num_conditions, data.shape)\n if num_conditions == 0:\n print('No data found in folder {}, continuing...'.format(mech_name))\n continue\n data.tofile(file)\n\n #figure out gpu steps\n step_size = 1\n steplist = []\n while step_size < num_conditions:\n steplist.append(step_size)\n step_size *= 2\n if step_size / 2 != num_conditions:\n steplist.append(num_conditions)\n\n the_path = os.getcwd()\n first_run = True\n op = 
OptionLoop(c_params, false_factory)\n op = op + OptionLoop(cuda_params, false_factory)\n op = op + OptionLoop(tchem_params, false_factory)\n\n haveOpt = False\n if os.path.isfile(os.path.join(os.getcwd(),\n build_dir, 'optimized.pickle')\n ):\n haveOpt = True\n\n for state in op:\n lang = state['lang']\n temp_lang = 'c' if lang != 'cuda' else 'cuda'\n FD = state['finite_diffs']\n if FD:\n filename = 'fd_jacob{}'.format(utils.file_ext[temp_lang])\n shutil.copy(os.path.join(home, filename),\n os.path.join(build_dir, filename)\n )\n\n opt = state['cache_opt']\n smem = state['shared']\n\n #handle threading\n num_threads = -1\n if 'num_threads' in state:\n num_threads = state['num_threads']\n\n\n if any([isinstance(rxn, ct.PlogReaction) or\n isinstance(rxn, ct.ChebyshevReaction) for rxn in gas.reactions()\n ]) and lang == 'tchem':\n print('TChem performance evaluation disabled; '\n 'not compatible with Plog or Chebyshev reactions.'\n )\n continue\n\n data_output = ('{}_{}_{}_{}_{}'.format(lang, 'co' if opt else 'nco',\n 'smem' if smem else 'nosmem',\n 'fd' if FD else 'ajac',\n num_threads\n ) +\n '_output.txt'\n )\n\n data_output = os.path.join(the_path, data_output)\n if lang != 'cuda':\n repeats = cpu_repeats\n num_completed = check_file(data_output)\n todo = {num_conditions: repeats - num_completed}\n else:\n repeats = gpu_repeats\n todo = check_step_file(data_output, steplist)\n for x in todo:\n todo[x] = repeats - todo[x]\n if not any(todo[x] > 0 for x in todo):\n continue\n\n if opt and haveOpt and not use_old_opt:\n raise Exception('Previous optimization file found... exiting')\n\n if lang != 'tchem':\n create_jacobian(lang, mech_info['mech'],\n optimize_cache=opt,\n build_path=build_dir,\n no_shared=not smem,\n num_blocks=8, num_threads=64,\n multi_thread=multiprocessing.cpu_count()\n )\n\n #now we need to write the reader\n filename = ('read_initial_conditions'\n '{}'.format(utils.file_ext[temp_lang])\n )\n shutil.copy(os.path.join(home, filename),\n os.path.join(os.getcwd(), build_dir, filename)\n )\n\n #write the tester\n file_data = {'datafile' : os.path.join(the_path, 'data.bin')}\n if lang == 'c' or lang == 'cuda':\n filename = 'tester{}.in'.format(utils.file_ext[temp_lang])\n with open(os.path.join(home, filename), 'r') as file:\n src = Template(file.read())\n src = src.substitute(file_data)\n else:\n file_data['mechfile'] = mech_info['chemkin']\n if 'thermo' in mech_info:\n file_data['thermofile'] = mech_info['thermo']\n else:\n #it's the same file\n file_data['thermofile'] = mech_info['chemkin']\n with open(os.path.join(home,\n 'tc_tester.c.in'), 'r') as file:\n src = Template(file.read())\n src = src.substitute(file_data)\n filename = 'test{}'.format(utils.file_ext[temp_lang])\n with open(os.path.join(build_dir, filename), 'w') as file:\n file.write(src)\n\n #copy timer\n shutil.copy(os.path.join(home, 'timer.h'),\n os.path.join(os.getcwd(), build_dir, 'timer.h')\n )\n\n #get file lists\n i_dirs = [build_dir]\n files = ['test', 'read_initial_conditions']\n\n lib = None\n #now build the library\n if lang != 'tchem':\n lib = generate_library(lang, build_dir, test_dir,\n finite_difference=FD, shared=not STATIC\n )\n\n lib = os.path.normpath(lib)\n lib = (lib[lib.index('lib') +\n len('lib'):lib.index('.so' if not STATIC else '.a')]\n )\n else:\n files += ['mechanism', 'mass_mole']\n\n # Compile generated source code\n structs = [file_struct(lang, temp_lang, f, i_dirs,\n (['-DFINITE_DIFF'] if FD else []),\n build_dir, test_dir, not STATIC\n ) for f in files\n ]\n if lang != 
'cuda':\n for s in structs:\n s.args.append('-fopenmp')\n\n pool = multiprocessing.Pool()\n results = pool.map(compiler, structs)\n pool.close()\n pool.join()\n if any(r == -1 for r in results):\n sys.exit(-1)\n\n linker(lang, temp_lang, test_dir, files, lib)\n\n if lang == 'tchem':\n #copy periodic table and mechanisms in\n shutil.copy(os.path.join(tchem_home, 'data',\n 'periodictable.dat'),\n 'periodictable.dat'\n )\n\n with open(data_output, 'a+') as file:\n for stepsize in todo:\n for i in range(todo[stepsize]):\n print(i, \"/\", todo[stepsize])\n subprocess.check_call(\n [os.path.join(the_path,\n test_dir, 'speedtest'),\n str(stepsize), str(num_threads)], stdout=file\n )",
"def compute_files(user1, user2, file_list, dir_pre, start_num):\n match_total = 0\n test_total = 0\n gold_total = 0\n for fi in file_list:\n file1 = dir_pre + user1 + \"/\" + fi + \".txt\"\n file2 = dir_pre + user2 + \"/\" + fi + \".txt\"\n if not os.path.exists(file1):\n print(\"*********Error: \", file1, \"does not exist*********\", file=ERROR_LOG)\n return -1.00\n if not os.path.exists(file2):\n print(\"*********Error: \", file2, \"does not exist*********\", file=ERROR_LOG)\n return -1.00\n try:\n file1_h = open(file1, \"r\")\n file2_h = open(file2, \"r\")\n except IOError:\n print(\"Cannot open the files\", file1, file2, file=ERROR_LOG)\n break\n cur_amr1 = amr.AMR.get_amr_line(file1_h)\n cur_amr2 = amr.AMR.get_amr_line(file2_h)\n if cur_amr1 == \"\":\n print(\"AMR 1 is empty\", file=ERROR_LOG)\n continue\n if cur_amr2 == \"\":\n print(\"AMR 2 is empty\", file=ERROR_LOG)\n continue\n amr1 = amr.AMR.parse_AMR_line(cur_amr1)\n amr2 = amr.AMR.parse_AMR_line(cur_amr2)\n test_label = \"a\"\n gold_label = \"b\"\n amr1.rename_node(test_label)\n amr2.rename_node(gold_label)\n (test_inst, test_rel1, test_rel2) = amr1.get_triples()\n (gold_inst, gold_rel1, gold_rel2) = amr2.get_triples()\n if verbose:\n print(\"Instance triples of file 1:\", len(test_inst), file=DEBUG_LOG)\n print(test_inst, file=DEBUG_LOG)\n print(\"Attribute triples of file 1:\", len(test_rel1), file=DEBUG_LOG)\n print(test_rel1, file=DEBUG_LOG)\n print(\"Relation triples of file 1:\", len(test_rel2), file=DEBUG_LOG)\n print(test_rel2, file=DEBUG_LOG)\n print(\"Instance triples of file 2:\", len(gold_inst), file=DEBUG_LOG)\n print(gold_inst, file=DEBUG_LOG)\n print(\"Attribute triples of file 2:\", len(gold_rel1), file=DEBUG_LOG)\n print(gold_rel1, file=DEBUG_LOG)\n print(\"Relation triples of file 2:\", len(gold_rel2), file=DEBUG_LOG)\n print(gold_rel2, file=DEBUG_LOG)\n (best_match, best_match_num) = smatch.get_best_match(test_inst, test_rel1, test_rel2,\n gold_inst, gold_rel1, gold_rel2,\n test_label, gold_label)\n if verbose:\n print(\"best match number\", best_match_num, file=DEBUG_LOG)\n print(\"Best Match:\", smatch.print_alignment(best_match, test_inst, gold_inst), file=DEBUG_LOG)\n match_total += best_match_num\n test_total += (len(test_inst) + len(test_rel1) + len(test_rel2))\n gold_total += (len(gold_inst) + len(gold_rel1) + len(gold_rel2))\n smatch.match_triple_dict.clear()\n (precision, recall, f_score) = smatch.compute_f(match_total, test_total, gold_total)\n return \"%.2f\" % f_score",
"def main():\n\tpool = Pool(params.num_processes, init_worker)\n\tresults = [pool.apply_async(harmony_search) for i in xrange(params.num_iterations)]\n\tpool.close() #no more tasks will be submitted to the pool\n\tpool.join() #wait for all tasks to finish before moving on\n\t\n\t#find best harmony from all iterations and output\n\tbest_harmony = None\n\tbest_fitness = float('-inf') if params.maximize else float('+inf')\n\tfor result in results:\n\t\tharmony = result.get() #multiprocessing.pool.AsyncResult is returned for each process, so we need to call get() to pull out the value\n\t\tif (params.maximize and harmony[-1] > best_fitness) or (not params.maximize and harmony[-1] < best_fitness):\n\t\t\tbest_harmony = harmony\n\t\t\tbest_fitness = harmony[-1]\n\tprint best_harmony[:-1], best_fitness",
"def completion_test(self, topk=3):\n self.test_log('test phase is beginning...')\n start_time = time.time()\n self.tt_correct_count = 0.0\n self.nt_correct_count = 0.0\n self.topk_nt_correct_count = 0.0\n self.topk_tt_correct_count = 0.0\n self.identifier_correct_count = 0 # 用以计算对Identifier的预测准确率\n self.identifier_incorrect_count = 0\n self.identifier_accu_list = []\n test_times = 10000\n test_step = 0\n self.generator = DataGenerator()\n sub_data_generator = self.generator.get_test_subset_data()\n\n for index, subset_test_data in sub_data_generator: # 遍历每个sub test dataset\n one_test_start_time = time.time()\n for token_sequence in subset_test_data: # 遍历该subset中每个nt token sequence\n test_step += 1\n # 对一个ast sequence进行test\n self.query(token_sequence, topk=topk)\n if test_step % show_every_n == 0:\n one_test_end_time = time.time()\n duration = (one_test_end_time - one_test_start_time) / show_every_n\n one_test_start_time = one_test_end_time\n identifier_accu = (self.identifier_correct_count+1) / (self.identifier_correct_count + self.identifier_incorrect_count+1)\n self.identifier_accu_list.append(identifier_accu)\n nt_accu = self.nt_correct_count / test_step\n tt_accu = self.tt_correct_count / test_step\n topk_nt_accu = self.topk_nt_correct_count / test_step\n topk_tt_accu = self.topk_tt_correct_count / test_step\n log_info = 'test step:{} '.format(test_step) + \\\n 'nt_accuracy:{:.2f}% '.format(nt_accu * 100) + \\\n 'tt_accuracy:{:.2f}% '.format(tt_accu * 100) + \\\n 'nt_top{}_accuracy:{:.2f}% '.format(define_topk, topk_nt_accu * 100) + \\\n 'tt_top{}_accuracy:{:.2f}% '.format(define_topk, topk_tt_accu * 100) + \\\n 'identifier accuracy:{:.2f}% '.format(identifier_accu * 100) + \\\n 'average time cost:{:.2f}s '.format(duration)\n self.test_log(log_info)\n\n if test_step >= test_times:\n break\n\n nt_accuracy = self.nt_correct_count / test_step\n tt_accuracy = self.tt_correct_count / test_step\n topk_nt_accu = self.topk_nt_correct_count / test_step\n topk_tt_accu = self.topk_tt_correct_count / test_step\n end_time = time.time()\n log_info = '{}th subset of test data '.format(index) + \\\n 'there are {} nt_sequence to test '.format(test_step) + \\\n 'accuracy of non-terminal token: {:.2f}% '.format(nt_accuracy * 100) + \\\n 'accuracy of terminal token: {:.2f}% '.format(tt_accuracy * 100) + \\\n 'top{} accuracy of non-terminal:{:.2f}% '.format(define_topk, topk_nt_accu * 100) + \\\n 'top{} accuracy of terminal:{:.2f}% '.format(define_topk, topk_tt_accu * 100) + \\\n 'total time cost of this subset: {:.2f}s '.format(end_time - start_time) + \\\n 'average time cost per case: {:.2f}s '.format((end_time - start_time) / test_step)\n self.test_log(log_info)\n\n file = open('identifier_prediction_accu.pkl', 'wb')\n pickle.dump(self.identifier_accu_list, file)",
"def run(n_processors=4):\n\n # 1) define simulation hyperparameters\n name = \"Data/australia9\" # name of simulation, used as prefix of all associated input filenames\n prefix = \"Data/australia9.3\" # name of simulation, used as prefix of all associated output filenames\n\n agents = 16 # number of agents to use in simulation\n iterations = 120 # number of iterations to run each simulation\n simulations = 100 # number of simulations to run\n sigma_n = 0.1 # sampling noise std. dev. on hifi data (should match distribution's generational parameter)\n console = False # boolean indicating if intermediate output should print to console\n log = True # boolean indicating if output should be logged to CSV for performance analysis\n # plotter = Plotter([-eps, 1 + eps, -eps, 1 + eps]) # x_min, x_max, y_min, y_max\n plotter = None # do not plot\n np.random.seed(1234) # seed random generator for reproducibility\n\n algorithms = [\"todescato_nsf\",\n \"choi_nsf\",\n \"todescato_hsf\",\n \"choi_hsf\",\n \"todescato_hmf\",\n \"choi_hmf\",\n # \"periodic_nsf\", \"periodic_hsf\", \"periodic_hmf\",\n \"lloyd\"]\n\n # 2) load distributional data\n truth = pd.read_csv(f\"{name}_hifi.csv\") # CSV specifying ground truth (x,y,z=f(x,y)) triples\n mf_hyp = pd.read_csv(f\"{name}_mf_hyp.csv\") # CSV specifying multi-fidelity GP hyperparameters\n sf_hyp = pd.read_csv(f\"{name}_sf_hyp.csv\") # CSV specifying single-fidelity GP hyperparameters\n null_prior = pd.read_csv(\"Data/null_prior.csv\") # Use a null prior\n human_prior = pd.read_csv(f\"{name}_prior.csv\") # CSV specifying prior to condition GP upon before simulation\n\n # 3) run each algorithm sequentially, repeating \"simulations\" times with multiprocessing\n for algo in algorithms:\n\n print(slash_break + f\"Start Algorithm : {algo}\" + slash_break)\n algo_start = time.time()\n\n out_name = f\"{prefix}_{algo}\"\n loss_log, agent_log, sample_log = [], [], [] # reset logging lists for this algo\n\n # 4) select hyperparameters for this algorithm\n if \"mf\" in algo:\n hyp = mf_hyp\n else:\n hyp = sf_hyp\n\n # 5) select prior for this algorithm\n if \"_n\" in algo:\n prior = null_prior\n else:\n prior = human_prior\n\n # 6) configure arguments to pass to simulation\n args = [(out_name, algo, sim_num, iterations, agents, truth, sigma_n, prior, hyp, console, plotter, log)\n for sim_num in range(simulations)]\n\n # 7) pool and map simulations on all processors\n if n_processors > 1:\n with Pool(processes=n_processors) as pool:\n out = pool.map(run_sim, args)\n else:\n out = []\n for arg in args:\n out.append(run_sim(arg))\n\n # 8) reconstruct return data from multiprocessing\n for sim_num in range(simulations):\n loss_log.extend(out[sim_num][0]) # 0th element in each tuple is loss_log_t\n agent_log.extend(out[sim_num][1]) # 1st element in each tuple is agent_log_t\n sample_log.extend(out[sim_num][2]) # 2nd element in each tuple is sample_log_t\n\n # 9) save dataframes from simulation results for post-analysis\n if log:\n loss_df = pd.DataFrame(loss_log)\n loss_df.to_csv(f\"{out_name}_loss.csv\")\n agent_df = pd.DataFrame(agent_log)\n agent_df.to_csv(f\"{out_name}_agent.csv\")\n sample_df = pd.DataFrame(sample_log)\n sample_df.to_csv(f\"{out_name}_sample.csv\")\n\n algo_end = time.time()\n print(slash_break + f\"End Algorithm : {algo}\\n\"\n f\"Time : {algo_end - algo_start}\\n\"\n f\"Time/Sim : {(algo_end - algo_start) / simulations}\" + slash_break)",
"def time_it(input_list):\n for i in range(501):\n start = time.time()\n radix_sort(input_list)\n time_passed = time.time() - start\n avg_time = time_passed / 500\n return avg_time",
"def get_result(correct_chunks, true_chunks, pred_chunks,\n correct_counts, true_counts, pred_counts, verbose=True):\n # sum counts\n sum_correct_chunks = sum(correct_chunks.values())\n sum_true_chunks = sum(true_chunks.values())\n sum_pred_chunks = sum(pred_chunks.values())\n\n sum_correct_counts = sum(correct_counts.values())\n sum_true_counts = sum(true_counts.values())\n\n nonO_correct_counts = sum(v for k, v in correct_counts.items() if k != 'O')\n nonO_true_counts = sum(v for k, v in true_counts.items() if k != 'O')\n\n chunk_types = sorted(list(set(list(true_chunks) + list(pred_chunks))))\n\n # compute overall precision, recall and FB1 (default values are 0.0)\n prec, rec, f1 = calc_metrics(sum_correct_chunks, sum_pred_chunks, sum_true_chunks)\n res = (prec, rec, f1)\n if not verbose:\n return res\n\n # print overall performance, and performance per chunk type\n \n #print(\"processed %i tokens with %i phrases; \" % (sum_true_counts, sum_true_chunks), end='')\n #print(\"found: %i phrases; correct: %i.\\n\" % (sum_pred_chunks, sum_correct_chunks), end='')\n \n #print(\"accuracy: %6.2f%%; (non-O)\" % (100*nonO_correct_counts/nonO_true_counts))\n #print(\"accuracy: %6.2f%%; \" % (100*sum_correct_counts/sum_true_counts), end='')\n #print(\"precision: %6.2f%%; recall: %6.2f%%; FB1: %6.2f\" % (prec, rec, f1))\n\n # for each chunk type, compute precision, recall and FB1 (default values are 0.0)\n for t in chunk_types:\n prec, rec, f1 = calc_metrics(correct_chunks[t], pred_chunks[t], true_chunks[t])\n #print(\"%17s: \" %t , end='')\n #print(\"precision: %6.2f%%; recall: %6.2f%%; FB1: %6.2f\" %\n # (prec, rec, f1), end='')\n #print(\" %d\" % pred_chunks[t])\n\n return res\n # you can generate LaTeX output for tables like in\n # http://cnts.uia.ac.be/conll2003/ner/example.tex\n # but I'm not implementing this"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Make a list of lists of pixels, where each inner list is a row of the original picture. If a ratio is specified, shrink the picture by it and return the smaller version.
|
def make_list(filename, ratio):
f = filename
make_smaller = ratio
if not f.endswith(".ppm"):
f += ".ppm"
(width, height), data = read_file(f)
    width, height = int(width.strip("b'")), int(height.strip("b'").strip("\\n"))
final_list = []
final_small = []
print(width, height)
    data = list(grouper(3, data))  # materialize RGB triples so the row slices below work
    for i in range(height):
        line_out = []
        current_line = data[i*width:(i+1)*width]
        for j, pixel in enumerate(current_line):
            line_out.append(Pixel(pixel, (i, j)))
        final_list.append(line_out)
if make_smaller:
if len(make_smaller) == 1:
ratio_x = int(make_smaller[0])
ratio_y = ratio_x
elif len(make_smaller) == 2:
ratio_x = int(make_smaller[0])
ratio_y = int(make_smaller[1])
else:
            raise ValueError("One or two arguments only!")
for x in range(ceil(height/ratio_x)):
small_line = []
for y in range(ceil(width/ratio_y)):
a = BigPixel()
small_line.append(a)
final_small.append(small_line)
for line in final_list:
for pixel in line:
x, y = pixel.get_big_coord(ratio_x, ratio_y)
final_small[x][y] += pixel
return final_small
return final_list
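
A minimal usage sketch, assuming read_file, grouper, Pixel, BigPixel, and ceil come from the surrounding module (none of them appear in this snippet), and that picture.ppm is a hypothetical input file:

rows = make_list("picture", None)         # no ratio: full-resolution list of rows
small = make_list("picture", ["2"])       # shrink by 2 in both dimensions (2x2 blocks)
small = make_list("picture", ["2", "4"])  # 2 rows x 4 columns merged per BigPixel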
|
[
"def split_image(pixels, corner, square_size):\n opposite_corner = (corner[0] + square_size, corner[1] + square_size)\n\n square_rows = pixels[corner[0]:opposite_corner[0]]\n square = []\n for row in square_rows:\n square.append(row[corner[1]:opposite_corner[1]])\n\n return square",
"def get_image_cells(pixels, width, sq_width, sq_height):\n\n height = int(ceil(len(pixels) / width))\n\n new_height = int(ceil(height / sq_height))\n new_width = int(ceil(width / sq_width))\n for top in range(0, new_height*sq_height, sq_height):\n for left in range(0, new_width*sq_width, sq_width):\n\n if (top == (new_height*sq_height)-sq_height):\n sh = height-top\n else:\n sh = sq_height\n\n if (left == (new_width*sq_width)-sq_width):\n sw = width-left\n else:\n sw = sq_width\n\n rows = []\n for i in range(sh):\n rows.extend(pixels[((top + i) * width) + left : ((top + i) * width) + (left + sw)])\n yield rows",
"def reshapeToPixels(pix):\n a = diff(pix)\n b = ones(len(pix)-1)\n if len(pix) == N**2: return pix.reshape(N,N).T\n elif sum(abs(a-b))==0:\n # if pix contains something in the top row...\n #i = arange(len(pix)/2, dtype=int)\n #one = pix[2*i]\n #two = pix[2*i+1]\n #done = vstack((one,two))\n done = reshape(pix, (-1,N)).T\n # make the rows the proper length\n return done\n else:\n a = diff(pix) - 1\n pixelsInLine = nonzero(a)[0][0] + 1\n i = arange(len(pix)/pixelsInLine, dtype=int)\n\n done = zeros(len(pix)/pixelsInLine)\n for j in arange(pixelsInLine):\n part = pix[pixelsInLine*i + j]\n done = vstack((done, part))\n\n done = done[1:]\n return done",
"def get_image_cells_with_width(pixels, width, sq_width, sq_height):\n\n height = int(ceil(len(pixels) / width))\n\n new_height = int(ceil(height / sq_height))\n new_width = int(ceil(width / sq_width))\n for top in range(0, new_height*sq_height, sq_height):\n for left in range(0, new_width*sq_width, sq_width):\n\n if (top == (new_height*sq_height)-sq_height):\n sh = height-top\n else:\n sh = sq_height\n\n if (left == (new_width*sq_width)-sq_width):\n sw = width-left\n else:\n sw = sq_width\n\n rows = []\n for i in range(sh):\n rows.append(pixels[((top + i) * width) + left : ((top + i) * width) + (left + sw)])\n yield rows",
"def convert_pixels(filename):\n img = Image.open(filename)\n \n pixels = list(img.getdata())\n # Size of 3000x2500 pixel image\n # Larger images takes too much memory, will not convert into pixels\n if len(pixels) > 7500000:\n return [None, None, None]\n\n if (isinstance(pixels[0], tuple)):\n # RGB, convert to lists\n pixels = [list(i[0:3]) for i in pixels]\n else:\n # grayscale, spoof RGB\n pixels = [[pixel,pixel,pixel] for pixel in pixels]\n \n width, height = img.size\n \n return [pixels, width, height]",
"def splitImage(image, size):\n W, H = image.size[0], image.size[1] \n m, n = size \n w, h = int(W/n), int(H/m) \n\n # image list \n imgs = [] \n \n for j in range(m): \n for i in range(n): \n \n imgs.append(image.crop((i*w, j*h, (i+1)*w, (j+1)*h))) \n return imgs",
"def split_board(img) -> List[Image]:\n # TODO use https://stackoverflow.com/questions/16856788/slice-2d-array-into-smaller-2d-arrays\n arr = []\n sq_len = int(img.shape[0] / 8)\n for i in range(8):\n for j in range(8):\n image = img[i * sq_len: (i + 1) * sq_len, j * sq_len: (j + 1) * sq_len]\n arr.append(cv_to_pil(image))\n\n return arr",
"def crop(image, tl_row, tl_col, target_size):\n rows = image.size()[0]\n cols = image.size()[1]\n x = [[[image.pixels[i][j][k] for k in range(cols) \\\n if ((j >= tl_row) and (j <= tl_row + target_size[0])) and \\\n ((k >= tl_col) and (k <= tl_col + target_size[1]))]\n for j in range(rows)] for i in range(len(image.pixels))]\n my_x = list(map(lambda lst: list(filter(lambda val: \\\n True if len(val) > 0 else False, lst)), x))\n return RGBImage(my_x)",
"def _resize_images(images: List) -> List:\n return list(\n map(\n lambda i: i.resize((64, 64)),\n images\n )\n )",
"def split_image(image, n = 8):\r\n\r\n if (image.size[0] % n != 0) or (image.size[1] % n != 0):\r\n print(\"Image dimensions are not divisible by\", n)\r\n return\r\n\r\n pieces = []\r\n\r\n for j in range(0, image.size[1], n):\r\n # Iterae through each row of image\r\n row = []\r\n \r\n for i in range(0, image.size[0], n):\r\n #Iterate through each element in row\r\n row.append(image.crop((i, j, i+n, j+n)))\r\n \r\n pieces.append(row)\r\n \r\n return pieces",
"def splitImage(numRows, numCols, image, piece_dims=(32,32,3)):\n piece_height, piece_width, piece_depth = piece_dims\n # large_width, large_height, large_depth = numRows * piece_height, numCols * piece_width, piece_depth\n # # resized_img = np.array(scipy.misc.imresize(image, (large_width, large_height, large_depth), interp='nearest'))\n # resized_img = np.array(resize(image, (large_width, large_height, large_depth), \n # preserve_range=True, mode='reflect')).astype(dtype=np.uint8)\n resized_img = image\n updated_pieced_dims = (piece_height + JIGGLE_ROOM, piece_width + JIGGLE_ROOM, piece_depth)\n #print(np.shape(image))\n hsplits = np.array(np.split(resized_img, numCols, axis=1))\n vsplits = np.array(np.split(hsplits, numRows, axis=1)) # Not 1 since we introduce one more dim.\n split_images = vsplits.reshape(-1, *updated_pieced_dims)\n #jiggled_imgs = []\n #for image in split_images:\n # x_start = np.random.randint(0, JIGGLE_ROOM, 1)[0]\n # y_start = np.random.randint(0, JIGGLE_ROOM, 1)[0]\n # jiggled_imgs.append(image[x_start:(x_start + piece_height), y_start:(y_start + piece_width) , :])\n #gc.collect()\n return split_images #jiggled_imgs",
"def prepare_list(self, list_images, size = 1000):\n batch = []\n list_batches = []\n counter = 0\n for item, img in enumerate(list_images):\n batch.append(img)\n counter += 1\n if counter >= size:\n counter = 0\n list_batches.append(batch)\n batch = []\n elif item == len(list_images) - 1:\n list_batches.append(batch)\n return list_batches",
"def pixels_to_tiles(p):\n return tuple(x / TILE_SIZE for x in p)",
"def crop_image_by_ratio(image, ratio):\n (height, width) = image.shape\n up = int(height*ratio[0])\n down = int(height*ratio[1])\n left = int(width*ratio[2])\n right = int(width*ratio[3])\n return image[up:down,left:right]",
"def splitHeightmap(heightmap):\n img = heightmap.img\n size = n.array(heightmap.size)\n fac = n.array([0.8,0.8])\n sub = n.floor(size[::-1]*fac).astype(int)\n sub1 = img[:sub[0], :sub[1]]\n sub2 = img[:sub[0], -sub[1]:]\n sub3 = img[-sub[0]:,-sub[1]:]\n sub4 = img[-sub[0]:,:sub[1]]\n subs = [sub1, sub2, sub3, sub4]\n hs = [Heightmap(s, sub[::-1], heightmap.step) for s in subs]\n return hs",
"def stitch_images(pieces, mode = 'L'):\r\n\r\n # width and height of image pieces\r\n w, h = pieces[0][0].size\r\n\r\n # width and height of resulting image\r\n width = len(pieces[0]) * w\r\n height = len(pieces) * h\r\n\r\n # create new image\r\n image = Image.new(mode, (width, height))\r\n\r\n for j in range(int(height/h)):\r\n # Iterate through each row\r\n for i in range(int(width/w)):\r\n # Iterate through each piece in row\r\n image.paste(pieces[j][i], (i*w, j*h))\r\n \r\n return image",
"def matrix_to_pixels(matrix, mode = 'L'):\r\n\r\n # 1D array containg data\r\n data = []\r\n # Image\r\n image = Image.new(mode, (len(matrix), len(matrix[0])))\r\n\r\n for i in range(len(matrix)):\r\n # Iterate through each row\r\n for j in range(len(matrix[0])):\r\n # Iterate through each element in row\r\n data.append(matrix[j][i])\r\n \r\n image.putdata(data)\r\n \r\n return image",
"def main_shrink_resolution():\n img = cv2.imread(IMAGE_GRAY)\n images = [(n, shrink_resolution(img, n)) for n in (3,5,7,20,100)]\n show_images(images)",
"def get_crops(image, xs, ys, win):\n batch_size = len(xs)\n data = np.ones((batch_size,win*win*3),dtype=np.single)\n count = 0\n for x, y in zip(xs,ys):\n xmin = x-win/2\n xmax = x+win/2\n ymin = y-win/2\n ymax = y+win/2\n temp = image[xmin:xmax,ymin:ymax,:]\n temp2 = misc.imresize(temp,(224,224))\n data[count,:] = vectorize(temp2)\n count += 1\n return np.transpose(data)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a short string summary of this Sequence
|
def summary(self) -> str:
if self.id:
id = self.id
else:
if len(self) <= 20:
id = "Sequence={}".format(str(self))
else:
id = "Sequence"
return "{};\n Alphabet={};\n Length={};\n Parent={};\n Type={}".format(
id, self.alphabet.name, len(self), repr(self.parent), self.sequence_type
)
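
A hypothetical call; Alphabet.NT_STRICT is an assumed member name, and the exact field values depend on the instance:

seq = Sequence("ACGT", Alphabet.NT_STRICT)
print(seq.summary())
# ACGT is short enough (<= 20) that the id line reads "Sequence=ACGT",
# followed by the Alphabet, Length, Parent and Type fields formatted above.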
|
[
"def summary(self) -> str:",
"def summary_func(self):\n return('The description of this major is' + ': ' + self.summ)",
"def summary(self):\n\n name='name:%s'%self.getName()\n damage='dégat:%s'%self.getDamage()\n ammos='munitions:%s'%self.getAmmos()\n return '\\n'.join([name, damage, ammos])",
"def summarystring(self, verbose=False):\n summarystring = \"\"\n summarystring += \"N=\" + str(self.n) + \" K=\" + str(self.k) + linesep\n for node in self.nodes:\n summarystring += str(node) + linesep\n summarystring += linesep\n summarystring += \"Basin:\" + linesep\n summarystring += self.basinstring() + linesep\n summarystring += \"Attractor:\" + linesep\n summarystring += self.attractorstring() + linesep\n summarystring += \"Check: next state would be...\" + linesep\n summarystring += str([RBN._nextstate(node, change_state=False) for node in self.nodes]) + linesep\n return summarystring",
"def summary(self):\n tpl = \"\\n\".join(\n [\n \"{} summary\",\n \"=\" * (len(self.name) + len(\" summary\")),\n \"- nodes: {}\",\n \"- edges: {}\",\n ]\n )\n return tpl.format(self.name, self.number_of_nodes(), self.number_of_edges())",
"def __str__(self):\n s = \"{0:15s} {1:30s}\".format(self.type, self.name)\n if (self.quantity):\n s += \" {0:10s}\".format(str(self.quantity))\n if (self.pct):\n s += \" ({0:5.1f}%)\".format(self.pct)\n if (len(self.properties) > 0):\n prop_strs = []\n for e in sorted(self.properties.keys()):\n prop_strs.append(self.properties[e].short_str())\n s += \" (\" + \", \".join(prop_strs) + \")\"\n return s",
"def full_str(self):\n outstr = self._title + \": \"\n outstr = outstr + str(self._date) + \"; \"\n outstr = outstr + str(self._time)\n return outstr",
"def show_summary(self):\n length = self.sum_length.first()\n coef = self.sum_coef.first()\n cons = length*coef/100\n print(\"Total length: \" + str(length) +\n \"\\nTotal consumption: \" + str(coef) +\n \"\\nTotal fuel used: \" + str(cons))",
"def summary(self):\n attribute_values = [round(x, 3) if isinstance(x, float) else x for x in self.attribute_values]\n return f'({repr(self.output_value)}, {attribute_values})'",
"def __str__(self):\n r = ''\n r += 'Timings:\\n' + \\\n '\\tOrdering:\\t\\t{}s\\n'.format(self.ordering_time) + \\\n '\\tConstruction:\\t{}s\\n'.format(self.construction_time) + \\\n '\\tMinimising:\\t{}s\\n'.format(self.minimising_time)\n r += 'Nodes:\\n' + \\\n '\\tNot minimized:\\t\\t{}\\n'.format(self.bdd_nodes) + \\\n '\\tMinimised:\\t\\t\\t{}'.format(self.min_bdd_nodes)\n return r",
"def summary(self, line=''):\n\n if self.flag is None:\n self.calc_lifetimes()\n fmt = '%i %.3f %5.3f %.2f %.3f %.4g %.4g\\n'\n efmt = '# %.3f %s: %s \\n'\n if self.flag is not None:\n line += efmt % (self.Z, self.name, self.flag)\n elif self.hb:\n line += fmt % (0, self.Z, self.mass, self.ALFOV,\n self.zahb_mcore, self.tau_he, 0.)\n else:\n line += fmt % (1, self.Z, self.mass, self.ALFOV,\n self.final_mcore, self.tau_he, self.tau_h)\n return line",
"def __repr__(self):\r\n s = 'text model name: ' + str(self.name) + '\\n'\r\n s += ' number of words: ' + str(len(self.words)) + '\\n'\r\n s += ' number of word lengths: '+str(len(self.word_lengths))+'\\n'\r\n s += ' number of stems: '+str(len(self.stems))+'\\n'\r\n s += ' number of sentence lengths: '+str(len(self.sentence_lengths))+'\\n'\r\n s += ' most Frequently used 3-word sequence: '+str(self.max_adjacent())\r\n return s",
"def attr_summary(self):\n\n print(self._attr_repr())",
"def _description_string(self) -> str:",
"def __str__(self):\n sc = \"\\nNo. of Blocks: {l}\\n\".format(l=len(self.chain))\n\n offset = len(str(len(self.chain)))\n for i, block in enumerate(self.chain):\n sc += \"\\tBlock {n}. {h}\\n\".format(\n n=str(i).rjust(offset), h=str(block))\n\n sc += \"\\n\"\n\n return sc",
"def toString(self):\n ln0 = Interval.toString(self)\n ln1 = \"Tone Frequency = %d Hz \\n\"%self.freq\n ln2 = \"freqType= %s\"%self.freqType #raj-change for freq\n return ln0+ln1+ln2 #raj- added ln2",
"def summary(self):\n if self._summary is None:\n self._summary = \"No summary for element.\"\n for doc in self.docstring:\n if doc.doctype == \"summary\":\n self._summary = doc.contents\n break\n\n #If a parameter, member or local tag has dimensions or other children,\n #then the inner-text is not the right thing to use; find a grand-child\n #summary tag instead.\n if self._summary == \"No summary for element.\" and len(self.docstring) > 0:\n summary = self.doc_children(\"summary\")\n if len(summary) > 0:\n self._summary = summary[0].contents\n else:\n self._summary = self.docstring[0].contents\n\n return self._summary",
"def __str__(self):\n # Probably some performance issues with this code because of Python's\n # immutable strings. This code is only ever called in development or\n # testing, so it should be fine.\n i = 1\n result_string = \"\"\n for line in self._instructions:\n result_string += \"{0}: {1}\\n\".format(i, line)\n i += 1\n\n result_string += 'Labels:'\n for label, line_number in self._labels.iteritems():\n result_string += \"\\n\\t{0}: {1}\".format(label, line_number+1)\n # Added 1 because the line numbers are stored 0-indexed,\n # but we are printing 1-indexed line numbers.\n\n return result_string",
"def get_short_string(self):\n string = '<annotation '\n string += 'annotator={} '.format(self.annotator)\n string += 'text=\"{}\" '.format(self.text.upper())\n string += 'classification={}'.format(self.classification)\n string += '></annotation>'\n return string"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Raises AlphabetError if this Sequence does not conform to its alphabet
|
def _validate_alphabet(self):
Sequence.validate_alphabet(str(self), self.alphabet)
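
A sketch of the intended behaviour; Alphabet.NT_STRICT is an assumed member name, and the constructor kwarg mirrors the one used in reverse_complement below:

seq = Sequence("ACGT", Alphabet.NT_STRICT, validate_alphabet=False)
seq._validate_alphabet()   # returns None when every character conforms
# a sequence containing characters outside its alphabet would raise AlphabetError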
|
[
"def checkAlphabet(self, sequence):\n ok = [ch for ch in sequence if ch in self.E]\n if len(ok) < len(sequence):\n return False \n return True",
"def get_alphabet_with_probabilities(self):\n raise self._alphabet_distribution",
"def test_unicode_letter_raises(letter):\n\n with pytest.raises(TypeError):\n letter_score(letter)",
"def __init__(self, alphabet=URL_SAFE):\n if alphabet:\n self.alphabet = alphabet\n self.base = len(self.alphabet)",
"def get_substitution_alphabet():\n alphabet = []\n\n for ch in get_key().lower() + string.ascii_lowercase:\n if ch not in alphabet:\n alphabet.append(ch)\n\n if len(alphabet) != len(string.ascii_lowercase):\n die('key must consist solely of ASCII letters')\n\n return alphabet",
"def validate(seq, alphabet='dna'):\n## Source : https://www.biostars.org/p/102/ Giovanni M Dall'Olio\n alphabets = {'dna': re.compile('^[acgtn]*$', re.I), \n 'protein': re.compile('^[acdefghiklmnpqrstvwy]*$', re.I)}\n if alphabets[alphabet].search(seq) is not None:\n return True\n else:\n return False",
"def ord_self(alphabet):\n return ord(alphabet) - 96",
"def test_isInAlphabet(self):\n sEncodings = StringUtils.stringEncodings()\n lEncodings = StringUtils.languageEncodings()\n \n self.assertTrue(isInAlphabet(\"Howdy\", sEncodings['ASCII'],lEncodings['ENGLISH']))\n self.assertTrue(isInAlphabet(\"First\", sEncodings['ASCII'], lEncodings['ENGLISH']))\n self.assertFalse(isInAlphabet(\"0123456789\", sEncodings['ASCII'], lEncodings['ENGLISH']))\n self.assertTrue(isInAlphabet(\"g\", sEncodings['ASCII'], lEncodings['ENGLISH']))",
"def validateAminoSequence(self, sequence_list):\r\n\r\n # Create a list of invalid amino acid characters for later validation\r\n # all except the specified characters are invalid.\r\n invalid = re.compile(r'[^ARNDBCEQZGHILKMFPSTWYV]')\r\n for string in sequence_list:\r\n if re.search(invalid, string) is not None:\r\n return 0\r\n return 1",
"def seq_validator(sequence):\n\n # checks for ascii characters that should not appear in a fasta sequence\n seq_val = re.compile(\"[^ATKMBVCNSWD-GUYRHatkbbvcnswdguyrh]\")\n\n # if any illegal characters found return False\n if seq_val.search(sequence):\n return False\n\n return True",
"def compare_alphabet(self):\n\n\n self.actual_frequency=self.make_storage()\n self.combine_alphabet_cases()\n total_letters=0.0\n for i in range (26):\n total_letters=total_letters+self.char_frequency[32+i][1]\n print(\" LETTER Expected Frequency Actual Frequency\")\n for i in range(26):\n temp=self.char_frequency[32+i][1]/total_letters\n if (i==0):\n print(\"first letter is : \"+self.actual_frequency[64+i][0]+\" \")\n print(temp)\n \n self.actual_frequency[64+i][1]=temp\n print(\" \"+chr(i+65)+\"-------------\"+str(self.expected_char_frequency[64+i][1])+\"----------------\"+str(temp))",
"def get_alphabet(self):\n alpha_cols = self.serie.str.isalpha()\n alpha_cols = alpha_cols.to_frame()\n return alpha_cols[self.col_name].values.sum()",
"def random_alphabet():\n return ALPHA_CHARS[randint(0, len(ALPHA_CHARS) - 1)]",
"def test_random_word_large_min_letter_count(self):\n for letter in self.letters:\n pytest.raises(ValueError, self.rw.random_word, letter, min_letter_count=3443)",
"def get_alphabet1(self):\n return self._alph1",
"def validate_seqs(x,\n alphabet=None,\n restrict_seqs_to_alphabet=True):\n # Cast as np.array\n if isinstance(x, str):\n x = np.array([x])\n elif isinstance(x, (list, np.ndarray)):\n x = np.array(x).astype(str)\n elif isinstance(x, pd.Series):\n x = x.values.astype(str)\n else:\n check(False, f'type(x)={type(x)} is invalid.')\n\n # Make sure array is 1D\n check(len(x.shape) == 1, f'x should be 1D; x.shape={x.shape}')\n\n # Get N and make sure its >= 1\n N = len(x)\n check(N >= 1, f'N={N} must be >= 1')\n\n # Make sure all x are the same length\n lengths = np.unique([len(seq) for seq in x])\n check(len(lengths) == 1,\n f\"Sequences should all be the same length\"\n \"; found multiple lengths={lengths}\")\n\n # If user requests to restrict sequences to a given alphabet\n if restrict_seqs_to_alphabet:\n\n # Check that alphabet is specified\n check(alphabet is not None,\n \"alphabet must be specified when restrict_seqs_to_alphabet=True.\")\n\n # Validate alphabet\n alphabet = validate_alphabet(alphabet)\n\n # Make sure all sequences are in alphabet\n seq_chars = set(''.join(x))\n alphabet_chars = set(alphabet)\n check(seq_chars <= alphabet_chars,\n f\"x contain the following characters not in alphabet:\"\n f\"{seq_chars-alphabet_chars}\")\n\n return x",
"def _is_in_alphabet(self, char: str):\n in_alphabet = False\n for rotor in self.rotors:\n in_alphabet = rotor._is_char_in_alphabet(char)\n if in_alphabet:\n break\n \n return in_alphabet",
"def is_in_alphabet(self, cur_ngram):\r\n for letter in cur_ngram:\r\n if letter not in self.alphabet:\r\n return False\r\n\r\n return True",
"def get_alphabet() -> List:\n return list(string.ascii_lowercase)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a new Sequence corresponding to the reverse complement of this Sequence. Location on parent, if it exists, is converted appropriately.
|
def reverse_complement(self, new_id: str = None, new_type: str = None) -> "Sequence":
if not self.alphabet.is_nucleotide_alphabet():
raise AlphabetError("Cannot reverse complement sequence with alphabet {}".format(self.alphabet))
location = self.location_on_parent.reverse_strand() if self.location_on_parent else None
strand = self.parent_strand.reverse() if self.parent_strand else None
rc_map = ALPHABET_TO_NUCLEOTIDE_COMPLEMENT[self.alphabet]
try:
seq_data = "".join([rc_map[c] for c in str(self)[::-1]])
except KeyError as e:
raise AlphabetError("Character {} not found for alphabet {}".format(str(e), self.alphabet))
rc_parent = Parent(strand=strand, location=location) if strand or location else None
return Sequence(
seq_data,
self.alphabet,
id=new_id,
type=new_type,
parent=rc_parent,
validate_alphabet=False,
)
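
A short usage sketch; the Sequence constructor call mirrors the one at the end of the method, and Alphabet.NT_STRICT is an assumed member name:

seq = Sequence("ATGC", Alphabet.NT_STRICT)
rc = seq.reverse_complement(new_id="my_rc")
str(rc)   # "GCAT": the string is reversed, then each base is complemented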
|
[
"def reverse_complement_SeqRecord(record):\n return SeqRecord(seq = record.seq.reverse_complement(), \\\n id = record.id, description = \"reverse complement\" )",
"def get_reversed(self) -> _Node:\n parent = _NullPathNode()\n for node in _NodeIterator(self):\n parent = _PathNode(node.get_item(), node.get_weight(), parent)\n\n return parent",
"def reverse_complement(self):\n\n # [::-1] slice reverses a string\n reverse_barcode = self.index_seq[::-1]\n\n # convert string into list of chars\n reverse_barcode_list = list(reverse_barcode)\n\n # makes list of reverse complement, combines back into string using join function\n reverse_complement = ''.join([self.complement[i] for i in reverse_barcode_list])\n\n return(reverse_complement)",
"def get_reverse_complement(self):\n return self.get_strand_complement()[::-1]",
"def getReverse(self):\n\t\treverse=copy.deepcopy(self)\n\t\tfor n in reverse.depGraph.nodes():\n\t\t\tfor s in reverse.depGraph.node[n]['cpt']:\n\t\t\t\ttemp=reverse.depGraph.node[n]['cpt'][s][0]\n\t\t\t\treverse.depGraph.node[n]['cpt'][s][0]=reverse.depGraph.node[n]['cpt'][s][1]\n\t\t\t\treverse.depGraph.node[n]['cpt'][s][1]=temp\n\t\treturn reverse",
"def _reverse(self):\n o = self.copy()\n # Clear ok reversed flag\n o._reversed = not o._reversed\n\n if o.bits == 8:\n # No need for reversing\n return o.copy()\n\n if o.is_top:\n # A TOP is still a TOP after reversing\n si = o.copy()\n return si\n\n else:\n if not o.is_integer:\n # We really don't want to do that... but well, sometimes it just happens...\n logger.warning(\"Reversing a real strided-interval %s is bad\", self)\n\n # Reversing an integer is easy\n rounded_bits = ((o.bits + 7) // 8) * 8\n list_bytes = []\n si = None\n\n for i in range(0, rounded_bits, 8):\n b = o._unrev_extract(min(i + 7, o.bits - 1), i)\n list_bytes.append(b)\n\n for b in list_bytes:\n si = b if si is None else si.concat(b)\n si.uninitialized = self.uninitialized\n si._reversed = o._reversed\n return si",
"def reverse(self):\n reactants = {}\n for r in self.reactants:\n reactants[r] = - self.reactants[r]\n return Reaction(reactants, self.catalysts,\n self.reversible, self.data)",
"def invert(self) -> 'BitSequence':\n self._seq = bytearray([x ^ 1 for x in self._seq])\n return self",
"def reverse(self, in_place=False):\n pass",
"def reverse(self):\r\n return segment(self.v, self.u)",
"def reversed(self):\n if self._reversed is None:\n self._reversed = self.create_reversed()\n\n return self._reversed",
"def invert(self) -> 'BaseFlow':\n return InverseFlow(self)",
"def reversed(self):\n path = self.path.reversed()\n t0 = path.len() - self.t0\n t1 = path.len() - self.t1\n return TrattoPath(path, t1, t0)",
"def reversed(self) -> 'segment':\n\n return segment(self.second, self.first)",
"def reverse_copy(self):\n return Graph(self.map(lambda n: n.reverse_copy()))",
"def applyReverse(self, rv):\n from .trace import applyReverseTransform\n return applyReverseTransform(self, rv)",
"def __invert__(self: bitlist) -> bitlist:\n return bitlist(list(reversed([1-b for b in self.bits])))",
"def inverse(self) -> \"SbRotation\":\n return _coin.SbRotation_inverse(self)",
"def invert(self) -> \"SbRotation &\":\n return _coin.SbRotation_invert(self)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the Parent object representing the closest ancestor (parent, parent of parent, etc.) of this sequence which has the given sequence type. If include_self is True and this sequence has the given type, returns a new Parent object representing this sequence. Raises NoSuchAncestorException if no ancestor with the given type exists.
|
def first_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> Parent:
if include_self and self.sequence_type == sequence_type:
return Parent(sequence=self)
if self.parent:
return self.parent.first_ancestor_of_type(sequence_type, True)
raise NoSuchAncestorException
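
A usage sketch grounded in the exception named above; "chromosome" is a hypothetical sequence type:

try:
    chromosome = seq.first_ancestor_of_type("chromosome", include_self=False)
except NoSuchAncestorException:
    chromosome = None   # no ancestor in the parent chain has that type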
|
[
"def has_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> bool:\n if include_self and self.sequence_type == sequence_type:\n return True\n if self.parent:\n return self.parent.has_ancestor_of_type(sequence_type, include_self=True)\n return False",
"def get_parent_of(self, type_name):\n parent = None\n # look, if this node the parent\n for child in self.children:\n if type_name == child.type_name:\n parent = self.type_name\n break\n\n # search in child nodes for parent\n if parent is None:\n for child in self.children:\n parent = child.get_parent_of(type_name)\n if parent:\n break\n return parent",
"def get_ancestor_of_type(self, type_code):\n try:\n return self.get_ancestors().get(location_type__code=type_code)\n except self.DoesNotExist:\n return None",
"def closest_parent(self):\n # type: () -> Optional[Tag]\n parent = self.parent\n while parent:\n if parent.name in self.PARENT_TAGS:\n return parent\n parent = parent.parent\n return None # pragma: no cover",
"def ancestor(resource, cls, include_self=False): # noqa\n\n def predicate(resource):\n return isinstance(resource, cls)\n\n return first(ancestor_finder(resource, predicate, include_self))",
"def FindParent(self, parentClass):\n parent = self.parent\n while parent and not isinstance(parent, parentClass):\n if isinstance(parent, Element):\n parent = parent.parent\n else:\n parent = None\n return parent",
"def get_parent_of_type(self, t):\n try:\n if self.content_type.name == t:\n return self\n else:\n return self.get_parent().specific.get_parent_of_type(t)\n except (AttributeError):\n return None",
"def parent(self) -> Optional['Tree[T]']:\n return self._parent",
"def getparent(self, cls):\n current = self\n while current and not isinstance(current, cls):\n current = current.parent\n return current",
"def getParent(self) -> \"SoType const\":\n return _coin.SoType_getParent(self)",
"def Parent(self, *args):\n return _snap.TUnionFind_Parent(self, *args)",
"def getParent(self):\n assert self != None\n return self.parentTree",
"def ancestor_model(resource, cls, include_self=False): # noqa\n\n def predicate(resource):\n return hasattr(resource, \"model\") and isinstance(resource.model, cls)\n\n o = first(ancestor_finder(resource, predicate, include_self))\n return o.model if o else None",
"def _find_grandparent(self):\n if self._parent is None:\n return None\n else:\n parent_node = self._parent\n return parent_node._parent # Node Sure about this",
"def get_parent(self):\n return self.__return(self.node.parent())",
"def get_parent(self) -> 'Node':\n return self.parent",
"def fetch_parent(self):\n if not self.parent_id:\n return None\n return self._fetch(self.client, self.parent_id)",
"def parent(self):\n parent_elem = self.element_info.parent\n\n if parent_elem:\n return self.backend.generic_wrapper_class(parent_elem)\n else:\n return None",
"def get_parent(self, flag=None):\n if flag is None:\n if self.has_parent():\n return self.parent\n else:\n if self.has_parent(flag):\n parent = self.parent\n if parent.is_marked(flag):\n return parent\n else:\n return parent.get_parent(flag=flag)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns True if some ancestor (parent, parent of parent, etc.) of this sequence has the given sequence type, or False otherwise. If include_self is True and this sequence has the given type, returns True.
|
def has_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> bool:
if include_self and self.sequence_type == sequence_type:
return True
if self.parent:
return self.parent.has_ancestor_of_type(sequence_type, include_self=True)
return False
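# Hedged sketch mirroring the method above (hypothetical objects):
#
#     exon.has_ancestor_of_type("gene")                       # True if any ancestor is a gene
#     exon.has_ancestor_of_type("exon", include_self=False)   # skips exon itself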
|
[
"def first_ancestor_of_type(self, sequence_type: str, include_self: bool = True) -> Parent:\n if include_self and self.sequence_type == sequence_type:\n return Parent(sequence=self)\n if self.parent:\n return self.parent.first_ancestor_of_type(sequence_type, True)\n raise NoSuchAncestorException",
"def is_ancestor_of(self, other, include_self=False):\n return other.is_descendant_of(self, include_self=include_self)",
"def is_descendant_of(self, other, include_self=False):\n if other.pk == self.pk:\n return include_self\n\n return self._closure_model.objects.filter(\n parent=other,\n child=self\n ).exclude(pk=self.pk).exists()",
"def inheritsFrom(self, type):\n arrayList = self\n while self != None:\n if self.getType() == type:\n return True\n self = self.getInherits()\n return False",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoBase_isOfType(self, type)",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.ScXMLObject_isOfType(self, type)",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoDetail_isOfType(self, type)",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoField_isOfType(self, type)",
"def is_in_tree(self, type_to_search):\n return self.get_sub_tree(type_to_search) is not None",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoEvent_isOfType(self, type)",
"def has_ancestor(self, node):\n if self is node:\n return True\n elif self._parent is None:\n return False\n else:\n return self._parent.has_ancestor(node)",
"def is_parent(self, thing):\n\n d = self.ensure_driver(thing,\n \"Can only be the parent of a Driver or Entity.\")\n\n return self in d.contents()",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoAction_isOfType(self, type)",
"def __contains__(self, included):\n \n if not isinstance(included, (Segment, Timeline)):\n raise TypeError(\"unsupported type '%s'. Must be\"\n \"Segment or Timeline.\" % type(included).__name__)\n \n # True if `included` segment exists in timeline,\n # False otherwise\n if isinstance(included, Segment):\n try:\n i = self.index(included)\n return True\n except Exception, e:\n return False\n \n # True if every segment of included timeline \n # exists in timeline, False otherwise\n elif isinstance(included, Timeline):\n return all([segment in self for segment in included])",
"def has_parent(self, flag=None):\n if flag is None:\n return self.type != 'ROOT'\n else:\n if self.type != 'ROOT':\n parent = self.parent\n if parent.is_marked(flag):\n return True\n else:\n return parent.has_parent(flag=flag)\n else:\n return False",
"def isDerivedFrom(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoType_isDerivedFrom(self, type)",
"def isOfType(self, type: 'SoType') -> \"SbBool\":\n return _coin.SoError_isOfType(self, type)",
"def get_ancestor_of_type(self, type_code):\n try:\n return self.get_ancestors().get(location_type__code=type_code)\n except self.DoesNotExist:\n return None",
"def can_be_parent(self, ptype, otype):\n if ptype < otype:\n return True\n return False"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a FASTA-formatted string for this sequence. Lines are broken every num_chars characters.
|
def to_fasta(self, num_chars: Optional[int] = 60) -> str:
    if self.is_empty:
        raise EmptySequenceFastaError("Cannot write FASTA for empty Sequence")
    if num_chars is None:
        num_chars = self._len  # no wrapping: the whole sequence on one line
    seq = str(self)
    r = [f">{self.id}"]
    for i in range(0, self._len, num_chars):
        r.append(seq[i : i + num_chars])
    return "\n".join(r)
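# Illustrative output, assuming a hypothetical 6-residue sequence with
# id="seq1" and num_chars=4 (a line break every 4 characters):
#
#     >seq1
#     ACGT
#     AC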
|
[
"def pretty(self, width=2):\n ret = \"\"\n full_width = width * 10\n seq = ((\" \" * ((self.start_pos - 1) % full_width)) + \"{}\" \\\n + (\" \" * ((width - 1) - ((self.end_pos - 1) % full_width)))).format(\n self.sequence)\n sa = (self.start_pos - 1) // full_width\n ea = (self.end_pos - 1) // full_width\n ret += (\"{pos!s:<12}: \" + \" \".join([\"{ind}\"] * width) + \"\\n\").format(\n pos=self.chromosome, ind=\"1234567890\")\n for i in range(ea - sa + 1):\n ret += (\"{pos:<12}: \" + \" \".join([\"{}\"] * width) + \"\\n\").format(\n *[seq[i * full_width + j * 10:i * full_width + (j + 1) * 10] for \\\n j in range(width)],\n pos=(sa + i) * full_width)\n return ret",
"def fasta_format(sequence, name, width=70, header=True):\n if header:\n output = '>'+name+'\\n'\n else:\n output = ''\n for start in range(0, len(sequence), width):\n end = start + width\n if end > len(sequence):\n end = len(sequence)\n output += sequence[start:end]+'\\n'\n return output",
"def format(self, allowed_line_length):",
"def padlines(txt, width):\r\n\r\n return '\\n'.join([' ' * width + line for line in txt.splitlines()])",
"def create_underline(length, char):\n ret = \"\"\n for _ in range(0, length):\n ret += char\n return ret",
"def fastaformat(filename, linesize):\n records = fasta(filename)\n for r in records:\n s = r.fastasequence\n print \">%s\" % s.name\n print fastasequence.prettyprint_dna(s.seq, linesize)\n records.close()",
"def __str__(self) -> str:\n result = \"\"\n for index, digit in enumerate(str(self.value)):\n\n # take a line break after we reach the width\n if index % self.image_width == 0 and index > 0:\n result += \"\\n\"\n\n result += digit\n\n return result",
"def line(width, decorated=False):\n if decorated:\n if width < 2:\n return \"\"\n else:\n return '>' + repeat((width - 2), \"-\") + '<'\n else:\n return repeat(width, \"-\")",
"def _fill_line(line):\n # Length must be 164: TID, FGCID, IFX, IFY, 4 * 10 * (PFiPX, PFjPY, occupancy, speed)\n while len(line) < 164:\n line.append('')\n line.append('')\n line.append('')\n line.append('')\n return line",
"def format_line(text, width):\n text = unicode(text)\n if len(text) < width:\n result = text.ljust(width)\n elif width > 2:\n result = text[:width-2] + '… '\n elif width == 2:\n result = '… '\n else:\n result = ' ' * width\n return result.encode('utf-8')",
"def add_line_numbers(source: str) -> str:\n return \"\\n\".join(f\"{n: <4}{line}\" for (n, line) in enumerate(source.split(\"\\n\"), 1))",
"def build_line(\n beginning_spaces: int,\n star_count: int,\n door_width: int,\n has_doorknob: bool,\n) -> str:\n\n line = (\n beginning_spaces * CHARSET[\"space\"]\n + CHARSET[\"line_start\"]\n + int(star_count / 2 - door_width / 2) * CHARSET[\"star\"]\n + build_door_line(door_width, has_doorknob)\n + int(star_count / 2 - door_width / 2) * CHARSET[\"star\"]\n + CHARSET[\"line_end\"]\n )\n return line",
"def render_blank_note_in_ascii(self):\n return \" \" * 4",
"def build_dna_string(length):\n from random import choice\n\n dna_sequence = \"\"\n\n for count in range(length):\n dna_sequence += choice(\"ACGT\")\n\n return dna_sequence",
"def get_short_string(self):\n short_string = self.__dna_string[:40]\n if len(short_string) > 32:\n short_string = short_string[:32] + \"...\" + short_string[len(short_string) - 3:]\n return short_string",
"def wrap(s, n=60):\n return '\\n'.join(s[i:(i+n)] for i in range(0,len(s), n))",
"def chunks(seq, size):\n seq = str(seq) # in case seq is a Biopython sequence\n seq_list = [seq[i:i+size] for i in range(0, len(seq), size)]\n return '\\n'.join(seq_list)",
"def __str__(self):\r\n \r\n prefix = 'Character occurences : \\n' + \"======================\\n\"\r\n suffix = \"\\n======================\"\r\n return prefix + self.char_occ.to_string(index=False, justify='left') + suffix",
"def format(self):\n ret = ''\n for nccd, ccd in enumerate(self._data):\n ret += 'CCD ' + str(nccd+1) + ':\\n'\n ret += ccd.format()+'\\n'\n return ret"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Run a job. This applies the function node, and returns a |ResultMessage| when complete. If an exception is raised in the job, the |ResultMessage| will have ``'error'`` status.
|
def run_job(key, node):
try:
result = node.apply()
return ResultMessage(key, 'done', result, None)
except Exception as exc:
return ResultMessage(key, 'error', None, exc)
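# Hedged sketch: exceptions raised inside node.apply() are captured rather
# than propagated, so the caller always receives a ResultMessage:
#
#     run_job("k1", node)      # -> ResultMessage("k1", "done", result, None)
#     run_job("k2", failing)   # -> ResultMessage("k2", "error", None, exc)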
|
[
"def run_job(job_name, job_config):\n\n logging.info(f\"Running job {job_name}\")\n results, query_id = execute_query(centra, query=job['query'])\n\n if 'csv' in job['output']:\n output_csv(job_name, results)\n \n if 'stdout' in job['output']:\n logging.info(f\"Displaying results for job {job_name}:\")\n print(json.dumps(results, indent=4))\n\n # If the job calls to label the agents, perform the labeling\n if 'label' in job:\n label_config = job['label']\n automatic_label_agents(query_id, len(results), label_config['key'], label_config['value'])\n\n return",
"def execute_job(self, job, node=None, verbose=True):\n\n if not self.running:\n logger.warn(\"The cluster was stopped. Starting it automatically\")\n self.start()\n\n if node is None:\n node = self.master\n\n exec_dir = \"/tmp\"\n\n # Copy necessary files to cluster\n files_to_copy = job.get_files_to_copy()\n action = Put([node], files_to_copy, exec_dir)\n action.run()\n\n # Get command\n command = job.get_command(exec_dir)\n\n # Execute\n logger.info(\"Executing spark job. Command = {\" + self.bin_dir +\n \"/spark-submit \" + command + \"} in \" + str(node))\n\n proc = SshProcess(self.bin_dir + \"/spark-submit \" + command, node)\n\n if verbose:\n red_color = '\\033[01;31m'\n\n proc.stdout_handlers.append(sys.stdout)\n proc.stderr_handlers.append(\n ColorDecorator(sys.stderr, red_color))\n\n proc.start()\n proc.wait()\n\n # Get job info\n job.stdout = proc.stdout\n job.stderr = proc.stderr\n job.success = (proc.exit_code == 0)\n\n return proc.stdout, proc.stderr",
"def run_job():",
"def run_job(job_name):\n return jsonify(job.run_job(job_name))",
"def job(func):\n async def inner(running_jobs, obj, output_queue, *args, **kwargs):\n id = obj['id']\n try:\n res = await func(obj, output_queue, *args, **kwargs)\n if res == True:\n await output_queue.put({'type': 'success', 'id': d})\n elif isinstance(res, dict):\n res['id'] = id\n await output_queue.put(res)\n else:\n log.error(\"%d: Unknown error\"%id)\n await output_queue.put({'type': 'failure', 'id':id, 'failure_type': 'unknown'})\n except asyncio.CancelledError as e:\n await output_queue.put({'type': 'failure', 'id':id, 'failure_type': type(e).__name__, 'message': str(e)})\n except Exception as e:\n logging.error(\"%d: %s : %s\"%(id, type(e).__name__, str(e)))\n logging.error(traceback.format_exc())\n await output_queue.put({'type': 'failure', 'id':id, 'failure_type': type(e).__name__, 'message': str(e)})\n finally:\n logging.info(\"%d: finished\"%id)\n if id in running_jobs:\n del running_jobs[id]\n return inner",
"def run_job(self):\n\n try:\n job_item = self.job_queue.get(block=False, timeout=1)\n except Exception:\n self.log.debug(\n \"Directord server found nothing to do, cooling down\"\n \" the poller.\"\n )\n return 512, time.time()\n else:\n restrict_sha256 = job_item.get(\"restrict\")\n if restrict_sha256:\n if job_item[\"task_sha256sum\"] not in restrict_sha256:\n self.log.debug(\n \"Job restriction %s is unknown.\", restrict_sha256\n )\n return 512, time.time()\n\n job_targets = job_item.pop(\"targets\", list())\n # NOTE(cloudnull): We run on all targets if query is used.\n run_query = job_item[\"verb\"] == \"QUERY\"\n\n if job_targets and not run_query:\n targets = list()\n for job_target in job_targets:\n job_target = job_target.encode()\n if job_target in self.workers:\n targets.append(job_target)\n else:\n self.log.critical(\n \"Target %s is in an unknown state.\", job_target\n )\n return 512, time.time()\n else:\n targets = self.workers.keys()\n\n if job_item.get(\"run_once\", False) and not run_query:\n self.log.debug(\"Run once enabled.\")\n targets = [targets[0]]\n\n if run_query:\n job_item[\"targets\"] = [i.decode() for i in targets]\n\n task = job_item.get(\"task\", utils.get_uuid())\n job_info = self.create_return_jobs(\n task=task, job_item=job_item, targets=targets\n )\n self.log.debug(\"Sending job:%s\", job_item)\n for identity in targets:\n if job_item[\"verb\"] in [\"ADD\", \"COPY\"]:\n for file_path in job_item[\"from\"]:\n job_item[\"file_sha256sum\"] = utils.file_sha256(\n file_path=file_path\n )\n if job_item[\"to\"].endswith(os.sep):\n job_item[\"file_to\"] = os.path.join(\n job_item[\"to\"],\n os.path.basename(file_path),\n )\n else:\n job_item[\"file_to\"] = job_item[\"to\"]\n\n if job_item[\"file_to\"] not in job_info[\"TRANSFERS\"]:\n job_info[\"TRANSFERS\"].append(job_item[\"file_to\"])\n\n self.log.debug(\n \"Sending file transfer message for\"\n \" file_path:%s to identity:%s\",\n file_path,\n identity.decode(),\n )\n self.driver.socket_send(\n socket=self.bind_job,\n identity=identity,\n command=job_item[\"verb\"].encode(),\n data=json.dumps(job_item).encode(),\n info=file_path.encode(),\n )\n else:\n self.log.debug(\n \"Sending job message for job:%s to identity:%s\",\n job_item[\"verb\"].encode(),\n identity.decode(),\n )\n self.driver.socket_send(\n socket=self.bind_job,\n identity=identity,\n command=job_item[\"verb\"].encode(),\n data=json.dumps(job_item).encode(),\n )\n\n self.log.debug(\"Sent job %s to %s\", task, identity)\n else:\n self.return_jobs[task] = job_info\n\n return 128, time.time()",
"def process(self):\n\n #Check if we should terminate our timer so the thread can cleanly exit\n if self.quitting:\n self.timer.stop()\n return\n\n if self.cancelled:\n if self.job:\n print \"Cancelled job\"\n self.cancelled = False\n self.state = None\n self.job = None\n\n #If there are enqueued jobs and we aren't working on one right now,\n #we should get one from the queue and work on it.\n if self.job is None and not self.queue.empty():\n print \"Starting new job\"\n self.job = self.queue.get_nowait()\n\n if self.job is None:\n return\n\n #Setup the generator that will do the actual computations for our\n #current job, if we haven't already done so.\n if self.state is None:\n self.state = self.setup_job_state(self.job)\n\n cleanup = False\n done = False\n\n #Do a single iteration and possibly emit the result if we're done\n try:\n done, result = self.state.next()\n except StopIteration:\n print \"Job failed to complete properly\"\n cleanup = True\n except BaseException as exc:\n print \"Error in executing job:\"\n traceback.print_exc()\n cleanup = True\n\n if done:\n #Emit the job finished signal so that our consumer can make\n #use of the result.\n print \"Finished job\"\n self.job_finished.emit(self.job, result)\n cleanup = True\n\n if cleanup:\n self.job = None\n self.state = None",
"def map(self, function, array):\n array = list(array)\n\n self._validate_function_arguments(function, array)\n tmp_dir = tempfile.mkdtemp(\n prefix=\"\", suffix='_SGE_job', dir=self.config[\"DIRECTORIES\"][\"TMP\"]\n )\n\n # jobs directory\n jobs_dir = os.path.join(tmp_dir, 'jobs')\n os.mkdir(jobs_dir)\n\n # create results directory\n os.mkdir(os.path.join(tmp_dir, 'results'))\n\n # store the function\n with open(os.path.join(tmp_dir, 'function.pickle'), 'wb') as my_file:\n cloudpickle.dump(function, my_file)\n\n # store execution context\n with open(\n os.path.join(tmp_dir, 'ExecutionContext.pickle'), 'wb'\n ) as my_file:\n cloudpickle.dump(self.execution_context, my_file)\n\n # store the array\n for task_nr, start_index in enumerate(\n range(0, len(array), self.chunk_size)\n ):\n with open(\n os.path.join(jobs_dir, str(task_nr + 1) + '.job'), 'wb'\n ) as my_file:\n cloudpickle.dump(\n list(array[start_index : start_index + self.chunk_size]),\n my_file,\n )\n\n nr_tasks = task_nr + 1\n\n batch_file = self._render_batch_file(nr_tasks, tmp_dir)\n\n with open(os.path.join(tmp_dir, 'job.sh'), 'w') as my_file:\n my_file.write(batch_file)\n\n # create job id\n job_db = job_db_factory(tmp_dir)\n job_db.create(len(array))\n\n # start the job with qsub\n subprocess.run( # noqa: S607,S603\n ['qsub', os.path.join(tmp_dir, 'job.sh')], stdout=subprocess.PIPE\n )\n\n # wait for the tasks to be finished\n finished = False\n while not finished:\n time.sleep(5)\n for k in range(nr_tasks)[::-1]: # check from last to first job\n # +1 offset for k b/c SGE array jobs start at 1, not at 0\n if job_db.wait_for_job(k + 1, self.time_h):\n break\n else:\n finished = True\n # from here on all tasks are finished\n\n # sleep to make sure files are entirely written\n time.sleep(5)\n\n # make the results array\n results = []\n had_exception = False\n for task_nr in range(nr_tasks):\n try:\n my_file = open(\n os.path.join(\n tmp_dir, 'results', str(task_nr + 1) + '.result'\n ),\n 'rb',\n )\n single_result = pickle.load(my_file)\n results += single_result\n my_file.close()\n except Exception as e:\n results.append(\n Exception(\n 'Could not load temporary ' 'result file:' + str(e)\n )\n )\n had_exception = True\n\n # delete the temporary folder if there was no problem\n # and execution context does not need it\n if self.execution_context.keep_output_directory:\n pass\n elif had_exception:\n tmp_dir = tmp_dir[:-1] if tmp_dir[-1] == '/' else tmp_dir\n os.rename(tmp_dir, tmp_dir + '_with_exception')\n else:\n shutil.rmtree(tmp_dir)\n job_db.clean_up()\n return results",
"def job(\n ctx,\n cluster_id,\n step_name,\n bucket,\n prefix,\n packages,\n action_on_failure,\n job_name,\n job_kwargs,\n):\n # Get Python code assets from S3\n distribution_prefix = join(\"s3://\", bucket, prefix)\n python_major_version = sys.version[:3]\n egg_key = join(\n distribution_prefix,\n \"{{cookiecutter.package_name}}-\"\n + __version__\n + \"-py\"\n + python_major_version\n + \".egg\",\n )\n pyspark_entrypoint_key = join(distribution_prefix, \"pyspark_entrypoint.py\")\n\n # Build the arguments to send to command-runner.jar\n spark_submit_cmd = [\"spark-submit\"]\n if packages:\n spark_submit_cmd.extend([\"--packages\", packages])\n spark_submit_cmd.extend([\"--py-files\", egg_key])\n spark_submit_cmd.append(pyspark_entrypoint_key)\n spark_submit_cmd.extend([\"--job-name\", job_name])\n if job_kwargs:\n spark_submit_cmd.extend([\"--job-kwargs\"] + list(job_kwargs))\n msg = f\"Will execute the following spark-submit command on EMR Master:\\n\\t{spark_submit_cmd}\\n\\n\"\n logger.info(msg)\n\n # Submit the EMR Step through the API\n client = boto3.client(\"emr\")\n response = client.add_job_flow_steps(\n JobFlowId=cluster_id,\n Steps=[\n {\n \"Name\": step_name,\n \"ActionOnFailure\": action_on_failure,\n \"HadoopJarStep\": {\n \"Jar\": \"command-runner.jar\",\n \"Args\": spark_submit_cmd,\n },\n },\n ],\n )\n logger.info(response)",
"def submit_job(slug, class_name, job_config, job_name=None, message_slug=None, queue_name=None, app_id=None):\n if settings.SUBMIT_JOB_THROUGH_YARN:\n return submit_job_through_yarn(slug, class_name, job_config, job_name, message_slug, queue_name=queue_name,\n app_id=app_id)\n else:\n return submit_job_through_job_server(slug, class_name, job_config, job_name, message_slug)",
"def run_job(job):\n\n # Process launch with a pickled Job as file path and session.location\n driver_file_path = _os.path.join(_os.path.dirname(__file__), '_graphlabJob.py')\n path = job._get_exec_dir()\n job_path = _os.path.join(path, 'job-definition')\n _os.makedirs(path)\n\n ExecutionEnvironment.prepare_job_exec_dir(job, path)\n\n env = _gl.sys_util.make_unity_server_env()\n log_file_path = _os.path.join(path, 'execution.log')\n log_file = open(log_file_path, 'w')\n\n import sys\n python_exec = sys.executable\n arglist = [python_exec, driver_file_path, job_path]\n\n # Launch the other process\n __LOGGER__.debug(\"Launching process with arglist: %s\" % arglist)\n\n if _sys.platform == 'win32':\n proc = _subprocess.Popen(arglist, env=env, stdin=_subprocess.PIPE,\n stdout=log_file, stderr=_subprocess.STDOUT, bufsize=-1)\n else:\n proc = _subprocess.Popen(arglist, env=env, stdin=_subprocess.PIPE,\n stdout=log_file, stderr=_subprocess.STDOUT, bufsize=-1,\n preexec_fn=lambda: _os.setpgrp())\n __LOGGER__.debug(\"Process launched with pid: %d\" % proc.pid)\n\n ret_job = _job.LocalAsynchronousJob(proc.pid,job)\n return ret_job",
"def webjob_run(job, run):\n url = 'https://clef2.scm.azurewebsites.net/api/triggeredwebjobs/%s/history/%s' % (job, run)\n auth = HTTPBasicAuth(os.environ['WEBJOBS_USER_NAME'], os.environ['WEBJOBS_PASSWORD'])\n resp = requests.get(url, auth=auth)\n if resp.status_code == 200:\n result = jsonpickle.decode(resp.content)\n for prop in ['job_name', 'id', 'name', 'status', 'start_time', 'end_time', 'duration',\n 'output_url', 'error_url', 'url', 'trigger']:\n click.echo('\\t%s:\\t%s' % (prop, result[prop]))\n resp = requests.get(result['output_url'], auth=auth)\n if resp.status_code == 200:\n click.echo('-------')\n click.echo('output:')\n click.echo('-------')\n click.echo(resp.content)\n click.echo('-------')\n else:\n click.echo('failed to fetch output. %s. %s' % (resp.status_code, resp.content))\n elif resp.status_code == 404:\n click.echo('Not run or job does not exist.')\n else:\n click.echo('%s. failed. %s' % (resp.status_code, resp.content))",
"def compute(self, args, ctx):\n # Clear out resource files\n to_overwrite = [_RESULT_FILENAME, _ERR_FILENAME, _LOG_FILENAME]\n for fn in to_overwrite:\n _touch(os.path.join(self.paths['base'], fn), overwrite=True)\n # Write out status\n self._set_status('pending')\n # Write out start and end time\n self.start_time = _write_time(self.paths['start_time'], ts=_time())\n self.end_time = _write_time(self.paths['end_time'], ts=None)\n func = self.coll.func\n try:\n self.result = func(self.ident, args, ctx)\n except Exception:\n # There was an error running the resource's function\n self.result = None\n format_exc = traceback.format_exc()\n traceback.print_exc()\n with open(self.paths['error'], 'a') as fd:\n fd.write(format_exc)\n self._set_status('error')\n return self\n finally:\n self.end_time = _write_time(self.paths['end_time'], ts=_time())\n self._set_status('complete')\n _json_dump(self.result, self.paths['result'])\n return self",
"def job_run(self, job_type, package_name, build_system=None, build_tag=None,\n release_slug=None, repo_type=None, repo_branch=None):\n ENDPOINT = \"/job/run\"\n _ENDPOINT = \"/package/\" + package_name + \"/exist\"\n pkg_exists = self._call_api(_ENDPOINT)\n if list(pkg_exists.values())[0]:\n if job_type == \"syncupstream\":\n payload = {'job_type': job_type, 'package_name': package_name,\n 'repo_type': repo_type, 'repo_branch': repo_branch}\n elif job_type == \"syncdownstream\":\n payload = {'job_type': job_type, 'package_name': package_name,\n 'build_system': build_system,\n 'build_tag': build_tag}\n elif job_type == \"stringchange\":\n payload = {'job_type': job_type, 'package_name': package_name,\n 'release_slug': release_slug}\n else:\n return {\"job_type\": \"Invalid job type\"}\n else:\n return {\"pkg_error\": \"Given package does not exists\"}\n\n return self._send_api(ENDPOINT, payload)",
"def do_image_job(image_job: tt.ImageJob, work_dir: str) -> tt.ImageJobResult:\n # Output directory is based on the name of job.\n output_dir = os.path.join(work_dir, image_job.name)\n\n # Delete and create output directory.\n gfuzz_common.remove(output_dir)\n os.makedirs(output_dir, exist_ok=True)\n\n name = image_job.name\n if name.endswith('.frag'):\n name = gfuzz_common.remove_end(name, '.frag')\n\n frag_file = os.path.join(output_dir, name + FRAG_SUFFIX)\n json_file = os.path.join(output_dir, name + JSON_SUFFIX)\n log_file = os.path.join(output_dir, LOGFILE_NAME)\n status_file = os.path.join(output_dir, STATUS_FILENAME)\n png_file = os.path.join(output_dir, name + PNG_SUFFIX)\n nondet_0 = os.path.join(output_dir, NONDET0_PNG)\n nondet_1 = os.path.join(output_dir, NONDET1_PNG)\n\n gfuzz_common.write_to_file(image_job.fragmentSource, frag_file)\n gfuzz_common.write_to_file(image_job.uniformsInfo, json_file)\n\n res = tt.ImageJobResult()\n\n # Set nice defaults to fields we will not update anyway\n res.passSanityCheck = True\n res.log = 'Start: ' + name + '\\n'\n\n with gfuzz_common.open_helper(log_file, 'w') as f:\n try:\n gfuzz_common.set_logfile(f)\n run_image_job(json_file, status_file, png_file, output_dir, image_job.skipRender)\n except Exception as ex:\n gfuzz_common.log(str(ex))\n gfuzz_common.log('Removing status file and continuing...')\n gfuzz_common.remove(status_file)\n finally:\n gfuzz_common.unset_logfile()\n\n if os.path.isfile(log_file):\n with gfuzz_common.open_helper(log_file, 'r') as f:\n res.log += f.read()\n\n if os.path.isfile(png_file):\n with gfuzz_common.open_bin_helper(png_file, 'rb') as f:\n res.PNG = f.read()\n\n if os.path.isfile(status_file):\n with gfuzz_common.open_helper(status_file, 'r') as f:\n status = f.read().rstrip()\n if status == STATUS_SUCCESS:\n res.status = tt.JobStatus.SUCCESS\n elif status == STATUS_CRASH:\n res.status = tt.JobStatus.CRASH\n elif status == STATUS_TIMEOUT:\n res.status = tt.JobStatus.TIMEOUT\n elif status == STATUS_UNEXPECTED:\n res.status = tt.JobStatus.UNEXPECTED_ERROR\n elif status == STATUS_NONDET:\n res.status = tt.JobStatus.NONDET\n with gfuzz_common.open_bin_helper(nondet_0, 'rb') as f:\n res.PNG = f.read()\n with gfuzz_common.open_bin_helper(nondet_1, 'rb') as f:\n res.PNG2 = f.read()\n else:\n res.log += '\\nUnknown status value: ' + status + '\\n'\n res.status = tt.JobStatus.UNEXPECTED_ERROR\n else:\n # Not even a status file?\n res.log += '\\nNo STATUS file\\n'\n res.status = tt.JobStatus.UNEXPECTED_ERROR\n\n return res",
"def submit(self) -> None:\n if self._job_id:\n raise JobError('Job has already been submitted!')\n self._job_id = self._backend.run(circuits=self._qobj)",
"def submit_job(self, event):\n logging.info(\"Submitting job\")\n return RestClient.submit_job(self,event)",
"def run_job(self):\n self.set_up_logging(quiet=self.options.quiet,\n verbose=self.options.verbose,\n stream=self.stderr)\n\n with self.make_runner() as runner:\n runner.run()\n\n if not self.options.no_output:\n for line in runner.stream_output():\n self.stdout.write(line)\n self.stdout.flush()",
"def submit_job(self, application, job):\n raise NotImplementedError(\n \"Abstract method `LRMS.submit_job()` called \"\n \"- this should have been defined in a derived class.\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calculates the maximum likelihood estimate of the covariance matrix of a matrix x of shape (n_samples, n_features), unbiased when ddof=1 (the default).
|
def get_mle_covariance(self, x = None, ddof = 1):
    if is_none(x):
        x = self.x
    # small diagonal term to avoid singularities
    return np.cov(x, ddof = ddof, rowvar = False) + 1e-6 * np.identity(x.shape[1])
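# Minimal self-contained check of the estimator, assuming only NumPy
# (no class context needed):
#
#     import numpy as np
#     x = np.random.default_rng(0).normal(size=(100, 3))
#     cov = np.cov(x, ddof=1, rowvar=False) + 1e-6 * np.identity(x.shape[1])
#     cov.shape   # (3, 3): one row/column per feature, jittered diagonal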
|
[
"def compute_covariance_matrix(X):\n return np.cov(X, rowvar=0)",
"def gauss(mu, covar, x):\n \n n, d = x.shape\n\n j, k = covar.shape\n\n # Check that the covariance matrix is the correct dimension\n if ((j != d) or (k !=d)):\n raise Exception('Dimension of the covariance matrix and data should match');\n \n invcov = la.inv(covar)\n mu.reshape((1, -1)) # Ensure that mu is a row vector\n\n x = x - mu\n fact = np.sum(((np.dot(x,invcov))*x), 1)\n\n y = np.exp(-0.5*fact)\n\n y = y/np.sqrt((2*np.pi)**d*la.det(covar))\n return y",
"def optimal_variance_nll(self, x):\n \n sigma = ((x - self.mu) ** 2).mean().sqrt()\n return Gaussian(mu=self.mu, sigma=sigma).nll(x)",
"def compute_covariance(X, target_dim = 0):\n\n if len(X.shape) != 2:\n raise ValueError(\"The input is not a m by n matrix\")\n elif target_dim != 0 and target_dim != 1:\n raise ValueError(\"The target dimension is not 0 or 1\")\n\n if target_dim == 1:\n X = X.T\n\n # Compute means\n X = np.float64(X)\n X -= np.mean(X, 1)[:, None]\n\n #trace_X = np.zeros((X.shape[0], 1))\n\n #for i in range(X.shape[0]):\n # trace_X[i, 0] = np.dot(X[i, :], X[i, :])\n\n cov_X = np.zeros((X.shape[0], X.shape[0]))\n\n\n\n for i in range(X.shape[0]):\n for j in range(i, X.shape[0]):\n comb_mean = np.dot(X[i, :], X[j, :]) / (X.shape[1] - 1)\n # corr_mean = comb_mean / np.sqrt(trace_X[i, 0] * trace_X[j, 0])\n cov_X[i, j] = comb_mean\n cov_X[j, i] = comb_mean\n if i% (int(X.shape[0]/100)) == 0:\n print(\"Computing covariance... %f done\" % (i/X.shape[0]))\n\n return cov_X",
"def covarMatrix(x):\n return np.matrix(x - np.mean(x, axis=0)[np.newaxis, :]).T * np.matrix(x - np.mean(x, axis=0)[np.newaxis, :])",
"def cal_cov_matrix(training_data):\n\t# cov_matrix = np.transpose(training_data).dot(training_data)/(training_data.shape[0] - 1)\n\tcov_matrix = training_data.T.dot(training_data)\n\t# cal cov_matrix by numpy\n\t# cov_matrix = np.cov(training_data, rowvar=False, bias=True)\n\tprint('cov_matrix shape ::: ', cov_matrix.shape)\n\t\"\"\" cal eig vector and value \"\"\"\n\teig_val, eig_vec = np.linalg.eig(cov_matrix)\n\t# print('val :::', eig_val)\n\t# print('sorted val :::', np.sort(eig_val))\n\t\"\"\" return the largest max_index eignvalues \"\"\"\n\tsort_index = np.argsort(-eig_val)\n\teig_val = sorted(eig_val, reverse=True)\n\t# eig_val = np.sort(-eig_val)\n\treturn sort_index, eig_val, eig_vec",
"def covariance_loss(X: torch.Tensor, Y: torch.Tensor) -> torch.Tensor:\n \n cVX = covariance(X, X)\n cVY = covariance(Y, Y)\n\n return ((cVX - cVY) * (cVX - cVY)).sum()",
"def calculate_scaled_covariance(self, X):\n p = X.shape[1]\n n = X.shape[0]\n S = np.cov(X, rowvar=False)\n diag_addition = np.power(n, -0.5) * math.log(p, 0.5)\n S = S + diag_addition*np.eye(p)\n return S",
"def calculate_covariance_matrix(X, Y=None):\n if Y is None:\n Y = X \n n_samples = np.shape(X)[0]\n covar_matrix = (1 / (n_samples-1)) * (X - X.mean(axis=0)).T.dot(Y - Y.mean(axis=0))\n return np.array(covar_matrix, dtype=float)",
"def _solve_column_with_cv(self, X, i):\n X_train, X_test = train_test_split(X, test_size=0.4, random_state=0)\n S_train = self.calculate_scaled_covariance(X_train)\n S_test = np.cov(X_test, rowvar=False)\n # Calculate the lambdas to check\n lambdas = np.arange(0.005, 51)\n lambdas = lambdas/50\n test_errors = []\n for l in lambdas:\n beta = self._solve_column_problem(S_train, i, l)\n error = self.column_likelihood_function(S_test, beta, i)\n test_errors.append(error)\n\n min_err_i = np.argmin(test_errors)\n\n best_l = lambdas[min_err_i]\n S = np.cov(X, rowvar=False)\n return self._solve_column_problem(S, i, best_l), best_l",
"def MVN_log_likelihood(X, model):\n return np.sum(multivariate_normal.logpdf(X.T, model.mean, model.cov))",
"def MVN_log_likelihood(X, model):\n\n return logsumexp(multivariate_normal.logpdf(X.T, mean=model.mean, cov=model.cov))",
"def covariance(X: torch.Tensor, Y: torch.Tensor) -> torch.Tensor:\n\n # means \n mX = torch.mean(X, dim=0)\n mY = torch.mean(Y, dim=0)\n\n # centered\n cX = X - mX\n cY = Y - mY\n\n return torch.matmul(cX.t(), cY) / cX.shape[0]",
"def _compute_det_variance(self):",
"def posterior_variance_jacobian(self, x: ndarray) -> ndarray:\n _, gp_variance_jacobian = self._gp.posterior_jacobians(x)\n\n return gp_variance_jacobian",
"def covariance (x, y):\n mean_x = mean(x)\n mean_y = mean(y)\n covar = 0.0\n for i in range(len(x)):\n covar += (x[i] - mean_x)*(y[i] - mean_y)\n return covar",
"def _sq_mahalanobis(self, x, center, cov_matrix):\n x_center_diff = x - center\n return x_center_diff.dot(np.linalg.inv(cov_matrix)).dot(\n x_center_diff.T)",
"def _compute_mean_and_non_diag_covariance(self, Xtest):\n ntest = len(Xtest)\n\n # Compute Xtrain/Xtest elements of covariance metrix\n K_12 = self.kernel.transform(self.Xtrain_, Xtest)\n\n # Compute the \"difference\" between the Xtrain data and the Xtest data.\n # L_11_ is the sqroot of Xtrain Covariance.\n # K_12 is the covariance of Xtrain and Xtest\n # Therefore, L_12 is the matrix that solves: (L_11)(L_12) = K_12.\n L_12 = np.linalg.solve(self.L_11_, K_12)\n\n # Compute the Xtrain covariance \"feature weighs\".\n # np.linalg.solve returns x in Ax=B, where A = L_11 and B = y_train\n # We can interpret x as the feature weights. In other words, this\n # step returns the feature weights where the inputs is the\n # Xtrain/Xtrain covariance matrix elements.\n cov_fw = np.linalg.solve(self.L_11_, self.ytrain_).reshape(ntest,)\n\n # Obtain the posterior mean by multiplying the cov_fw by the\n # \"difference\" b/w Xtrain and Xtest.\n # L12 is the \"difference\" b/w Xtrain/Xtest covariances.\n # cov_fw are the weights that produce ytrain when multiplied by\n # Xtrain.\n mu = np.dot(L_12.T, cov_fw)\n\n return mu, L_12",
"def covariance_matrix(self):\n self.covariance = np.dot(self.matrix, self.matrix.transpose())"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calculates the maximum likelihood estimation of the mean vector of a matrix x of shape (n_samples, n_features).
|
def get_mle_mean(self, x = None):
if is_none(x):
x = self.x
return x.mean(0)
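# For reference: the MLE of the mean is the per-feature sample average, so
# x.mean(0) is equivalent to x.sum(axis=0) / x.shape[0] for an
# (n_samples, n_features) array.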
|
[
"def likelihood(self, x: np.ndarray) -> np.ndarray:",
"def MVN_log_likelihood(X, model):\n return np.sum(multivariate_normal.logpdf(X.T, model.mean, model.cov))",
"def MVN_log_likelihood(X, model):\n\n return logsumexp(multivariate_normal.logpdf(X.T, mean=model.mean, cov=model.cov))",
"def mlln(self, x, data):\n return np.sum(-np.log(x[1] * np.sqrt(2* np.pi) )-((data-x[0])**2) / (2*x[1]**2))",
"def optimal_variance_nll(self, x):\n \n sigma = ((x - self.mu) ** 2).mean().sqrt()\n return Gaussian(mu=self.mu, sigma=sigma).nll(x)",
"def calculate_score(self, x):\n # p = F.softmax(x, dim=1)\n # q = torch.mean(p, dim=0)\n # kl = torch.sum(p * (F.log_softmax(x, dim=1) - torch.log(q)), dim=1)\n # return torch.exp(reduce(kl, \"mean\")).data\n mean,std = inception_score(x)\n return mean",
"def softmax(x):\n if len(x.shape) > 1:\n # Matrix\n # substracting max leaves function unchanged due to softmax's invariance to sums by a constant \n # keepdims= True, because broadcasting requires trailing shape entries to match\n x -= np.max(x, axis=1, keepdims=True)\n x = np.exp(x)\n sum_exp_xj = np.sum(x, axis=1, keepdims=True)\n x = np.divide(x, sum_exp_xj)\n else:\n # Vector\n x -= np.max(x)\n x = np.exp(x)\n sum_exp_xj = np.sum(x)\n x = np.divide(x, sum_exp_xj)\n return x",
"def mean_absolute_error(w, X, y):\n #####################################################\n # TODO 1: Fill in your code here #\n #####################################################\n n = y.size\n MAE = 0.0\n \n for i in range(n):\n xi = X[i,:]\n val = np.absolute(xi.dot(w) - y[i])\n MAE = MAE + val\n err = MAE/n\n \n return err",
"def evaluate_likelihood(self, X):\n Y = np.apply_along_axis(self.likelihood_l, 1, X)\n return(Y)",
"def softmax_minus_max(x):\n\n exp_scores = np.exp(x - np.max(x, axis = 1, keepdims = True))\n probs = exp_scores/np.sum(exp_scores, axis = 1, keepdims = True)\n return probs",
"def ml_mean(values):\n\n # return the equation for mean\n return sum(values)/len(values)",
"def mean_absolute_error(w, X, y):\n #####################################################\n # TODO 1: Fill in your code here #\n #####################################################\n n = y.shape[0]\n y_hat = []\n inter_matrix = w * X\n for i in range(len(inter_matrix)):\n y_hat.append(sum(inter_matrix[i]))\n y_hat = np.array(y_hat)\n err = sum(abs(y_hat - y))/n\n return err",
"def means_loss(X: torch.Tensor, Y: torch.Tensor) -> torch.Tensor:\n \n mX = torch.mean(X, dim=0)\n mY = torch.mean(Y, dim=0)\n\n return ((mX - mY) * (mX - mY)).sum()",
"def softmax(x):\n x = x.T - np.max(x.T, axis=0)\n x = np.exp(x) / np.sum(np.exp(x),axis=0)\n\n return x.T",
"def _maxL_waveform(self, func, *args, **kwargs):\n ind = np.argmax(self[\"log_likelihood\"])\n kwargs[\"ind\"] = ind\n return func(*args, **kwargs)",
"def negative_log_likelihood(self, y):\n # y.shape[0] is (symbolically) the number of rows in y, i.e.,\n # number of examples (call it n) in the minibatch\n # T.arange(y.shape[0]) is a symbolic vector which will contain\n # [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of\n # Log-Probabilities (call it LP) with one row per example and\n # one column per class LP[T.arange(y.shape[0]),y] is a vector\n # v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,\n # LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is\n # the mean (across minibatch examples) of the elements in v,\n # i.e., the mean log-likelihood across the minibatch.\n\n ty = y.flatten()\n return -T.mean(T.log(self.p_y_given_x)[T.arange(ty.shape[0]), ty])",
"def score(self, X, Xerr):\n if self.V is None or self.mu is None or self.weights is None:\n raise Exception(\"Model parameters not set.\")\n \n logprob = self.GMM.logprob_a(X,Xerr)\n logLs = logsumexp(logprob,axis=-1)\n return np.mean(logLs)",
"def likelihood(m):\n\n\tx_0 = np.array([12,4]) # tower 0\n\tx_1 = np.array([5,7]) # tower 1\n\td_0 = 3.9 # distance measurement 0\n\td_1 = 4.5 # distance measurement 1\n\tvar_0 = 1 # variance 0\n\tvar_1 = 1.5 # variance 1\n\n\t# calculate the expected distance measurements\n\td_0_hat = math.sqrt(np.sum(np.square(m-x_0)))\n\td_1_hat = math.sqrt(np.sum(np.square(m-x_1)))\n\n\t# evaluate sensor model\n\t# scipy.stats.norm.pdf(x,mu,sigma)\n\tpdf_0 = scipy.stats.norm.pdf(d_0, d_0_hat, math.sqrt(var_0))\n\tpdf_1 = scipy.stats.norm.pdf(d_1, d_1_hat, math.sqrt(var_1))\n\n\treturn pdf_0 * pdf_1",
"def avg_max(self):\n max_val, max_ind = t.max(self.probs, dim=1)\n mean = t.mean(max_val)\n return mean"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Get x, raw, cost, classes from a pandas dataframe.
|
def _pandas_parser(self, df):
if is_none(df):
return
    # sort to make sure the rows are in a consistent order
    # suppress the chained assignment warning, as the in-place sort works as intended
    pd.options.mode.chained_assignment = None
    df.sort_values(['functional', 'basis', 'unrestricted', 'reaction'], inplace=True)
    pd.options.mode.chained_assignment = "warn"
unique_reactions = df.reaction.unique()
unique_basis = df.basis.unique()
unique_functionals = df.functional.unique()
basis_to_id = {key:value for value, key in enumerate(unique_basis)}
func_to_id = {key:value for value, key in enumerate(unique_functionals)}
unres_to_id = {True: 1, False: 0}
self.id_to_basis = {value:key for value, key in enumerate(unique_basis)}
self.id_to_func = {value:key for value, key in enumerate(unique_functionals)}
self.id_to_unres = {1:True, 0:False}
energies = []
times = []
errors = []
for idx, reac in enumerate(unique_reactions):
sub_df = df.loc[df.reaction == reac]
energies.append(sub_df.energy.tolist())
errors.append(sub_df.error.tolist())
times.append(sub_df.time.tolist())
if idx == 0:
func = [func_to_id[x] for x in sub_df.functional.tolist()]
bas = [basis_to_id[x] for x in sub_df.basis.tolist()]
unres = [unres_to_id[x] for x in sub_df.unrestricted.tolist()]
classes = np.asarray([func, bas, unres], dtype=int)
self.x = np.asarray(errors)
self.raw = np.asarray(energies)
self.cost = np.median(times, axis=0)
self.classes = classes
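# Hedged sketch of the expected input frame (column names taken from the
# parser above; the values are hypothetical):
#
#     df = pd.DataFrame({
#         'reaction':     ['r1', 'r1', 'r2', 'r2'],
#         'functional':   ['PBE', 'B3LYP', 'PBE', 'B3LYP'],
#         'basis':        ['sto-3g'] * 4,
#         'unrestricted': [False] * 4,
#         'energy':       [-1.0, -1.1, -2.0, -2.1],
#         'error':        [0.02, 0.01, 0.03, 0.02],
#         'time':         [1.5, 3.2, 1.4, 3.1],
#     })
#     self._pandas_parser(df)   # fills self.x, self.raw, self.cost, self.classes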
|
[
"def metrics_classification(df):\n true_labels = df['True value']\n predictions = df.drop('True value', axis=1)\n\n scoreDf = pd.DataFrame(columns=[\"Model\", \"Accuracy\"])\n scoreDf.set_index(\"Model\")\n for model_name, predictions in predictions.iteritems():\n scoreDf = scoreDf.append(get_metrics_for_algorithm(model_name, true_labels, predictions), ignore_index=True)\n return scoreDf",
"def from_dataframe(df: pd.DataFrame):\n obj = Dataset()\n obj.labels = df.iloc[:, 0].to_numpy(dtype=int)\n obj.data = df.iloc[:, 1:].to_numpy(dtype=float)\n return obj",
"def inspect_data(df, n=5):\n # Getting first n rows of dataframe\n df_sample = df.head(n)\n # Getting type of each feature\n df_types = df.dtypes\n # Getting na values\n df_na = df.isna().sum()\n\n return df_sample, df_types, df_na",
"def get_data_frame_for_mRMR_method(self):\n values = self.get_feature_values_as_np_array() # *10e+7\n print(max(values))\n df = pd.DataFrame(data=values) # .astype(int))\n feat_list = list(['class'])\n feat_list.extend(self.feature_names)\n df[len(df.columns)] = self.get_labels_as_np_array() # add labels to the right last side\n df = df[[len(df.columns) - 1] + list(range(0, len(df.columns) - 1))] # put the labels into the left side\n df.columns = feat_list # keeping order of columns for the next concatenation\n return df",
"def classifaction_report_df(report):\n\n report_data = []\n lines = report.split('\\n')\n for line in lines:\n try:\n row = {}\n row_data = line.strip().split()\n for index in range(len(row_data)):\n if is_number(row_data[index]):\n start_ind = index\n break\n row['class'] = \" \".join(row_data[:(start_ind)])\n row['precision'] = float(row_data[start_ind])\n row['recall'] = float(row_data[start_ind + 1])\n row['f1_score'] = float(row_data[start_ind + 2])\n row['support'] = float(row_data[start_ind + 3])\n report_data.append(row)\n except:\n pass\n out = pd.DataFrame.from_dict(report_data)\n out[\"support\"] = out[\"support\"].apply(int)\n return out",
"def compute_class_df(self):\n \n if ((self.the_class) and (isinstance(self.the_class, int))):\n\n # Create the bins from the classes\n self.data['the_class'] = LogReg.create_the_class(self, self.data.iloc[:,0])\n \n # Compute the probability\n the_sum = self.data.iloc[:,1:].groupby('the_class').sum()\n the_count = self.data.iloc[:,1:].groupby('the_class').count()\n self.class_prob = (the_sum / the_count).reset_index()\n \n # Remove classes from the main dataframe\n self.data.drop('the_class', axis=1, inplace=True)\n \n else:\n self.class_prob = None",
"def getObjectCols(df):\n return df.columns[df.dtypes=='object']",
"def get_df_object_columns(df):\n return list(df.select_dtypes(include=\"object\").columns)",
"def process_native_data(df: pd.DataFrame):\n processed_df = process_all_labels(df.copy())\n return processed_df",
"def classifier(df):\n y = df.pop('label')\n X = df.values\n X_train, X_test, y_train, y_test = (\n train_test_split(X, y, test_size=0.33, random_state=42)\n )\n gbc = GradientBoostingClassifier(n_estimators=200, learning_rate=0.1, max_features=\"auto\")\n logistic = LogisticRegression(n_jobs=-1)\n mod4 = gbc.fit(X_train, y_train)\n mod3 = logistic.fit(X_train, y_train)",
"def extract_features_from_df(df):\n X_list = []\n for i in range(1, 3 + 1):\n X_list.append([\n df[f'emb{i}'].to_numpy()[np.newaxis, :],\n # np.array(df[f'emb{i}'].values.tolist()).T,\n np.array(df[f'cm{i}'].values.tolist()).T,\n df[f'pr{i}'].to_numpy()[np.newaxis, :],\n df[f'rc{i}'].to_numpy()[np.newaxis, :],\n df[f'iou{i}'].to_numpy()[np.newaxis, :],\n df[f'fpr{i}'].to_numpy()[np.newaxis, :],\n df[f'fnr{i}'].to_numpy()[np.newaxis, :],\n df[f'f1{i}'].to_numpy()[np.newaxis, :],\n df[f'a_exp{i}'].to_numpy()[np.newaxis, :],\n df[f'a_smp{i}'].to_numpy()[np.newaxis, :],\n df[f'fp_edt{i}'].to_numpy()[np.newaxis, :],\n df[f'fn_edt{i}'].to_numpy()[np.newaxis, :],\n # df[f'i_prf{i}'].to_numpy()[np.newaxis, :],\n # df[f'i_suf{i}'].to_numpy()[np.newaxis, :],\n ])\n\n X = None\n for x_l in X_list:\n x = np.vstack(x_l)[:, np.newaxis, :]\n X = x if X is None else np.append(X, x, 1)\n X = X.astype(np.float32).transpose([2, 1, 0]) # [n_samples, 3, n_features]\n\n # add one-hot encoded sample-type feature\n X = np.append(X, np.zeros((*X.shape[:2], X.shape[1])), -1)\n for i in range(X.shape[1]):\n X[:, i, -X.shape[1] + i] = 1\n\n X = X.reshape((-1, X.shape[-1])) # [n_samples * 3, n_features + 3]\n\n return X",
"def df_to_Xy(dataframe):\n y = zip(*dataframe.index)[1]\n y_num = numerize_y(y)\n return (dataframe.values, y_num)",
"def extract_sklearn_features_numerical(columns, dataset):\n X_numerical = dataset[columns].as_matrix().astype(float)\n return Imputer().fit_transform(X_numerical)",
"def fetch_column_names(df):\n return df.dtypes",
"def classification_report_df(report):\n report_data = []\n lines = report.split('\\n')\n for line in lines[2:-6]:\n row = {}\n row_data = line.split(' ')\n row_data = list(filter(None, row_data))\n row['class'] = row_data[0]\n row['precision'] = float(row_data[1])\n row['recall'] = float(row_data[2])\n row['f1_score'] = float(row_data[3])\n row['support'] = float(row_data[4])\n report_data.append(row)\n df_report = pd.DataFrame.from_dict(report_data)\n \n return df_report",
"def getData(df, target_col_name, test_size, show_shapes=True):\n \n \n if df[target_col_name].dtype == \"object\":\n encoder = LabelEncoder()\n df[target_col_name] = encoder.fit_transform(df[target_col_name])\n\n data_without_target = df.drop(columns=target_col_name)\n X_train, X_test, y_train, y_test = train_test_split(data_without_target, df[target_col_name], test_size=test_size, random_state=123)\n \n if show_shapes == True:\n for datas in [X_train, X_test, y_train, y_test]:\n print(datas.shape) \n\n return X_train, X_test, y_train, y_test",
"def choose_features(df):\n X = df.drop('quality', axis=1)\n return X",
"def split_data_columns(df: pd.DataFrame) -> Tuple[np.ndarray, np.ndarray]:\n\n # Index to before the last index, correct for python 0-indexing.\n samples = df.to_numpy(dtype=float)[:, :-1]\n labels = df.to_numpy(dtype=int)[:, -1] - 1\n\n return samples, labels",
"def get_multiclass_training_data():\n fname = \"data/dataset.csv\"\n dataframe = load_data(fname)\n dictionary = extract_dictionary(dataframe)\n X_train = generate_feature_matrix(dataframe, dictionary)\n Y_train = dataframe['label'].values.copy()\n\n return (X_train, Y_train, dictionary)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Minimize x'Cx, where C is the covariance matrix and x is the portfolio weights. The constraints sum(x) = 1 and m'x = 0 are used, with m being the asset means. Optionally the constraint x >= 0 is added when self.positive_constraint is True. l2 regularization can be included, as can a maximum cost constraint.
|
def zero_mean_min_variance(self, x = None, alpha = 0):
if is_none(x):
x = self.x
# in case of mixtures
    mean = np.sum([self.mixture_weights[i] * self.mean[i]
                   for i in range(self.n_mixtures)], axis = 0)
    E_2 = np.sum([self.mixture_weights[i] * (self.mean[i][:,None].dot(self.mean[i][None,:])
                  + self.cov[i]) for i in range(self.n_mixtures)], axis = 0)
cov = E_2 - mean[:,None].dot(mean[None,:])
# suppress output
cvxopt.solvers.options['show_progress'] = False
# objective
P = cvxopt.matrix(cov + alpha * np.identity(self.n_assets))
q = cvxopt.matrix(0.0, (self.n_assets,1))
#### constraints ###
# optional constraint x >= 0 if positive_constraint == True
if self.positive_constraint and np.isinf(self.max_cost):
G = cvxopt.matrix(-np.identity(self.n_assets))
h = cvxopt.matrix(0.0, (self.n_assets, 1))
elif self.positive_constraint:
G = np.empty((self.n_assets + 1, self.n_assets))
G[:-1,:] = -np.identity(self.n_assets)
G[-1,:] = self.cost
G = cvxopt.matrix(G)
h = np.empty(self.n_assets+1)
h[:-1] = 0
h[-1] = self.max_cost
h = cvxopt.matrix(h, (self.n_assets+1, 1))
elif np.isinf(self.max_cost):
G = cvxopt.matrix(0.0, (1, self.n_assets))
h = cvxopt.matrix(0.0)
else:
G = cvxopt.matrix(self.cost, (1, self.n_assets))
h = cvxopt.matrix(self.max_cost)
# sum(x) = 1
A1 = np.ones((1, self.n_assets))
b1 = np.ones((1,1))
# mean.T dot x = 0
A2 = mean.reshape(1, self.n_assets)
b2 = np.zeros((1,1))
# combine them
A = cvxopt.matrix(np.concatenate([A1, A2]))
b = cvxopt.matrix(np.concatenate([b1, b2]))
sol = cvxopt.solvers.qp(P, q, G, h, A, b)
return np.asarray(sol['x']).ravel()
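# Note on the QP mapping: cvxopt.solvers.qp minimizes (1/2) x'Px + q'x subject
# to Gx <= h and Ax = b. Here P is the (mixture) covariance plus alpha*I for
# l2 regularization, q = 0, A stacks the budget row (sum(x) = 1) and the
# zero-mean row (m'x = 0), and G/h encode x >= 0 and/or cost'x <= max_cost.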
|
[
"def minVarPortfolio(self):\n\n try:\n self.inv_cov_matrix = self.inv_cov_matrix\n except:\n raise ValueError(\"Inverse Variance-Covariance matrix not assigned. Call var_covar function\")\n try:\n try:\n _min_variance_vector = self.inv_cov_matrix * self.ones_matrix\n except:\n _min_variance_vector = self.inv_cov_matrix * self.ones_matrix.T\n _sum_of_min_var_vector = np.sum(_min_variance_vector)\n self.normalized_min_variance_vector = _min_variance_vector / _sum_of_min_var_vector #portfolio weights for minimum variance portfolio\n try:\n self.expected_return_of_min_var_portfolio = self.avgReturnMatrix * self.normalized_min_variance_vector\n self.stdOfMinVariancePortfolio = np.array(((self.covariance_matrix * self.normalized_min_variance_vector).T * np.array(self.normalized_min_variance_vector)))**(0.5)\n except:\n raise ValueError('Cant assign the exp return and std of the min varaince portfolio')\n except:\n raise ValueError('Some matrices have been calucaled wrongly. Perhaps its the covar. Check shapes, oth sides shoud be uqula to number of varibles')",
"def test_cvx_simple():\n\n x_ = np.matrix([0.5, 1.5]).T\n\n x = cp.Variable(2)\n c = np.matrix([[2, 1]]).T\n A = np.matrix([[-1, 1],\n [ -1, -1],\n [ 0, -1],\n [ 1, -2],\n ])\n b = np.matrix([[ 1, -2, 0, 4 ]]).T\n\n prob = cp.Problem(\n cp.Minimize( c.T * x ),\n [A * x - b <= 0],\n )\n prob.solve()\n\n print prob.status\n x = np.matrix(x.value)\n print 'x', x\n print 'x*', x_\n assert np.allclose(x, x_)",
"def constraint(self, x) -> float:\n portfolio_capacity = self.portfolio_capacity\n for i in range(len(x)):\n portfolio_capacity -= x[i] * self.share_price[i]\n return portfolio_capacity",
"def maximize(self):\n return Problem(-self.objective, self.constraints).minimize()",
"def minimize_vol(target_return, er, cov):\n import numpy as np\n import edhec_risk_kit as erk\n from scipy.optimize import minimize\n \n n = er.shape[0] #no of assets, since er row headers will be the number of assets\n init_guess = np.repeat(1/n, n) #Initial guess of what the weights should be for a target return on portfolio, Equally weighted portfolio.\n #Repeat function inputs, 1. The number to repeated \n # 2. No. of times you want to repeat the number\n \n #A tuple of min and max weights that is 0% and 100% for each asset, hence multiplied by n. Try running this in console for better understanding.\n bounds = ((0.0, 1.0),)*n \n \n #Adding Constraints\n return_met = {\n 'type':'eq', #constraint type is equality\n 'args': (er,), #extra argument required is er\n 'fun': lambda weights, er: target_return - erk.portfolio_returns(weights, er)\n }\n \n weights_sum_to_1 = {\n 'type':'eq',\n 'fun': lambda weights: np.sum(weights)-1\n }\n \n #Calling the scipy.optimize function of \"Minimize\", now that we have all the inputs i.e. Function, Constraints, Bounds\n results = minimize(erk.portfolio_vol, init_guess,\n args= (cov,), method = \"SLSQP\", #Args means additional arguments required i.e. covariance matrix here for the portfolio vol function above #\"SLSQP\" is the method name for \"Quadratic Optimization\"\n bounds=bounds, #Bounds define minimum and maximum weights as we defined above\n constraints=(return_met, weights_sum_to_1),\n options={'disp':False}\n )\n return results.x",
"def add_minimize(self, co, var):\n if self._minimize is None:\n self._minimize = MinimizeConstraint()\n\n if co == 0:\n return\n\n self._minimize.elements.append((co, var))",
"def add_minimize(self, co, var):",
"def ConvexRegression(X,y):\n p,n = X.shape\n\n #Objective function\n def f(w):\n return ((np.dot(X, w) - y) ** 2).sum()\n \n def jac_f(w):\n return (-(2 * ((y - np.dot(X, w)).T).dot(X)))\n \n #Defining constraints\n def sum_con(w):\n return (np.ones((n)).dot(w) - 1)\n dic_sum_con = {\"type\": \"eq\", \"fun\": sum_con}\n \n def positive_con(w):\n return w\n dic_positive_con = {\"type\": \"ineq\", \"fun\": positive_con}\n \n cons = [dic_sum_con, dic_positive_con]\n \n #Scipy optimization\n result = scipy.optimize.minimize(f, np.ones(n)/n, jac=jac_f, constraints=cons, method=\"SLSQP\")\n \n return result",
"def _minimize_c(c, z=0, a_tilde=1, b_tilde=-1,\n Ascaling=900, omega_M_0=0.25, omega_lambda_0=0.75):\n\n # Fn 1 (LHS of Eqn 18)\n\n Y1 = np.log(2) - 0.5\n Yc = np.log(1+c) - c/(1+c)\n f1 = Y1/Yc\n\n # Fn 2 (RHS of Eqn 18)\n\n # Eqn 14 - Define the mean inner density\n rho_2 = 200 * c**3 * Y1 / Yc\n\n # Eqn 17 rearranged to solve for Formation Redshift\n # essentially when universe had rho_2 density\n zf = (((1 + z)**3 + omega_lambda_0/omega_M_0) *\n (rho_2/Ascaling) - omega_lambda_0/omega_M_0)**(1/3) - 1\n\n # RHS of Eqn 19\n f2 = ((1 + zf - z)**a_tilde) * np.exp((zf - z) * b_tilde)\n\n # LHS - RHS should be zero for the correct concentration\n return(f1-f2)",
"def add_minimize(self, cc, minimize):\n self._minimize = minimize\n self.add_constraint(cc, minimize)",
"def minimize_vol(target_return, er, cov):\n n = er.shape[0]\n initial_weights = np.repeat(1/n, n)\n bounds = ((0.0, 1.0),) * n\n return_is_target = {\n 'type': 'eq',\n 'fun': lambda w: target_return - portfolio_return(w, er)\n }\n weights_sum_to_1 = {\n 'type': 'eq',\n 'fun': lambda w: w.sum() - 1 \n }\n results = minimize(portfolio_vol, initial_weights, args=(cov,), \n method='SLSQP', options={'disp': False},\n constraints=(return_is_target, weights_sum_to_1),\n bounds=bounds)\n return results.x",
"def simplex_solve(c, p):\n\n # First, we combine the constraints and (negated) profit function\n # into a single matrix.\n constraints = np.array(c)[:, 0:-1]\n profit = np.array([-np.array(p)])\n m = np.append(constraints, profit, axis=0)\n\n # We add an identity matrix, which represents the slack variables\n # and profit.\n m = np.append(m, np.identity(len(m)), axis=1)\n\n # We add the constraint values (and a 0 for profit).\n last_column = np.array(c)[:, -1:] # Excludes the bottom row\n last_column = np.append(last_column, [[0]], axis=0)\n\n m = np.append(m, last_column, axis=1)\n\n # Then, run the Simplex loop!\n print(\"Starting m:\\n\", m)\n while True:\n pivot = find_pivot(m)\n\n if pivot is None:\n print(\"Done!\")\n break\n\n print(\"Pivot:\", pivot)\n\n (row_i, col_i) = pivot\n m = pivot_column(m, row_i, col_i) # Execute the pivot\n\n print(\"New m:\\n\", m)\n\n # The profit is the value in the last column, divided by the number\n # in the profit column.\n return m[-1, -1] / m[-1, -2]",
"def calculateSoftCost(self):\n self.solver.Add(self.solver.Sum((self.brkconstraints[i] * self.brkconstraints_cost[i])\n for i in range(self.nconstraints)) == self.cost)",
"def minimum_volatility(self, save_weights=True):\n if not isinstance(save_weights, bool):\n raise ValueError(\"save_weights is expected to be a boolean.\")\n args = (self.mean_returns.values, self.cov_matrix.values)\n # optimisation\n result = sco.minimize(\n min_fun.portfolio_volatility,\n args=args,\n x0=self.x0,\n method=self.method,\n bounds=self.bounds,\n constraints=self.constraints,\n )\n # if successful, set self.last_optimisation\n self.last_optimisation = \"Minimum Volatility\"\n # set optimal weights\n if save_weights:\n self.weights = result[\"x\"]\n self.df_weights = self._dataframe_weights(self.weights)\n return self.df_weights\n else:\n # not setting instance variables, and returning array instead\n # of pandas.DataFrame\n return result[\"x\"]",
"def calc_constraint_at(self, i: int, x: np.ndarray) -> float:\n return self.constraints[i](x)",
"def minimize(self, cost_function, initial_params=None, callback=None):\n\n if self.keep_value_history:\n cost_function = recorder(cost_function)\n\n jacobian = None\n if hasattr(cost_function, \"gradient\") and callable(\n getattr(cost_function, \"gradient\")\n ):\n jacobian = cost_function.gradient\n\n result = scipy.optimize.minimize(\n cost_function,\n initial_params,\n method=self.method,\n options=self.options,\n constraints=self.constraints,\n jac=jacobian,\n )\n opt_value = result.fun\n opt_params = result.x\n\n nit = result.get(\"nit\", None)\n nfev = result.get(\"nfev\", None)\n\n return optimization_result(\n opt_value=opt_value,\n opt_params=opt_params,\n nit=nit,\n nfev=nfev,\n history=cost_function.history if self.keep_value_history else [],\n )",
"def add_model_constraints(self):\n assert (\n self.state != self.STATE_UNDEFINED\n ), \"set_data() must be called before add_model_constraints()\"\n for k in self.get_required_components():\n m = self.models[k]\n tk = \"T\" + k.lower() # cost(time) key\n nk = \"N\" + k.lower() # nprocs key\n for i in range(0, len(m.cost) - 1):\n slope = (m.cost[i + 1] - m.cost[i]) / (\n 1.0 * m.ntasks[i + 1] - m.ntasks[i]\n )\n self.constraints.append(\n [\n self.X[tk] - slope * self.X[nk]\n >= m.cost[i] - slope * m.ntasks[i],\n \"T%s - %f*N%s >= %f\"\n % (\n k.lower(),\n slope,\n k.lower(),\n m.cost[i] - slope * m.ntasks[i],\n ),\n ]\n )\n if slope > 0:\n logger.warning(\n \"WARNING: Nonconvex cost function for model \"\n \"%s. Review costs to ensure data is correct \"\n \"(--graph_models or --print_models)\",\n k,\n )\n\n break\n if slope == 0:\n break",
"def m0positiveprior(self,width,x,components):\n\t\tval=components[0].copy().flatten()\n\t\tisnegative=val<0\n\t\tval[~isnegative]=0\n\t\tjacobian=np.zeros((val.size,self.npar))\n\t\tjacobian[:,self.im0]=self.regularizationDerivs[0].reshape((-1,self.im0.size)).copy()\n\t\tjacobian[~isnegative,:]=0\n\t\treturn val/width,val,jacobian",
"def activ(self, x):\n\n # Check that inputs are consistent:\n errstring = self.consist('gmm', x)\n if errstring != None:\n raise Exception(errstring)\n\n ndata = x.shape[0]\n a = np.zeros((ndata, self.ncentres)) # Preallocate matrix\n\n if self.covar_type == 'spherical':\n # Calculate squared norm matrix, of dimension (ndata, ncentres)\n n2 = dist2(x, self.centres)\n\n # Calculate width factors\n wi2 = 2*self.covars\n normal = (np.pi*wi2)**(float(self.nin)/2.0)\n \n # Now compute the activations\n a = np.exp(-(n2/wi2))/normal\n\n elif self.covar_type == 'diag':\n normal = (2*np.pi)**(float(self.nin)/2.0)\n s = np.prod(np.sqrt(self.covars), 1)\n for j in range(self.ncentres):\n diffs = x - self.centres[j, :]\n a[:, j] = np.exp(-0.5*np.sum((np.multiply(diffs, diffs)/self.covars[j:j+1, :]), 1))/(normal*s[j])\n\n elif self.covar_type == 'full':\n normal = (2*np.pi)**(float(self.nin)/2.0)\n for j in range(self.ncentres):\n diffs = x - self.centres[j, :]\n # Use Cholesky decomposition of covariance matrix to speed computation\n c = la.cholesky(self.covars[:, :, j])\n temp = la.solve(c, diffs.T).T\n a[:, j] = np.exp(-0.5*np.sum(np.multiply(temp, temp), 1))/(normal*np.prod(np.diag(c)))\n\n elif self.covar_type == 'ppca':\n log_normal = self.nin*math.log(2*np.pi)\n d2 = np.zeros((ndata, self.ncentres))\n logZ = np.zeros(self.ncentres)\n for i in range(self.ncentres):\n k = 1 - self.covars[i]/self.lambd[i]\n logZ[i] = log_normal + self.nin*math.log(self.covars[i]) - \\\n np.sum(np.log(1 - k))\n diffs = x - self.centres[i, :]\n proj = np.dot(diffs, self.U[:, :, i])\n d2[:,i] = (np.multiply(diffs,diffs).sum(1) - \\\n np.multiply(np.multiply(proj, k), proj).sum(1))/self.covars[i]\n a = np.exp(-0.5*(d2 + logZ))\n else:\n raise Exception('Unknown covariance type ' + self.covar_type)\n return a"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Determine the optimal l2 value for the min_variance_upper_mean_bound approach by n_splits x n_repeats repeated k-fold cross-validation
|
def min_variance_upper_mean_bound_cv(self):
    # Log-spaced grid of eight candidates from 1e-9 up to 1e-5.5
    # (the -5.01 stop keeps 10**-5.5 as the last arange value).
    l2 = 10**np.arange(-9, -5.01, 0.5)
    return self.internal_cv(self.min_variance_upper_mean_bound, l2)
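A minimal standalone sketch of the same search, assuming scikit-learn's RepeatedKFold as the cv_generator and a hypothetical fit_weights(x, alpha) callable standing in for min_variance_upper_mean_bound:

import numpy as np
from sklearn.model_selection import RepeatedKFold

def pick_l2(x, fit_weights, n_splits=5, n_repeats=10):
    l2_grid = 10.0 ** np.arange(-9, -5.01, 0.5)
    se = np.zeros(l2_grid.size)
    for train, test in RepeatedKFold(n_splits=n_splits, n_repeats=n_repeats).split(x):
        for j, l2 in enumerate(l2_grid):
            w = fit_weights(x[train], alpha=l2)                 # weights fitted on the training fold
            se[j] += np.sum(np.sum(w * x[test], axis=1) ** 2)   # held-out squared error
    return l2_grid[np.argmin(se)]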
|
[
"def cross_validation(X, Y, sigmas, llambdas, Ntot):\n parameters = []\n random.seed(666)\n\n for i in range(len(sigmas)):\n K = laplacian_kernel(X, X, sigmas[i])\n\n for j in range(len(llambdas)):\n \n for m in range(5):\n maes = []\n split = range(Ntot)\n random.shuffle(split)\n\n train = int(len(split)*0.8)\n test = int(Ntot - train)\n\n training_index = split[:train]\n test_index = split[-test:]\n\n y_train = Y[training_index]\n y_test = Y[test_index]\n\n C = deepcopy(K[training_index][:,training_index])\n C[np.diag_indices_from(C)] += llambdas[j]\n\n alpha = cho_solve(C, y_train)\n\n y_est = np.dot((K[training_index][:,test_index]).T, alpha)\n\n diff = y_est - y_test\n mae = np.mean(np.abs(diff))\n maes.append(mae)\n\n parameters.append([llambdas[j], sigmas[i], np.mean(maes)])\n\n maes = [mae[2] for mae in parameters]\n index = maes.index(min(maes))\n\n print(\"minimum MAE after CV: \", min(maes))\n\n return parameters[index][0], parameters[index][1]",
"def internal_cross_validation(cls, kwargs, paramname, paramrange, statistic,\n X, y):\n\n # Delay these imports so that we don't have circular imports!\n from main import get_folds\n from stats import StatisticsManager\n\n # Much of this code is sourced from main.py's template. It simply creates\n # a StatisticsManager for each parameter value. It does the cross\n # validation on the same folds and picks the best value of the parameter.\n stats_managers = [StatisticsManager() for _ in paramrange]\n folds = get_folds(X, y, 5)\n for train_X, train_y, test_X, test_y in folds:\n for value, stats_manager in zip(paramrange, stats_managers):\n kwargs[paramname] = value\n train_time = time.time()\n classifier = cls(**kwargs)\n classifier.fit(train_X, train_y)\n train_time = train_time - time.time()\n predictions = classifier.predict(test_X)\n scores = classifier.predict_proba(test_X)\n stats_manager.add_fold(test_y, predictions, scores, train_time)\n log.debug('internal-cv: fold completed')\n\n # Get values for our statistic of interest.\n stat_values = []\n for i, mgr in enumerate(stats_managers):\n # pooled might as well be True, since we don't want a std\n stat = mgr.get_statistic(statistic, pooled=True)\n stat_values.append(stat)\n log.debug('internal-cv gets %s=%r for param %s=%r' %\n (statistic, stat, paramname, paramrange[i]))\n log.debug('internal-cv gets argmax=%d' % np.argmax(stat_values))\n # Get the parameter value that maximizes our statistic.\n selection = paramrange[np.argmax(stat_values)]\n log.info('internal-cv selects %s=%r' % (paramname, selection))\n return selection",
"def estimate_var(neighborhood: Neighborhood, k: int) -> float:\n upper = 0\n lower = len(neighborhood.neighbors) - 1\n est_residual = estimate_residual(neighborhood, k)\n for neighbor in neighborhood.neighbors:\n residual = calculate_score(neighbor.feat_vector[k:])\n upper += math.pow(residual - est_residual, 2)\n\n return upper/lower",
"def optimal_variance_nll(self, x):\n \n sigma = ((x - self.mu) ** 2).mean().sqrt()\n return Gaussian(mu=self.mu, sigma=sigma).nll(x)",
"def boostrapping_confidence_interval(pred_all, gs_all, eva_func, ci):\n import numpy as np\n import random\n # set random seed\n random.seed(0)\n\n # prediction-groundtruth pairs from all five fold cross validation\n tmp = np.array([pred_all, gs_all]).T\n # calculate overall correlation\n mb = eva_func(tmp[:,0], tmp[:,1])\n # start boostrapping ...\n eva_all = []\n for i in range(100):\n tmp_new = random.choices(tmp, k = len(tmp))\n tmp_new = np.array(tmp_new)\n eva = eva_func(tmp_new[:,0], tmp_new[:,1])\n eva_all.append(eva)\n eva_all = sorted(eva_all)\n #print(eva_all)\n lb = eva_all[round(100*(0.5-ci*0.5))]\n ub = eva_all[round(100*(0.5+ci*0.5))]\n return mb, lb, ub",
"def predefined_cross_validation(self, param_grid, fit_params, folds=None, n_jobs=-1):\n if self.is_random_search:\n # If it is random search, creates 6 random combinations of\n # the parameters grid/distribution for each fold\n paramGrid = ParameterSampler(param_grid, 6)\n else:\n # Regular GridSearch, obtains a combination of all possible parameters\n paramGrid = ParameterGrid(param_grid)\n print(self.estimator)\n\n # Find optimal threshold\n if self.estimator.algorithm_name == 'modSAR':\n internal_samples_sim = self.data_split.get_sim_matrix_internal_samples(self.split_number)\n _, threshold = GraphUtils.find_optimal_threshold(internal_samples_sim)\n\n fit_params['threshold'] = threshold\n\n \"\"\" Creats parallel tasks for the cross-validation.\n This is the same function used in the source code of GridSearchCV in sklearn.\n Parallel function will take care of all for loops defined here and will correctly\n allocate more computational resources when each for loop complete.\n Each for loop runs the function _fit_and_score defined above \"\"\"\n cross_validation_results = \\\n Parallel(n_jobs=n_jobs, verbose=True, pre_dispatch='n_jobs') \\\n (delayed(self._fit_and_score)(deepcopy(self.estimator), fold, params, fit_params)\n for fold in range(1, self.n_splits + 1) if folds is None or (folds is not None and fold in folds)\n for params in paramGrid)\n\n # After cross-validation, gather results and picks best model\n (results, cv_models) = zip(*cross_validation_results)\n results = pd.concat(results, ignore_index=True)\n\n bestFold = results[\"test_mae\"].idxmin()\n # Shows parameters of the best fold\n print(\"Metrics for best model in cross-validation:\")\n print(results.iloc[bestFold])\n best_model = cv_models[bestFold]\n\n # External Validation\n external_X = self.data_split.get_external_samples(self.split_number)\n external_y = self.data_split.get_external_Y(self.split_number)\n\n if self.estimator.algorithm_name == \"modSAR\":\n id_external_samples = self.data_split.get_id_external_samples(self.split_number)\n externalX_smiles = self.data_split.qsar_dataset.X_smiles.loc[id_external_samples]\n\n pred = best_model.predict(external_X, externalX_smiles)\n else:\n pred = best_model.predict(external_X)\n\n mae_external = mean_absolute_error(external_y, pred)\n rmse_external = mean_squared_error(external_y, pred) ** 0.5\n\n if best_model.algorithm_name in [\"OplraRegularised\", \"OplraFeatureSelection\"]:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': results.iloc[bestFold]['beta'],\n 'lambda': results.iloc[bestFold]['lambda'],\n 'no_regions': results.iloc[bestFold]['no_regions'],\n 'no_features': results.iloc[bestFold]['no_features']},\n index=np.arange(1))\n elif best_model.algorithm_name in [\"OplraEnsemble\"]:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 
'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': results.iloc[bestFold]['beta'],\n 'lambda': results.iloc[bestFold]['lambda'],\n 'no_repeats': results.iloc[bestFold]['no_repeats'],\n 'resampling': results.iloc[bestFold]['resampling'],\n 'avg_no_regions': results.iloc[bestFold]['avg_no_regions'],\n 'no_features': results.iloc[bestFold]['no_features']},\n index=np.arange(1))\n elif best_model.algorithm_name in [\"modSAR\"]:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'no_modules': results.iloc[bestFold]['no_modules'],\n 'no_classes': results.iloc[bestFold]['no_classes'],\n 'threshold': results.iloc[bestFold]['threshold'],\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': results.iloc[bestFold]['beta'],\n 'lambda': results.iloc[bestFold]['lambda']},\n index=np.arange(1))\n else:\n external_results = pd.DataFrame({'splitStrategy': 1, 'splitNumber': self.split_number,\n 'dataset': self.dataset_name, 'datasetVersion': self.dataset_version,\n 'fold': results.iloc[bestFold][\"fold\"], 'algorithm': best_model.algorithm_name,\n 'algorithm_version': best_model.algorithm_version, 'internal': 'FALSE',\n 'no_modules': None,\n 'no_classes': None,\n 'threshold': None,\n 'train_mae': 'NA', 'test_mae': mae_external,\n 'train_rmse': 'NA', 'test_rmse': rmse_external, 'fit_time': 'NA',\n 'beta': None,\n 'lambda': None},\n index=np.arange(1))\n\n results = pd.concat([results, external_results], ignore_index=True)\n\n return results, best_model",
"def cross_validation(dataset, k=10):\n fold_errV = 0\n n = len(dataset.examples)\n examples = dataset.examples\n for fold in range(k):\n random.shuffle(dataset.examples)\n train_data, val_data = train_test_split(dataset, fold * (n / k), (fold + 1) * (n / k))\n dataset.examples = train_data\n h = DecisionTreeLearner(dataset)\n fold_errV += err_ratio(h, dataset, val_data)\n\n # Reverting back to original once test is completed\n dataset.examples = examples\n return fold_errV/k",
"def _find_split_parameters(self, X, Y, n_min_leaf, n_trials):\n \"\"\"\n y\n ^\n |x x | o x \n | x| o \n |x |o o \n |_____|_______>\n | x\n \"\"\"\n # Instead of n_trials, I search for ALL points and dimensions\n X_len = X.shape[0]\n X_dim = X.shape[1]\n max_score = -np.float('inf')\n FOUND = False\n for d in range(X_dim):\n for i in range(X_len):\n # each X[i][d] is an split candidate\n Y_left = Y[X[:,d] < X[i,d], :]\n Y_right = Y[X[:,d] >= X[i,d], :]\n if (len(Y_left) <= n_min_leaf) or (len(Y_right) <= n_min_leaf):\n # This split is no good, continue with next one\n continue\n score = self._information_gain(Y, Y_left, Y_right)\n if score > max_score:\n FOUND = True\n split_dim = d\n split_threshold = X[i, d]\n max_score = score\n #print('New split')\n if FOUND:\n #print(\"X: \", X.shape)\n #print(\"y: \", Y.shape) \n #print('best split dim/threshold: ', split_dim, split_threshold)\n #plt.scatter(X[:, split_dim], np.argmax(Y[:], axis=1))\n #plt.scatter(split_threshold, 0, c='r')\n #plt.show()\n return (split_dim, split_threshold)\n else:\n return None",
"def cross_validation(Ps, data, algo, kfolds=5, **kwargs):\n scores_tr = np.zeros((kfolds, len(Ps)))\n scores_te = np.zeros((kfolds, len(Ps)))\n X_tr, y_tr, X_te, y_te, _ = data\n X_train_ = pd.concat((X_tr, X_te)).reset_index(drop=True).sample(frac=1)\n y_train_ = pd.concat((y_tr, y_te)).reset_index(drop=True).iloc[X_train_.index]\n X_train_, y_train_ = X_train_.reset_index(drop=True), y_train_.reset_index(drop=True)\n n = X_train_.shape[0]\n p = int(n // kfolds)\n for k in tqdm(range(kfolds)):\n print('Fold {}'.format(k+1))\n q = p * (k + 1) + n % kfolds if k == kfolds - 1 else p * (k + 1)\n idx_val = np.arange(p * k, q)\n idx_train = np.setdiff1d(np.arange(n), idx_val)\n X_train, y_train = X_train_.iloc[idx_train, :], y_train_.iloc[idx_train, :]\n X_val, y_val = X_train_.iloc[idx_val, :], y_train_.iloc[idx_val, :]\n s_tr, s_te = [], []\n for P in Ps:\n if algo == 'CSVM':\n alg = C_SVM(C=P, print_callbacks=False, **kwargs)\n elif algo == 'KLR':\n alg = KLR(lbda=P, **kwargs)\n elif algo == 'KRR':\n alg = KRR(lbda=P, **kwargs)\n else:\n NotImplementedError('Please choose between \"CSVM\", \"KRR\" or \"KLR\"')\n alg.fit(X_train, y_train)\n pred_tr = alg.predict(X_train)\n score_tr = alg.score(pred_tr, y_train)\n pred_te = alg.predict(X_val)\n score_te = alg.score(pred_te, y_val)\n s_tr.append(score_tr)\n s_te.append(score_te)\n print('Constant={}, train_acc={:0.4f}, val_acc={:0.4f}'.format(P, score_tr, score_te))\n scores_tr[k], scores_te[k] = s_tr, s_te\n mean_scores_tr, mean_scores_te = np.mean(scores_tr, axis=0), np.mean(scores_te, axis=0)\n p_opt = Ps[np.argmax(mean_scores_te)]\n print('Best constant={}, val_acc={:0.4f}'.format(p_opt, np.max(mean_scores_te)))\n return p_opt, scores_tr, scores_te, mean_scores_tr, mean_scores_te",
"def get_error_bound(x, y, model):\n\n var = variance(x, y, model)\n training_examples = len(y)\n bound = Z_95*sqrt(var/training_examples)\n print(bound)\n return bound",
"def score_cv(data, dim, lag, number_of_splits=10, validation_fraction=0.5):\n # we temporarily suppress very short-lived progress bars\n # with pyemma.util.contexts.settings(show_progress_bars=False):\n nval = int(len(data) * validation_fraction)\n scores = np.zeros(number_of_splits)\n for n in range(number_of_splits):\n ival = np.random.choice(len(data), size=nval, replace=False)\n inputdata = [d for i, d in enumerate(data) if i not in ival]\n vamp = pyemma.coordinates.vamp(inputdata, lag=lag, dim=dim)\n scores[n] = vamp.score([d for i, d in enumerate(data) if i in ival])\n return scores",
"def loess_normative_model(self,cv_folds=1):\n if self.bins is None:\n self._create_bins()\n \n # Format data\n data = self.data[[self.conf, self.score]].to_numpy(dtype=np.float64)\n\n # Take the controls\n ctr_mask, _ = self._get_masks()\n ctr = data[ctr_mask]\n\n # Cross-validation\n if cv_folds == 1:\n self.zm,self.zstd,self.zci = loess_fit(ctr,self.bins,self.bin_width)\n m, std = loess_predict(data,self.bins,self.zm,self.zstd)\n\n rmse = RMSE(self.data[self.score].values[ctr_mask],m[ctr_mask])\n smse = SMSE(self.data[self.score].values[ctr_mask],m[ctr_mask])\n \n else:\n kf = KFold(n_splits=cv_folds, shuffle=True)\n rmse = []\n smse = []\n print(f'Starting {cv_folds} folds of CV...')\n for i, (train_index, test_index) in enumerate(kf.split(ctr)):\n ctr_train, ctr_test = ctr[train_index], ctr[test_index]\n cv_zm,cv_zstd,_ = loess_fit(ctr_train,self.bins,self.bin_width)\n cv_m, _ = loess_predict(ctr_test,self.bins,cv_zm,cv_zstd)\n r = RMSE(ctr_test[:,1],cv_m)\n s = SMSE(ctr_test[:,1],cv_m)\n print(f'CV Fold {i}: RMSE={r:.3f} - SMSE={s:.3f}')\n rmse.append(r)\n smse.append(s)\n print('Done!')\n\n rmse = np.mean(rmse)\n smse = np.mean(smse)\n print(f'Average: RMSE={rmse:.3f} - SMSE={smse:.3f}')\n\n self.zm,self.zstd,self.zci = loess_fit(ctr,self.bins,self.bin_width)\n m, std = loess_predict(data,self.bins,self.zm,self.zstd)\n\n self.data['LOESS_pred'] = m\n self.data['LOESS_sigma'] = std\n self.data['LOESS_residuals'] = self.data[self.score] - self.data['LOESS_pred']\n self.data['LOESS_z'] = self.data['LOESS_residuals']/self.data['LOESS_sigma']\n\n self.RMSE_LOESS = rmse\n self.SMSE_LOESS = smse\n\n self._loess_rank()",
"def k_fold_validation(self, hyperparameters, X, y, scoring=None):\n model_instance = MLPipeline(self._pipeline)\n X = pd.DataFrame(X)\n y = pd.Series(y)\n\n if hyperparameters:\n model_instance.set_hyperparameters(hyperparameters)\n\n if self._problem_type == 'regression':\n scorer = self.regression_metrics[scoring or 'R2 Score']\n else:\n scorer = self.classification_metrics[scoring or 'F1 Macro']\n\n scores = []\n kf = KFold(n_splits=10, random_state=None, shuffle=True)\n for train_index, test_index in kf.split(X):\n model_instance.fit(X.iloc[train_index], y.iloc[train_index])\n y_pred = model_instance.predict(X.iloc[test_index])\n scores.append(scorer(y.iloc[test_index], y_pred))\n\n return np.mean(scores)",
"def cross_validation(num_folds, LAMBDA):\n\t\t\trow_ids = np.array(range(X_train.shape[0]))\n\t\t\t# devide the trainning set in to folds\n\t\t\t# each fold contains (len(row_ids) - len(row_ids) % num_folds)/num_folds\n\t\t\t# the remaining training instance is pushed in to the last fold to prevent error in the np.split() function\n\t\t\tvalid_ids = np.split(row_ids[:len(row_ids) - len(row_ids) % num_folds], num_folds)\n\t\t\tvalid_ids[-1] = np.append(valid_ids[-1], row_ids[len(row_ids - len(row_ids) % num_folds):])\n\t\t\ttrain_ids = [[k for k in row_ids if k not in valid_ids[i]] for i in range(num_folds)]\n\t\t\ttotal_RSS = 0\n\t\t\tfor i in range(num_folds):\n\t\t\t\ttrain_part = {'X': X_train[train_ids[i]], 'Y': Y_train[train_ids[i]]}\n\t\t\t\tvalid_part = {'X': X_train[valid_ids[i]], 'Y': Y_train[valid_ids[i]]}\n\t\t\t\tW = self.fit(train_part['X'], train_part['Y'], LAMBDA)\n\t\t\t\tY_predicted = self.predict(W, valid_part['X'])\n\t\t\t\ttotal_RSS += self.computeRss(Y_predicted, valid_part['Y'])\n\t\t\treturn total_RSS / num_folds",
"def run_validate(X,y,cvtype):\n numsubs=X.shape[2]\n X=np.reshape(X,[-1,numsubs])\n\n \n if cvtype == 'LOO':\n behav_pred_pos=np.zeros([numsubs])\n behav_pred_neg=np.zeros([numsubs])\n for loo in range(0,numsubs):\n\n print(\"Running LOO, sub no:\",loo)\n \n train_mats=np.delete(X,[loo],axis=1)\n train_pheno=np.delete(pheno,[loo],axis=0)\n \n test_mat=X[:,loo]\n test_pheno=y[loo]\n\n pos_fit,neg_fit,posedges,negedges=train_cpm(train_mats,train_pheno)\n\n pe=np.sum(test_mat[posedges.flatten().astype(bool)])/2\n ne=np.sum(test_mat[negedges.flatten().astype(bool)])/2\n\n if len(pos_fit) > 0:\n behav_pred_pos[loo]=pos_fit[0]*pe + pos_fit[1]\n else:\n behav_pred_pos[loo]='nan'\n\n if len(neg_fit) > 0:\n behav_pred_neg[loo]=neg_fit[0]*ne + neg_fit[1]\n else:\n behav_pred_neg[loo]='nan'\n\n \n Rpos=stats.pearsonr(behav_pred_pos,pheno)[0]\n Rneg=stats.pearsonr(behav_pred_neg,pheno)[0]\n\n return Rpos,Rneg\n\n\n elif cvtype == '5k':\n bp,bn,ba=kfold_cpm(X,y,5)\n\n\n\n ccp=np.array([stats.pearsonr(bp[i,:],ba[i,:]) for i in range(0,5)])\n Rpos_mean=ccp.mean(axis=0)[0]\n\n ccn=np.array([stats.pearsonr(bn[i,:],ba[i,:]) for i in range(0,5)])\n Rneg_mean=ccn.mean(axis=0)[0]\n\n\n\n elif cvtype == '10k':\n bp,bn,ba=kfold_cpm(X,y,10)\n\n\n ccp=np.array([stats.pearsonr(bp[i,:],ba[i,:]) for i in range(0,10)])\n Rpos_mean=ccp.mean(axis=0)[0]\n\n ccn=np.array([stats.pearsonr(bn[i,:],ba[i,:]) for i in range(0,10)])\n Rneg_mean=ccn.mean(axis=0)[0]\n\n\n\n elif cvtype == 'splithalf':\n bp,bn,ba=kfold_cpm(X,y,2)\n\n ccp=np.array([stats.pearsonr(bp[i,:],ba[i,:]) for i in range(0,2)])\n Rpos_mean=ccp.mean(axis=0)[0]\n\n ccn=np.array([stats.pearsonr(bn[i,:],ba[i,:]) for i in range(0,2)])\n Rneg_mean=ccn.mean(axis=0)[0]\n\n\n else:\n raise Exception('cvtype must be LOO, 5k, 10k, or splithalf')\n\n\n return Rpos_mean,Rneg_mean",
"def cross_validate(cv_data, data_splitter, estimator, estimator_params,\n lamdas_list, error_fn, error_fn_params, verbose=True,\n save_x_hats=False):\n\n # NOTE: lamdas_list must be a tuple of *np.ndarrays*\n lamdas_sizes = tuple(lamdas.size for lamdas in lamdas_list)\n num_lamdas = np.prod(lamdas_sizes)\n\n num_folds = data_splitter.num_folds\n error = np.empty((num_folds, num_lamdas))\n\n # Iterate over \"folds\"\n for k, (training_indices, test_indices) in enumerate(data_splitter):\n training_data = cv_data[..., training_indices]\n test_data = cv_data[..., test_indices]\n\n if verbose:\n print(k, end=' ', flush=True)\n\n # Ininitialize the state variable, which the estimator uses to avoid\n # re-computing certain expressions. This may change for different\n # training data, so we re-initialize it in every iteration.\n state = None\n\n # Find the best x for different regression parameters\n for i, lamdas in enumerate(itertools.product(*lamdas_list)):\n if i == 0:\n (x_hat_temp, state) = estimator(training_data, lamdas,\n estimator_params, state)\n x_hat = np.empty((x_hat_temp.size, num_lamdas),\n dtype=x_hat_temp.dtype)\n # NOTE: x_hat returned by the estimator must be 1D!\n x_hat[:, i] = x_hat_temp\n else:\n x_hat[:, i], state = estimator(training_data, lamdas,\n estimator_params, state)\n\n error[k, :] = error_fn(test_data, x_hat, lamdas_list, error_fn_params)\n\n if save_x_hats:\n if k == 0:\n x_hats = np.empty((x_hat.shape[0], num_folds, num_lamdas),\n dtype=x_hat.dtype)\n x_hats[:, k, :] = x_hat\n\n # Compute mean error\n mean_error = error.mean(axis=0)\n\n # Hence compute best lamda values\n # XXX Check this for multi-dimensional lamdas_list!!\n lamda_star_ravelled_index = np.argmin(mean_error)\n lamda_star_indices = np.unravel_index(lamda_star_ravelled_index,\n lamdas_sizes)\n lamda_stars = (lamdas_list[i][lamda_star_index]\n for (i, lamda_star_index) in enumerate(lamda_star_indices))\n\n if save_x_hats:\n return (lamda_stars, lamda_star_indices, error, mean_error, x_hats)\n\n return (lamda_stars, lamda_star_indices, error, mean_error)",
"def LinearTuning(dataset,k,budgets,Gammas,Lambdas,Alphas,runs,target='acc',l1=True,K=False):\n # Printing info\n print 'Linear tuning' + dataset.filename\n # Dataset loading\n X = dataset.get('/data')[:,:]\n if l1:\n X = L1Normalization(X)\n LG = dataset.get('/labels')[:]\n n = len(budgets)\n # Budget size tuning\n averageAcc = np.zeros((n,))\n averageObj = np.zeros((n,))\n for i in xrange(n):\n print 'Tuning budget %i of %i'%(i+1,n)\n budget = budgets[i]\n if K:\n KM = MiniBatchKMeans(n_clusters = budget)\n KM.fit(X)\n Budget = KM.cluster_centers_\n accs = np.zeros((runs,))\n objs = np.zeros((runs,))\n for j in xrange(runs):\n ok = OKMF(budget,k,30,10,0.8,0.1,0.2,'linear')\n try:\n if K:\n ok.fit(X,Budget=Budget)\n else:\n ok.fit(X)\n LF = np.argmax(ok.H,axis=0)\n if target == 'acc':\n accs[j] = accuracy(LF,LG)\n elif target == 'obj':\n objs[j] = ok.Error(X)\n except np.linalg.LinAlgError:\n print \"LinAlgError found\"\n if target == 'acc':\n accs[j] = float('-inf')\n elif target == 'obj':\n objs[j] = float('inf')\n del ok\n averageAcc[i] = np.average(accs)\n averageObj[i] = np.average(objs)\n # Tuned budget\n if target == 'acc':\n budget = budgets[np.argmax(averageAcc)]\n elif target == 'obj':\n budget = budgets[np.argmin(averageObj)]\n del averageAcc,averageObj\n Budget = None\n if K:\n KM = MiniBatchKMeans(n_clusters = budget)\n KM.fit(X)\n Budget = KM.cluster_centers_\n # Gamma tuning\n n = len(Gammas)\n averageAcc = np.zeros((n,))\n averageObj = np.zeros((n,))\n for i in xrange(n):\n print 'Tuning Gamma %i of %i'%(i+1,n)\n Gamma = Gammas[i]\n accs = np.zeros((runs,))\n objs = np.zeros((runs,))\n for j in xrange(runs):\n ok = OKMF(budget,k,30,10,Gamma,0.1,0.2,'linear')\n try:\n if K:\n ok.fit(X,Budget=Budget)\n else:\n ok.fit(X)\n LF = np.argmax(ok.H,axis=0)\n if target == 'acc':\n accs[j] = accuracy(LF,LG)\n elif target == 'obj':\n objs[j] = ok.Error(X)\n except np.linalg.LinAlgError:\n print \"LinAlgError found\"\n if target == 'acc':\n accs[j] = float('-inf')\n elif target == 'obj':\n objs[j] = float('inf')\n del ok\n averageAcc[i] = np.average(accs)\n averageObj[i] = np.average(objs)\n # Tuned Gamma\n if target == 'acc':\n Gamma = Gammas[np.argmax(averageAcc)]\n elif target == 'obj':\n Gamma = Gammas[np.argmin(averageObj)]\n del averageAcc,averageObj\n # Lambda tuning\n n = len(Lambdas)\n averageAcc = np.zeros((n,))\n averageObj = np.zeros((n,))\n for i in xrange(n):\n print 'Tuning Lambda %i of %i'%(i+1,n)\n Lambda = Lambdas[i]\n accs = np.zeros((runs,))\n objs = np.zeros((runs,))\n for j in xrange(runs):\n ok = OKMF(budget,k,30,10,Gamma,Lambda,0.2,'linear')\n try:\n if K:\n ok.fit(X,Budget=Budget)\n else:\n ok.fit(X)\n LF = np.argmax(ok.H,axis=0)\n if target == 'acc':\n accs[j] = accuracy(LF,LG)\n elif target == 'obj':\n objs[j] = ok.Error(X)\n except np.linalg.LinAlgError:\n print \"LinAlgError found\"\n if target == 'acc':\n accs[j] = float('-inf')\n elif target == 'obj':\n objs[j] = float('inf')\n del ok\n averageAcc[i] = np.average(accs)\n averageObj[i] = np.average(objs)\n # Tuned Lamnda\n if target == 'acc':\n Lambda = Lambdas[np.argmax(averageAcc)]\n elif target == 'obj':\n Lambda = Lambdas[np.argmin(averageObj)]\n del averageAcc,averageObj\n # Alpha tuning\n n = len(Alphas)\n averageAcc= np.zeros((n,))\n averageObj = np.zeros((n,))\n for i in xrange(n):\n print 'Tuning Alpha %i of %i'%(i+1,n)\n Alpha = Alphas[i]\n accs = np.zeros((runs,))\n objs = np.zeros((runs,))\n for j in xrange(runs):\n ok = OKMF(budget,k,30,10,Gamma,Lambda,Alpha,'linear')\n try:\n if K:\n 
ok.fit(X,Budget=Budget)\n else:\n ok.fit(X)\n LF = np.argmax(ok.H,axis=0)\n if target == 'acc':\n accs[j] = accuracy(LF,LG)\n elif target == 'obj':\n objs[j] = ok.Error(X)\n except np.linalg.LinAlgError:\n print \"LinAlgError found\"\n if target == 'acc':\n accs[j] = float('-inf')\n elif target == 'obj':\n objs[j] = float('inf')\n del ok\n averageAcc[i] = np.average(accs)\n averageObj[i] = np.average(objs)\n # Tuned Alpha\n if target == 'acc':\n Alpha = Alphas[np.argmax(averageAcc)]\n elif target == 'obj':\n Alpha = Alphas[np.argmin(averageObj)]\n del averageAcc,averageObj\n result = dict()\n result['budget'] = budget\n result['Gamma'] = Gamma\n result['Lambda'] = Lambda\n result['Alpha'] = Alpha\n return result",
"def customize_cross_validation(pre_fold_y, pre_fold_X, folds, seed):\n # Instance of StratifiedKFold\n skf = StratifiedKFold(n_splits=folds, shuffle=True, random_state=seed)\n custom_cv = []\n\n # Standard way to\n for train_index, test_index in skf.split(pre_fold_X, pre_fold_y):\n print(test_index)\n print(train_index)\n\n X_train, X_test = pre_fold_X[train_index], pre_fold_X[test_index]\n y_train, y_test = pre_fold_y[train_index], pre_fold_y[test_index]\n\n balanced_train_samples = balanced_subsample(pre_balance_x=X_train, pre_balance_y=y_train, balance_seed=seed)\n balanced_test_samples = balanced_subsample(pre_balance_x=X_test, pre_balance_y=y_test, balance_seed=seed)\n\n # attach all the indices of each folds, training set and test set\n balanced_index = [list(balanced_train_samples.index), list(balanced_test_samples.index)]\n custom_cv.append(balanced_index)\n\n return custom_cv",
"def estimate_residual(neighborhood: Neighborhood, k: int) -> float:\n upper = 0\n lower = len(neighborhood.neighbors)\n for neighbor in neighborhood.neighbors:\n upper += calculate_score(neighbor.feat_vector[k:])\n\n return upper/lower"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Determines the optimal regularization parameter from the given alphas for the given method
|
def internal_cv(self, method, alphas):
    # Accumulate the held-out squared portfolio error for every alpha
    # across all CV folds, then refit on all data with the best alpha.
    se = np.zeros(alphas.size)
    for train, test in self.cv_generator.split(range(self.n_samples)):
        for j, v in enumerate(alphas):
            weights = method(x=self.x[train], alpha=v)
            se[j] += np.sum(np.sum(weights * self.x[test], axis=1)**2)
    best_idx = np.argmin(se)
    # Warn when the optimum sits on the edge of the search grid.
    if best_idx == 0 and alphas[0] > 1e-9:
        print("Warning: Consider lowering the minimum bound for alpha for method %s" % str(method))
    elif best_idx == alphas.size - 1:
        print("Warning: Consider raising the maximum bound for alpha for method %s" % str(method))
    return method(alpha=alphas[best_idx])
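A self-contained toy run of the same selection rule; ridge_weights is a made-up stand-in for the class's weighting methods, and plain KFold replaces the instance's cv_generator:

import numpy as np
from sklearn.model_selection import KFold

rng = np.random.default_rng(0)
x = rng.normal(size=(120, 6))

def ridge_weights(x=None, alpha=None):
    # toy minimum-variance-style weights with l2 shrinkage
    n, p = x.shape
    return np.linalg.solve(x.T @ x / n + alpha * np.eye(p), np.ones(p))

alphas = 10.0 ** np.arange(-6, 0.01, 1.0)
se = np.zeros(alphas.size)
for train, test in KFold(n_splits=5).split(x):
    for j, a in enumerate(alphas):
        w = ridge_weights(x=x[train], alpha=a)
        se[j] += np.sum(np.sum(w * x[test], axis=1) ** 2)
print("selected alpha:", alphas[np.argmin(se)])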
|
[
"def setRegularizationParameter(self, beta) -> None:\n ...",
"def fast_opt_svr_hyperparams(x, y, cs, epsilons, gammas, validation_method, parameter):\r\n \r\n if validation_method != 'cv' and validation_method != 'midknn':\r\n# print('\\'{0}\\' is unknown. Please check \\'validation_method\\'.'.format(validation_method))\r\n# return 0, 0, 0\r\n sys.exit('\\'{0}\\' is unknown. Please check \\'validation_method\\'.'.format(validation_method))\r\n\r\n \r\n x = np.array(x)\r\n y = np.array(y)\r\n cs = np.array(cs)\r\n epsilons = np.array(epsilons)\r\n gammas = np.array(gammas)\r\n \r\n print('1/4 ... pre-optimization of gamma')\r\n optimal_gamma = maximize_variance_of_gram_matrix(x, gammas)\r\n\r\n if validation_method == 'midknn':\r\n # make midknn data points\r\n x_midknn, y_midknn = make_midknn_dataset(x, y, parameter)\r\n \r\n # Optimize epsilon with cross-validation\r\n print('2/4 ... optimization of epsilon')\r\n if validation_method == 'cv':\r\n cross_validation = KFold(n_splits=parameter, random_state=9, shuffle=True)\r\n r2cvs = []\r\n for epsilon in epsilons:\r\n model = SVR(kernel='rbf', C=3, epsilon=epsilon, gamma=optimal_gamma)\r\n estimated_y_in_cv = cross_val_predict(model, x, y, cv=cross_validation)\r\n r2cvs.append(r2_score(y, estimated_y_in_cv))\r\n optimal_epsilon = epsilons[np.where(r2cvs==np.max(r2cvs))[0][0]]\r\n# model = GridSearchCV(SVR(kernel='rbf', C=3, gamma=optimal_gamma), {'epsilon': epsilons}, cv=parameter)\r\n# model.fit(x, y)\r\n# optimal_epsilon = model.best_params_['epsilon']\r\n elif validation_method == 'midknn':\r\n r2_midknns = []\r\n for epsilon in epsilons:\r\n model = SVR(kernel='rbf', C=3, epsilon=epsilon, gamma=optimal_gamma)\r\n model.fit(x, y)\r\n estimated_y_midknn = np.ndarray.flatten(model.predict(x_midknn))\r\n r2_midknns.append(float(1 - sum((y_midknn - estimated_y_midknn) ** 2) / sum((y_midknn - y_midknn.mean()) ** 2)))\r\n optimal_epsilon = epsilons[np.where(r2_midknns == np.max(r2_midknns))[0][0]]\r\n \r\n # Optimize C with cross-validation\r\n print('3/4 ... optimization of c')\r\n if validation_method == 'cv':\r\n r2cvs = []\r\n for c in cs:\r\n model = SVR(kernel='rbf', C=c, epsilon=optimal_epsilon, gamma=optimal_gamma)\r\n estimated_y_in_cv = cross_val_predict(model, x, y, cv=cross_validation)\r\n r2cvs.append(r2_score(y, estimated_y_in_cv))\r\n optimal_c = cs[np.where(r2cvs==np.max(r2cvs))[0][0]]\r\n# model = GridSearchCV(SVR(kernel='rbf', epsilon=optimal_epsilon, gamma=optimal_gamma), {'C': cs}, cv=parameter)\r\n# model.fit(x, y)\r\n# optimal_c = model.best_params_['C']\r\n elif validation_method == 'midknn':\r\n r2_midknns = []\r\n for c in cs:\r\n model = SVR(kernel='rbf', C=c, epsilon=optimal_epsilon, gamma=optimal_gamma)\r\n model.fit(x, y)\r\n estimated_y_midknn = np.ndarray.flatten(model.predict(x_midknn))\r\n r2_midknns.append(float(1 - sum((y_midknn - estimated_y_midknn) ** 2) / sum((y_midknn - y_midknn.mean()) ** 2)))\r\n optimal_c = cs[np.where(r2_midknns == np.max(r2_midknns))[0][0]]\r\n \r\n # Optimize gamma with cross-validation (optional)\r\n print('4/4 ... 
optimization of gamma')\r\n if validation_method == 'cv':\r\n r2cvs = []\r\n for gamma in gammas:\r\n model = SVR(kernel='rbf', C=optimal_c, epsilon=optimal_epsilon, gamma=gamma)\r\n estimated_y_in_cv = cross_val_predict(model, x, y, cv=cross_validation)\r\n r2cvs.append(r2_score(y, estimated_y_in_cv))\r\n optimal_gamma = gammas[np.where(r2cvs==np.max(r2cvs))[0][0]] # クロスバリデーション後の r2 が最も大きい候補\r\n# model = GridSearchCV(SVR(kernel='rbf', epsilon=optimal_epsilon, C=optimal_c), {'gamma': gammas}, cv=parameter)\r\n# model.fit(x, y)\r\n# optimal_gamma = model.best_params_['gamma']\r\n elif validation_method == 'midknn':\r\n r2_midknns = []\r\n for gamma in gammas:\r\n model = SVR(kernel='rbf', C=optimal_c, epsilon=optimal_epsilon, gamma=gamma)\r\n model.fit(x, y)\r\n estimated_y_midknn = np.ndarray.flatten(model.predict(x_midknn))\r\n r2_midknns.append(float(1 - sum((y_midknn - estimated_y_midknn) ** 2) / sum((y_midknn - y_midknn.mean()) ** 2)))\r\n optimal_gamma = gammas[np.where(r2_midknns == np.max(r2_midknns))[0][0]]\r\n \r\n return optimal_c, optimal_epsilon, optimal_gamma",
"def _hyperbolic_regularization(self, current_parameters, a=None, b=None):\n if a is None:\n a = self._scaling_factor\n if b is None:\n b = self._hyperbolic_beta\n\n reg = np.sum( ((np.asarray(current_parameters) )**2 + b**2)**(1/2.) - b)\n\n self.value = a * reg\n\n return self.value",
"def gaussNewton(function,startParam,paramRange, paramStep, target, optimise, cov = None, cov_iv=None,\r\n scalings=None, constraint_target=None, trace=False):\r\n\r\n\r\n\r\n # stage 0 -- setup\r\n nrandom = optimise.get('nrandom', None)\r\n deterministicPerturb = optimise.get('deterministicPertub', True)\r\n statusInfo='Continue'\r\n npt=len(target) # how many points we expect.\r\n if constraint_target is not None: npt+=1 # increment number of points to deal with constraint\r\n iterCount=0 # how many iterations we done.\r\n nFail = 0 # how many cases have failed since last sucess\r\n totalFail =0 # total number of failure we have had.\r\n prevBestParam=startParam[:] # copy startParam so have a prevBestParam if needed.\r\n\r\n # stage 1 -- Work out parameters for first iteration\r\n paramsGN, randIndx = rangeAwarePerturbations(startParam, paramRange, paramStep,\r\n nrandom = nrandom, deterministic = deterministicPerturb, trace=trace)\r\n statusList=[] # a list of the status\r\n while statusInfo == 'Continue':\r\n obsValuesGN, constraintGN = run_fn(function,paramsGN, npt, constraint_target=constraint_target) # run the functions.\r\n optStatus, paramsLS, err, err_constraint, infoGN =\\\r\n doGaussNewton(paramsGN,paramRange, obsValuesGN, target, cov=cov, scalings=scalings,\r\n constraint=constraintGN, constraint_target=constraint_target,\r\n studyJSON=optimise,trace=trace) # run GN\r\n # add some more information to the info dict.\r\n infoGN['err_constraint']=err_constraint\r\n infoGN['obsValues']=obsValuesGN\r\n infoGN['paramValues']=paramsGN\r\n if trace: # print out some information\r\n print \"GN: paramValues: \",paramsLS,\" err_constraint\",err_constraint[0]\r\n\r\n # run the functions on the linesearch values\r\n obsValuesLS, constraintLS = run_fn(function, paramsLS, npt, constraint_target=constraint_target)\r\n # need to merge paramsGS and paramsLS, obsValesGN & obsValuesGN & constraintGN and constraintLS\r\n params=np.vstack((paramsGN,paramsLS))\r\n obsValues=np.vstack((obsValuesGN,obsValuesLS))\r\n constraint=np.hstack((constraintGN,constraintLS))\r\n statusInfo, err, err_constraint, paramsGN, index, bestParam, infoLS = \\\r\n doLineSearch(params, paramRange, obsValues, target, paramStep, cov=cov, cov_iv=cov_iv,\r\n scalings=scalings,\r\n constraint=constraint, constraint_target=constraint_target,\r\n studyJSON=optimise,trace=trace) # run LS\r\n\r\n # add some information to the LineSearch info dict.\r\n infoLS['err_constraint']=err_constraint\r\n infoLS['paramValues']=paramsLS\r\n infoLS['obsValues']=obsValuesLS\r\n statusList.append({'gaussNewton':infoGN,'lineSearch':infoLS})\r\n iterCount += 1 # increase iteration count\r\n if trace:\r\n print \"LS: statusInfo %s Iter: %d Err_constraint\"%(statusInfo,iterCount),err_constraint\r\n\r\n\r\n if statusInfo == 'Continue' or statusInfo == 'Converged':\r\n nFail==0 # reset failure count as we are ok\r\n prevBestParam=bestParam[:] # update prevBestParam in case we restart\r\n else: # we've failed...\r\n nFail += 1 # increment failure count\r\n totalFail +=1 # and total fail count\r\n if (nrandom is not None) and (nFail < optimise.get('maxFails',0)): # random perturbation so allow retry\r\n # generate list of perturbed parameters.\r\n # the tricky issue here is that if we are running deterministically then\r\n # we will always get the same parameters perturbed...\r\n # Will hack this by passing a number to deterministic\r\n # then using that to increment the RNG.\r\n params, randIndx = rangeAwarePerturbations(prevBestParam, paramRange, 
paramStep, nrandom = nrandom,\r\n deterministic=totalFail + 1, trace=trace)\r\n statusInfo='continue' # keep going.\r\n\r\n if trace:\r\n print \"prevBestParam on iter %i is \"%iterCount,prevBestParam\r\n\r\n # end of iterative loop running doGaussNewton and doLineSearch.\r\n\r\n # rearrange the info array\r\n # start with the err_constraint from lineSearch\r\n if nrandom != None:\r\n raise NotImplementedError(\"Need to make info code work with random algorithm...If you don't care switch this off\")\r\n\r\n jacobian=[]\r\n hessian=[]\r\n alpha=[]\r\n err_constraint=[statusList[0]['gaussNewton']['err_constraint'][0]]\r\n bestParams=[startParam]\r\n iter=np.arange(len(statusList))\r\n for iterInfo in statusList:\r\n jacobian.append(iterInfo['gaussNewton']['jacobian'])\r\n hessian.append(iterInfo['gaussNewton']['hessian'])\r\n bestAlpha=iterInfo['lineSearch']['bestrun']\r\n err_constraint.append(iterInfo['lineSearch']['err_constraint'][bestAlpha])\r\n bestParams.append(iterInfo['lineSearch']['paramValues'][bestAlpha])\r\n alpha.append(iterInfo['lineSearch']['params']['alphas'][bestAlpha])\r\n\r\n #err_constraint=err_constraint[:-1]\r\n ## we don't want the last linesearch values...\r\n bestParams=bestParams[:-1]\r\n #for var in (jacobian,hessian,bestAlpha,err_constraint,bestParams,alpha):\r\n # var=np.asscalar(var)\r\n\r\n jacobian=np.asarray(jacobian)\r\n hessian=np.asarray(hessian)\r\n err_constraint=np.asarray(err_constraint)\r\n bestParams=np.asarray(bestParams)\r\n alpha=np.asarray(alpha)\r\n statusList={'jacobian':jacobian,'hessian':hessian,'alpha':alpha,'err_constraint':err_constraint,'iter':iter,'bestParams':bestParams}\r\n\r\n\r\n return prevBestParam, statusInfo, statusList # would also like to return a bunch of info to help trace the performance of the algorithm.\r",
"def rfparametertuning(regressor,xtrain,ytrain):\r\n param_grid = { \r\n 'n_estimators': [200,400,900],\r\n 'max_features': ['auto', 'sqrt', 'log2']\r\n }\r\n \r\n gsearch = GridSearchCV(estimator=regressor, param_grid=param_grid, cv= 3)\r\n \r\n gsearch= gsearch.fit(xtrain,ytrain)\r\n best_score = gsearch.best_score_\r\n best_parameters = gsearch.best_params_\r\n return best_score,best_parameters",
"def RR_prior(grammar, t, alpha=1.0):\n lp = 0.0\n\n for c in get_rule_counts(grammar, t):\n theprior = numpy.array( [alpha] * len(c), dtype=float )\n #theprior = np.repeat(alpha,len(c)) # Not implemented in numpypy\n lp += (beta(c+theprior) - beta(theprior))\n return lp",
"def freeze_adaptive_regularizer_param(self):\n if self.local_weights_hook is None:\n print(\"the local adaptive smoother weight is locked\")\n self.local_weights_hook = self.local_weights.register_hook(lambda grad: grad * 0)\n self.local_weights_hook_flag = True",
"def load_optimization_function(self, para_input: dict):\n\n # learning rate\n self.learning_rate = para_input[\"learning_rate\"]\n # maximum iteration number\n self.iter_num = para_input[\"iter_num\"]\n # the optimization method\n self.optimization_method_str = para_input[\"method\"]\n\n if (para_input[\"method\"] == \"Vanilla\"):\n self.optimization_function = lambda self, theta, idx: self.Vanilla_gradient_descent(theta)\n\n elif (para_input[\"method\"] == \"Nesterov\"):\n self.mu_momentum = para_input[\"mu\"]\n self.actual_loss_print_nesterov_flag = para_input[\"true_loss_print_flag\"]\n self.optimization_function = lambda self, theta, idx: self.Nesterov(theta)\n\n elif (para_input[\"method\"] == \"Adam\"):\n self.beta_1_adam = para_input[\"beta_1\"]\n self.beta_2_adam = para_input[\"beta_2\"]\n self.epsilon_adam = para_input[\"epsilon\"]\n self.optimization_function = lambda self, theta, idx: self.Adam(theta, idx)\n\n elif (para_input[\"method\"] == \"Nadam\"):\n self.beta_1_nadam = para_input[\"beta_1\"]\n self.beta_2_nadam = para_input[\"beta_2\"]\n self.epsilon_nadam = para_input[\"epsilon\"]\n self.optimization_function = lambda self, theta, idx: self.Nadam(theta, idx)\n\n elif (para_input[\"method\"] == \"AMSGrad\"):\n self.beta_1_amsgrad = para_input[\"beta_1\"]\n self.beta_2_amsgrad = para_input[\"beta_2\"]\n self.epsilon_amsgrad = para_input[\"epsilon\"]\n self.optimization_function = lambda self, theta, idx: self.AMSGrad(theta, idx)\n\n else:\n raise Exception(\"Wrong optimization method type!\")",
"def test_params_module():\n # Get the inputs required by the Scales object\n (profile, disp_phases, z0) = get_sim_data()\n\n\n # Test that the governing parameters are computed correctly\n # First, test a single dispersed phase\n model = params.Scales(profile, disp_phases[1])\n check_get_variables(model, z0, 0.15, 0.21724144538674975,\n 0.001724100901081246, 0.22611661456807244, 0.15)\n\n # Second, try a list of dispersed phases, where the dominant phase is\n # not the first one\n particles = [disp_phases[1], disp_phases[0], disp_phases[2]]\n model = params.Scales(profile, particles)\n check_get_variables(model, z0, 0.15, 1.1015134610748201,\n 0.001724100901081246, 0.33764577808309032, 0.15)\n\n # Third, make sure we get the same answer as the previous case if the\n # particles are in a different order (i.e., the original order)\n model = params.Scales(profile, disp_phases)\n check_get_variables(model, z0, 0.15, 1.1015134610748201,\n 0.001724100901081246, 0.33764577808309032, 0.15)\n\n # Using the latest Scales object, check that the other methods return\n # the correct results. Since these methods only depend on the values\n # of B, N, and us computed by the get_variables() method, only one case\n # needs to be tested\n assert_approx_equal(model.h_T(z0), 346.40139518559153, significant=6)\n assert_approx_equal(model.h_P(z0), 627.57408319500291, significant=6)\n assert_approx_equal(model.h_S(z0, 0.15), 295.45365120553163,\n significant=6)\n assert_approx_equal(model.lambda_1(z0, 0), 0.74523735215223819,\n significant=6)\n assert_approx_equal(model.u_inf_crit(z0), 0.063723667111426671,\n significant=6)",
"def RR_prior(grammar, t, alpha=1.0, add_counts={}):\n lp = 0.0\n\n for c in get_rule_counts(grammar, t, add_counts=add_counts):\n theprior = numpy.array( [alpha] * len(c), dtype=float )\n #theprior = np.repeat(alpha,len(c)) # Not implemented in numpypy\n lp += (beta(c+theprior) - beta(theprior))\n return lp",
"def _get_params(self):\r\n v_old=np.zeros_like(self.theta)\r\n for it in range(self.max_iter):\r\n v_new=self.gamma*v_old+self.learning_rate*self._gradient()\r\n self.theta=self.theta-v_new\r\n if np.linalg.norm(self._gradient())/len(self.theta)<10**-3:\r\n # checking if the difference is still significant, if not, stop.\r\n print('break at iter',it)\r\n print(self._cost())\r\n break\r\n v_old=v_new\r\n else:\r\n print('break at iter',self.max_iter)\r\n print(self._cost())\r\n return self.theta",
"def optimize_beta(self, num_itns):\n\n T, V = self.T, self.V\n\n beta, n, beta_n = self.beta, self.n, self.beta_n\n\n Nvt = self.Nvt_plus_beta_n - tile(beta_n, (T, 1)).T\n Nt = self.Nt_plus_beta - beta\n\n new_beta, new_beta_n = beta, beta_n.copy()\n\n for itn in xrange(1, num_itns + 1):\n\n new_beta *= ((psi(Nvt + tile(new_beta_n, (T, 1)).T)\n - psi(tile(new_beta_n, (T, 1)).T)).sum()\n / (V * (psi(Nt + new_beta) - psi(new_beta)).sum()))\n\n new_beta_n = new_beta * n\n\n self.beta, self.beta_n = new_beta, new_beta_n\n\n self.T_gammaln_beta = T * gammaln(new_beta)\n self.T_sum_gammaln_beta_n = T * gammaln(new_beta_n).sum()\n\n self.Nvt_plus_beta_n = Nvt + tile(new_beta_n, (T, 1)).T\n self.Nt_plus_beta = Nt + new_beta",
"def evaluate(self, Pr, LAMBDAS, SHIFTS, degree=2, debug=False):\n obj_barrier = 0.0\n grad_barrier = {}\n Hessian_barrier = {}\n LAMBDA_BAR = {}\n #w.r.t. constraints\n constraint_func, constraint_grads, constraint_Hessian = Pr.evalFullConstraintsGrad(degree)\n #w.r.t. objective \n obj_func, obj_grads, obj_Hessian = Pr.evalGradandUtilities(degree)\n\n\n\n for obj in obj_func:\n if degree < 0:\n continue\n #Objective\n obj_barrier += obj_func[obj]\n\n if degree<1:\n continue\n #Grad\n for index in obj_grads[obj]:\n if index in grad_barrier:\n grad_barrier[index] += obj_grads[obj][index]\n else:\n grad_barrier[index] = obj_grads[obj][index]\n if degree<2:\n continue\n #Hessian\n for index_pair in obj_Hessian[obj]:\n if index_pair in Hessian_barrier:\n Hessian_barrier[index_pair] += obj_Hessian[obj][index_pair]\n else:\n Hessian_barrier[index_pair] = obj_Hessian[obj][index_pair]\n\n\n\n for constraint in constraint_func:\n LAMBDA_BAR[constraint] = LAMBDAS[constraint] * SHIFTS[constraint] / (constraint_func[constraint] + SHIFTS[constraint])\n if degree < 0:\n continue\n #Objective\n try:\n obj_barrier += -1.0 * LAMBDAS[constraint] * SHIFTS[constraint] * math.log(constraint_func[constraint] + SHIFTS[constraint])\n except ValueError:\n obj_barrier = float(\"inf\")\n if degree<1:\n continue\n #Grad\n for index in constraint_grads[constraint]:\n grad_index = -1.0 * LAMBDA_BAR[constraint] * constraint_grads[constraint][index]\n if index in grad_barrier:\n grad_barrier[index] += grad_index\n else:\n grad_barrier[index] = grad_index\n\n if degree<2:\n continue\n #Hessian\n for index_pair in constraint_Hessian[constraint]:\n if index_pair in Hessian_barrier:\n Hessian_barrier[index_pair] += constraint_Hessian[constraint][index_pair]\n else:\n Hessian_barrier[index_pair] = constraint_Hessian[constraint][index_pair]\n\n return LAMBDA_BAR, obj_barrier, SparseVector(grad_barrier), SparseVector(Hessian_barrier)",
"def hyperopt(_x_train, _y_train, _path):\n model = GaussianNB()\n return model.get_params()",
"def update_parameters_with_adam(parameters, grads, v, s, t, learning_rate = 0.01,\r\n beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8): \r\n \r\n L = len(parameters) // 2 # number of layers in the neural networks\r\n v_corrected = {} # Initializing first moment estimate, python dictionary\r\n s_corrected = {} # Initializing second moment estimate, python dictionary\r\n \r\n # Perform Adam update on all parameters\r\n for l in range(1, L + 1):\r\n v[\"dW\" + str(l)] = beta1 * v[\"dW\" + str(l)] + (1 - beta1) * grads[\"dW\" + str(l)]\r\n v[\"db\" + str(l)] = beta1 * v[\"db\" + str(l)] + (1 - beta1) * grads[\"db\" + str(l)]\r\n v_corrected[\"dW\" + str(l)] = v[\"dW\" + str(l)] / (1 - np.power(beta1,t))\r\n v_corrected[\"db\" + str(l)] = v[\"db\" + str(l)] / (1 - np.power(beta1,t))\r\n s[\"dW\" + str(l)] = beta2 * s[\"dW\" + str(l)] + (1 - beta2) * np.power(grads[\"dW\" + str(l)],2)\r\n s[\"db\" + str(l)] = beta2 * s[\"db\" + str(l)] + (1 - beta2) * np.power(grads[\"db\" + str(l)],2)\r\n s_corrected[\"dW\" + str(l)] = s[\"dW\" + str(l)] / ( 1 - np.power(beta2,t))\r\n s_corrected[\"db\" + str(l)] = s[\"db\"+ str(l)] / (1 - np.power(beta2,t)) \r\n parameters[\"W\" + str(l)] += (-learning_rate) * (v_corrected[\"dW\" + str(l)] / (np.sqrt(s_corrected[\"dW\" + str(l)])+epsilon)) \r\n parameters[\"b\" + str(l)] += (-learning_rate) * (v_corrected[\"db\" + str(l)] / (np.sqrt(s_corrected[\"db\" + str(l)]) +epsilon))\r\n\r\n return parameters, v, s, v_corrected, s_corrected",
"def set_alphas(self,alpha,optalpha):\n self.__alpha = alpha;\n self.__optalpha = optalpha;",
"def update_algo_parameter(self, parameter_name, new_parameter_value):\n if hasattr(self, parameter_name):\n setattr(self, parameter_name, new_parameter_value)\n if parameter_name == \"lr\":\n for param_group in self.pi_optimizer.param_groups:\n param_group['lr'] = new_parameter_value\n for param_group in self.q_optimizer.param_groups:\n param_group['lr'] = new_parameter_value\n for param_group in self.alpha_optimizer.param_groups:\n param_group['lr'] = new_parameter_value",
"def optimise_parameters(classifiers, train):\n\n ps = \\\n [\n {\n # 'C': np.arange(15, 30, 0.5),\n 'C': [\n 0.1,\n 0.5,\n 1,\n 2,\n 4,\n 8,\n 16,\n 32,\n 64,\n 128,\n 256,\n 512\n ],\n 'kernel':\n [\n 'linear',\n 'poly',\n 'rbf'\n ]\n },\n # {\n # 'solver': [\"lbfgs\", \"sgd\", \"adam\"],\n # \"learning_rate\": [\"constant\", \"invscaling\", \"adaptive\"],\n # \"activation\": [\"identity\", \"logistic\", 'tanh', \"relu\"],\n # \"hidden_layer_sizes\": [\n # (500, 250, 100, 10),\n # (600, 400, 200, 100, 50, 10),\n # (8, 5, 2),\n # (50, 20, 10, 2),\n # (100, 50, 20, 10, 5, 2),\n # (10, 10, 10, 10, 10, 10, 10, 10, 10, 10)\n # ]\n # },\n {\n 'n_estimators': [\n 110, 120, 130, 140, 150, 160, 170, 180, 190,\n ],\n },\n {\n 'n_neighbors':\n [\n 10, 20, 30, 40, 50, 60, 70, 80, 90, 100,\n 110, 120, 130, 140, 150, 160, 170, 180, 190,\n 200, 210, 220, 230, 240, 250\n ],\n 'weights': ['uniform', 'distance'],\n 'algorithm': ['auto', 'ball_tree', 'kd_tree', 'brute'],\n 'metric': ['euclidean', 'minkowski', 'manhattan']\n }\n ]\n\n i = 0\n\n b_params = ['', '', '', '']\n f1_scorer = make_scorer(f1_score, pos_label=1)\n\n print(train.drop([\"headline\", \"origin\", \"truth\"], axis=1))\n\n while i < len(classifiers):\n grid = GridSearchCV(\n classifiers[i], param_grid=ps[i], cv=5, verbose=3, scoring=f1_scorer)\n grid.fit(train.drop([\"headline\", \"origin\", \"truth\"], axis=1).values, train['truth'].values)\n scores = grid.best_score_\n best_parameters = grid.best_estimator_.get_params()\n param_list = ''\n for param_name in sorted(ps[i].keys()):\n param_list += '\\t%s: %r\\n' % (param_name,\n best_parameters[param_name])\n\n b_params[i] = '%s\\nBest score: %0.3f \\nBest parameters set: %s' % (\n scores, grid.best_score_, param_list)\n\n i += 1\n\n for pars in b_params:\n print(pars)",
"def update_parameters(self, grads, learning_rate, beta, beta1, beta2, epsilon, optimizer=\"gd\"):\n\n if optimizer == \"gd\":\n self.W, self.b = update_parameters_with_gd(self.W, self.b, grads,\n learning_rate=learning_rate)\n elif optimizer == \"momentum\":\n self.W, self.b, self.V = update_parameters_with_momentum(self.W, self.b, grads, self.V,\n learning_rate=learning_rate, beta=beta)\n elif optimizer == \"adam\":\n self.adam_counter += 1 # Adam counter\n self.W, self.b, self.V, self.S = update_parameters_with_adam(self.W, self.b, grads, self.V, self.S,\n learning_rate=learning_rate,\n beta1=beta1, beta2=beta2,\n adam_counter=self.adam_counter, epsilon=epsilon)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return the score given a metric, where y_pred is the prediction error.
|
def score(y_pred, metric):
    # y_pred already holds the prediction errors (y_predicted - y_true),
    # so each metric is evaluated on the error vector directly.
    if metric == 'mae':
        return np.mean(abs(y_pred))         # np.mean(abs(y_predicted - y))
    elif metric == 'rmsd':
        return np.sqrt(np.mean(y_pred**2))  # np.sqrt(np.mean((y_predicted - y)**2))
    elif metric == 'max':
        return np.max(abs(y_pred))          # np.max(abs(y_predicted - y))
    else:
        # raise instead of quit(); also `metric`, not the undefined self.metric
        raise ValueError("Unknown metric: %s" % metric)
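A quick worked check of the three branches on a small residual vector (values rounded):

import numpy as np
errors = np.array([0.1, -0.3, 0.25, -0.05])   # y_predicted - y_true
score(errors, 'mae')    # 0.175
score(errors, 'rmsd')   # ~0.203
score(errors, 'max')    # 0.3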
|
[
"def metric_score(self, batch, y_pred):\n return -self.loss_score(batch, y_pred)",
"def performance_metric(y_true, y_predict):\n\n error = metrics.mean_squared_error(y_true,y_predict)\n return error",
"def score(y_true, y_pred):\n\n\treturn roc_auc_score(y_true, y_pred)",
"def get_metric(self, x, y, metric: str):\n\n if metric == 'score' or metric == 'accuracy':\n return self.score(x, y)\n elif metric == 'precision':\n y_pred = self.predict(x)\n from sklearn.metrics import precision_score\n return precision_score(y, y_pred)\n elif metric == 'recall':\n y_pred = self.predict(x)\n from sklearn.metrics import recall_score\n return recall_score(y, y_pred)\n else:\n print(\"Not supported.\")",
"def performance_metric(y_true, y_predict):\n\n #error = metrics.mean_absolute_error(y_true, y_predict)\n # acc = accuracy_score (y_true, y_predict,normalize=True, sample_weight=None)\n error = mean_squared_error(y_true, y_predict)\n # meanAbsErr = mean_absolute_error (y_true, y_predict)\n return error",
"def score_metric():\n return make_scorer(multiclass_roc_auc_score, greater_is_better=True)",
"def scoring(self, metric: Optional[str] = None, dataset: str = \"test\", **kwargs):\r\n metric_opts = CUSTOM_METRICS + list(SCORERS)\r\n\r\n # Check metric parameter\r\n if metric is None:\r\n return self._final_output()\r\n elif metric.lower() in METRIC_ACRONYMS:\r\n metric = METRIC_ACRONYMS[metric.lower()]\r\n elif metric.lower() not in metric_opts:\r\n raise ValueError(\r\n \"Unknown value for the metric parameter, \"\r\n f\"got {metric}. Try one of {', '.join(metric_opts)}.\"\r\n )\r\n\r\n # Check set parameter\r\n dataset = dataset.lower()\r\n if dataset not in (\"train\", \"test\"):\r\n raise ValueError(\r\n \"Unknown value for the dataset parameter. \"\r\n \"Choose between 'train' or 'test'.\"\r\n )\r\n\r\n if metric.lower() == \"cm\":\r\n return confusion_matrix(\r\n getattr(self, f\"y_{dataset}\"), getattr(self, f\"predict_{dataset}\")\r\n )\r\n elif metric.lower() == \"tn\":\r\n return int(self.scoring(\"cm\", dataset).ravel()[0])\r\n elif metric.lower() == \"fp\":\r\n return int(self.scoring(\"cm\", dataset).ravel()[1])\r\n elif metric.lower() == \"fn\":\r\n return int(self.scoring(\"cm\", dataset).ravel()[2])\r\n elif metric.lower() == \"tp\":\r\n return int(self.scoring(\"cm\", dataset).ravel()[3])\r\n elif metric.lower() == \"lift\":\r\n tn, fp, fn, tp = self.scoring(\"cm\", dataset).ravel()\r\n return float((tp / (tp + fp)) / ((tp + fn) / (tp + tn + fp + fn)))\r\n elif metric.lower() == \"fpr\":\r\n tn, fp, _, _ = self.scoring(\"cm\", dataset).ravel()\r\n return float(fp / (fp + tn))\r\n elif metric.lower() == \"tpr\":\r\n _, _, fn, tp = self.scoring(\"cm\", dataset).ravel()\r\n return float(tp / (tp + fn))\r\n elif metric.lower() == \"sup\":\r\n tn, fp, fn, tp = self.scoring(\"cm\", dataset).ravel()\r\n return float((tp + fp) / (tp + fp + fn + tn))\r\n\r\n # Calculate the scorer via _score_func to use the prediction properties\r\n scorer = SCORERS[metric]\r\n if type(scorer).__name__ == \"_ThresholdScorer\":\r\n if hasattr(self.estimator, \"decision_function\"):\r\n y_pred = getattr(self, f\"decision_function_{dataset}\")\r\n else:\r\n y_pred = getattr(self, f\"predict_proba_{dataset}\")\r\n if self.T.task.startswith(\"bin\"):\r\n y_pred = y_pred[:, 1]\r\n elif type(scorer).__name__ == \"_ProbaScorer\":\r\n if hasattr(self.estimator, \"predict_proba\"):\r\n y_pred = getattr(self, f\"predict_proba_{dataset}\")\r\n if self.T.task.startswith(\"bin\"):\r\n y_pred = y_pred[:, 1]\r\n else:\r\n y_pred = getattr(self, f\"decision_function_{dataset}\")\r\n else:\r\n y_pred = getattr(self, f\"predict_{dataset}\")\r\n\r\n return scorer._sign * float(\r\n scorer._score_func(\r\n getattr(self, f\"y_{dataset}\"), y_pred, **scorer._kwargs, **kwargs\r\n )\r\n )",
"def cross_validated_metric(model, metric: str) -> float:\n cv_metric = cross_val_score(model, X, y, cv=5, scoring=metric)\n return np.mean(cv_metric)",
"def performance(clf_trained, X, y_true, metric='auroc'):\n \n y_pred = clf_trained.predict(X)\n y_score = clf_trained.decision_function(X)\n tn, fp, fn, tp = metrics.confusion_matrix(y_true, y_pred, labels=[-1,1]).ravel()\n if metric == 'accuracy':\n return metrics.accuracy_score(y_true, y_pred)\n elif metric == 'auroc':\n return metrics.roc_auc_score(y_true, y_score)\n elif metric == 'f1_score':\n return metrics.f1_score(y_true, y_pred)\n elif metric == 'precision':\n return metrics.precision_score(y_true, y_pred)\n elif metric == 'sensitivity':\n if tp + fn > 0:\n return tp / (tp+fn)\n else:\n return 0.0\n elif metric == 'specificity':\n if tn + fp > 0:\n return tn / (tn+fp)\n else:\n return 0.0",
"def cal_classificationerror(y, y_pred):\n return 1-accuracy(y,y_pred)",
"def average_precision_score(y, y_pred):\n pass",
"def evaluate(self, groundtruth, predictions, metric, threshold=0.5):\n if metric in self.summary_metrics:\n metric_fn = self.summary_metrics[metric]\n value = metric_fn(groundtruth, predictions)\n elif metric in self.curve_metrics:\n metric_fn = self.curve_metrics[metric]\n value = metric_fn(groundtruth, predictions)\n elif metric in self.point_metrics:\n metric_fn = self.point_metrics[metric]\n value = metric_fn(groundtruth, predictions > threshold)\n else:\n raise ValueError(f\"Metric {metric} not supported.\")\n\n return value",
"def score(self, metric='rmse', doy_observed=None,\n to_predict=None, predictors=None):\n self._check_parameter_completeness()\n\n if doy_observed is None:\n doy_observed = self.obs_fitting\n elif isinstance(doy_observed, np.ndarray):\n if not isinstance(to_predict, pd.DataFrame) or not isinstance(predictors, pd.DataFrame):\n raise TypeError('to_predict and predictors must be pandas dataframes if ',\n 'evaluating new data')\n\n if doy_observed.shape[0] != to_predict.shape[0]:\n raise TypeError('The length of doy_observed must be equal to the',\n 'length of to_predict.')\n\n else:\n raise TypeError('Unknown doy_observed parameter type. expected ndarray, got ' + str(type(doy_observed)))\n\n doy_estimated = self.predict(to_predict=to_predict,\n predictors=predictors)\n\n error_function = utils.optimize.get_loss_function(method=metric)\n\n if metric == 'aic':\n error = error_function(doy_observed, doy_estimated,\n n_param=len(self._parameters_to_estimate))\n else:\n error = error_function(doy_observed, doy_estimated)\n\n return error",
"def scores(self, y_pred, y_true ): \n u = ((y_true - y_pred) ** 2).sum(axis=-1)\n v = ((y_true - y_true.mean(axis=-1)[None].T) ** 2).sum(axis=-1)\n r_2 = 1 - u/v\n return r_2",
"def score(self, X, y=None):\n y_pred = self.predict(X)\n return self.draw(y,y_pred)",
"def calculate_accuracy_score(self) -> float:\n return accuracy_score(self.labels, self.y_pred)",
"def r2_score(y_true, y_pred):\n eps = 1e-13 # Epsilon avoid possible division by zero\n SS_res = np.sum(np.square(y_true - y_pred))\n SS_tot = np.sum(np.square(y_true - np.mean(y_true)))\n return (1 - SS_res / (SS_tot + eps))",
"def mean_squared_error(y_true, y_pred):\n mse = np.mean(np.power(y_true-y_pred,2))\n return mse",
"def match_accuracy_scorer(estimator, X, y):\n y_pred = estimator.predict(X)\n\n return _calculate_match_accuracy(X, y, y_pred)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
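The scoring document in the row above derives lift, fpr, tpr, and sup directly from the four confusion-matrix cells. As a sanity check, here is a minimal standalone sketch (made-up data; `y_true`/`y_pred` are illustrative, not part of the dataset) that recomputes the same quantities:

import numpy as np
from sklearn.metrics import confusion_matrix

# Toy labels/predictions, chosen so no denominator is zero.
y_true = np.array([0, 0, 1, 1, 1, 0, 1, 0])
y_pred = np.array([0, 1, 1, 1, 0, 0, 1, 1])

tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
n = tn + fp + fn + tp

lift = (tp / (tp + fp)) / ((tp + fn) / n)  # precision relative to class prevalence
fpr = fp / (fp + tn)                       # false positive rate
tpr = tp / (tp + fn)                       # true positive rate (recall)
sup = (tp + fp) / n                        # fraction of samples predicted positive

print(lift, fpr, tpr, sup)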
Setter method for description, mapped from YANG variable /local_routes/static_routes/static/config/description (string)
|
def _set_description(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """description must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)""",
})
self.__description = t
if hasattr(self, '_set'):
self._set()
|
[
"def set_description(self, description):\n if not isinstance(description, str):\n raise ValueError(\"Description must be a string.\")\n try:\n self._set_config_value(\n _SERVICE_INFO_SECTION_NAME, \"Description\", description\n )\n except Exception as e:\n logger.error(f\"Unable to set description: {e}\")",
"def set_description(self):\n if \"description\" not in self.data:\n logger.debug(\"Adding empty descriptions to root\")\n self.data[\"description\"] = \"\"",
"def config_setting_description(self, config_setting_description):\n\n self._config_setting_description = config_setting_description",
"def add_description(self, description: str):\n self.response[DESCRIPTION_KEY] = description\n return self",
"def description(cls, desc: str) -> \"meta\":\n return cls(name=\"description\", content=desc)",
"def set_description(description):",
"def _setup_externals_description(config):\n\n config.add_section(DESCRIPTION_SECTION)\n config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.1')",
"def description_url(self, description_url):\n\n self._description_url = description_url",
"def description(self, value):\r\n if self.description is not None:\r\n if isinstance(value, basestring):\r\n output = self._update(\r\n backupset_name=self.backupset_name,\r\n backupset_description=value,\r\n default_backupset=self.is_default_backupset\r\n )\r\n\r\n if output[0]:\r\n return\r\n else:\r\n o_str = 'Failed to update the description of the backupset\\nError: \"{0}\"'\r\n raise SDKException('Backupset', '102', o_str.format(output[2]))\r\n else:\r\n raise SDKException(\r\n 'Backupset', '102', 'Backupset description should be a string value'\r\n )\r\n else:\r\n raise SDKException('Backupset', '102', 'Description cannot be modified')",
"def edit_description(self, new_desciption):\n self.desciption = new_desciption",
"def _description_string(self) -> str:",
"def meta_description(doc):\n return meta_content(doc, \"meta[name=description]\")",
"def set_description(module):\n name = module.attributes['name']\n value = module.attributes['description']\n module.node.api('interfaces').set_description(name, value)",
"def description_html(self):\n return self.description",
"def description(self):\n return self._clean_string(self.video_data.get('description'))",
"def description(self, text=pythoncom.Empty):\r\n return _base._rsf.block_description(self._block._name, text)",
"def description(self, value):\n if value is None or value == \"\":\n value = self.name\n CCAPI.set_product_description(product_ids=[self.id], description=value)\n self._description = value",
"def BlockDescription(block_name, description=None):\n idef = scriptcontext.doc.InstanceDefinitions.Find(block_name)\n if not idef: raise ValueError(\"%s does not exist in InstanceDefinitionsTable\"%block_name)\n rc = idef.Description\n if description: scriptcontext.doc.InstanceDefinitions.Modify( idef, idef.Name, description, True )\n return rc",
"def get_description_of_rule(config_rule_name):\n description = \"\"\n try:\n response = CONFIG.describe_config_rules(\n ConfigRuleNames=[config_rule_name]\n )\n if 'Description' in response['ConfigRules'][0]:\n description = response['ConfigRules'][0]['Description']\n else:\n description = response['ConfigRules'][0]['ConfigRuleName']\n return description\n except Exception as error:\n print(\"Error: \", error)\n raise"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
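All of the `_set_description`-style setters in this file follow the same shape: coerce the value into a restricted type, then translate any TypeError/ValueError into a structured ValueError dict. A simplified, self-contained sketch of that pattern (the `StringLeaf` class below is a stand-in for pyangbind's YANGDynClass, not its real API):

# Stand-in for a restricted YANG string type; real bindings use YANGDynClass.
class StringLeaf(str):
    def __new__(cls, v):
        if not isinstance(v, str):
            raise TypeError('not a string')
        return super().__new__(cls, v)

def set_description(obj, v):
    try:
        t = StringLeaf(v)
    except (TypeError, ValueError):
        # Mirror the structured error dict raised by the generated setters.
        raise ValueError({
            'error-string': 'description must be of a type compatible with string',
            'defined-type': 'string',
        })
    obj._description = t

class Route:
    pass

r = Route()
set_description(r, 'backbone link')   # accepted
# set_description(r, 42)              # would raise the structured ValueError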
Setter method for index, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config/index (string)
|
def _set_index(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """index must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)""",
})
self.__index = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/platform/transceiver', defining_module='openconfig-platform-transceiver', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"index must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/platform/transceiver', defining_module='openconfig-platform-transceiver', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__index = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"index must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__index = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_index(self, index):\n if index not in INDEX_NAMES:\n available_keys = \", \".join(list(INDEX_NAMES.keys()))\n msg = (f\"{index} key is not avaiable. Available keys: \"\n f\"{available_keys}\")\n logger.error(msg)\n raise KeyError(msg)\n self.index = index\n self.index_name = INDEX_NAMES[index]",
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"index must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__index = t\n if hasattr(self, '_set'):\n self._set()",
"def getIndex(self, index: 'int const') -> \"int\":\n return _coin.SoPath_getIndex(self, index)",
"def index(self, index):\n raise NotImplementedError, \\\n \"there is no standard CREATE INDEX construct (or is there? let me know!)\"",
"def addIndex (self, index):\n\n self.indexes.append (index)\n self.indexMap [index.name] = index",
"def getIndex(self, index: 'int const') -> \"int\":\n return _coin.SoLightPath_getIndex(self, index)",
"def is_index(self, is_index):\n\n self._is_index = is_index",
"def get_index(index):\n indexes = get_setting('SEARCH_INDEXES') or {}\n data = indexes.get(index, None)\n if data is None:\n raise IndexNotFound(index)\n return data",
"def edit_index(self, index, reindex=False, ind_kwargs=None):\n if ind_kwargs is None:\n ind_kwargs = {}\n ind_obj, name = self.__write_index(index, -1, edit=True)\n old = next(x for x in self.indexes if x.name == name)\n old.close_index()\n index_of_index = self.indexes.index(old)\n ind_obj.open_index()\n self.indexes[index_of_index] = ind_obj\n self.indexes_names[name] = ind_obj\n if reindex:\n self.reindex_index(name)\n return name",
"def create_index(self):\n try:\n self.client.create_index(self.index)\n except Exception as e:\n pass",
"def index_key(self):\n return self.request.GET.get('index_key', default='current')",
"def index_settings(self):\n url = f'{self.hostname}/settings/indexes'\n return self._get(url)",
"def handle_set_page_index(self, index):\n pass",
"def index_of(self, name):\n\n info = self.info_of(name)\n return info[self.INDEX]",
"def __get_index_info__(self, index_name):\n result = self.__description__.get_indices()\n if result is not None:\n result = result.get(index_name, None)\n return result",
"def addIndex(self, index):\n assert type(index)==int\n assert 0 <= index <self._dataset.getSize()\n\n if index in self._indices :\n pass\n else:\n self._indices.append(index)",
"def index_config(self) -> Optional[pulumi.Input['FieldIndexConfigArgs']]:\n return pulumi.get(self, \"index_config\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
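Note that the document's `_set_index` lacks the parent guard that several of its negatives carry: in those variants, a list key may not be rewritten once the entry lives inside an instantiated keyed list. A hypothetical stand-alone rendering of that guard (names are illustrative, not pyangbind's implementation):

class NextHopEntry:
    def __init__(self, index):
        self._parent = None          # set when the entry is added to a keyed list
        self._index = str(index)

    def set_index(self, v, load=False):
        # Keys are writable only before parenting, or when loading from a datastore.
        if self._parent is not None and not load:
            raise AttributeError('Cannot set keys directly when within an instantiated list')
        self._index = str(v)

entry = NextHopEntry('0')
entry.set_index('1')                 # fine: not yet parented
entry._parent = object()
# entry.set_index('2')               # would raise AttributeError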
Setter method for next_hop, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config/next_hop (union)
|
def _set_next_hop(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name="next-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """next_hop must be of a type compatible with union""",
'defined-type': "openconfig-local-routing:union",
'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name="next-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)""",
})
self.__next_hop = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with union\"\"\",\n 'defined-type': \"openconfig-local-routing:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"index\",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name=\"next-hop\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"index\",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name=\"next-hop\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hops(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name=\"next-hops\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hops must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name=\"next-hops\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hops = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_nexthop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"nexthop\", rest_name=\"nexthop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='mpls-ipv4-address', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"nexthop must be of a type compatible with mpls-ipv4-address\"\"\",\n 'defined-type': \"brocade-mpls:mpls-ipv4-address\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"nexthop\", rest_name=\"nexthop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='mpls-ipv4-address', is_config=True)\"\"\",\n })\n\n self.__nexthop = t\n if hasattr(self, '_set'):\n self._set()",
"def VplsEnableNextHop(self):\n return self._get_attribute('vplsEnableNextHop')",
"def _update_next_hop(self, want, have, opr=True):\n commands = []\n\n want_copy = deepcopy(remove_empties(want))\n have_copy = deepcopy(remove_empties(have))\n\n diff_next_hops = get_lst_diff_for_dicts(\n have_copy, want_copy, \"next_hops\"\n )\n if diff_next_hops:\n for hop in diff_next_hops:\n for element in hop:\n if element == \"forward_router_address\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n value=hop[element],\n remove=True,\n )\n )\n elif element == \"enabled\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"],\n value=\"disable\",\n remove=True,\n )\n )\n elif element == \"admin_distance\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=str(hop[element]),\n remove=True,\n )\n )\n elif element == \"interface\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=hop[element],\n remove=True,\n )\n )\n return commands",
"def _set_max_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"max_hop must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__max_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_max_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"max_hop must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__max_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _update_nexthop_cache(self, now, vlan, eth_src, port, ip_gw):\n nexthop = NextHop(eth_src, port, now)\n nexthop_cache = self._vlan_nexthop_cache(vlan)\n nexthop_cache[ip_gw] = nexthop\n return nexthop",
"def _set_hop_limit(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..255']}), is_leaf=True, yang_name=\"hop-limit\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"hop_limit must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..255']}), is_leaf=True, yang_name=\"hop-limit\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__hop_limit = t\n if hasattr(self, '_set'):\n self._set()",
"def test_default_ipv6_route_next_hop_global_address(duthosts, tbinfo):\n duthost = find_duthost_on_role(\n duthosts, get_upstream_neigh_type(tbinfo['topo']['type']), tbinfo)\n asichost = duthost.asic_instance(0 if duthost.is_multi_asic else None)\n\n rtinfo = asichost.get_ip_route_info(ipaddress.ip_network(\"::/0\"))\n pytest_assert(len(rtinfo['nexthops']) > 0,\n \"cannot find ipv6 nexthop for default route\")\n for nh in rtinfo['nexthops']:\n pytest_assert(not nh[0].is_link_local,\n \"use link local address {} for nexthop\".format(nh[0]))",
"def set_next(self, next_node):\r\n self.next_node = next_node",
"def update_hop_count(self, hop_count: int) -> None:\n if hop_count < self.hop_count:\n self.hop_count = hop_count",
"def test_hopping_generator():\n from scipy.spatial import cKDTree\n\n @pb.hopping_generator(\"tnn_test\", energy=graphene.t_nn)\n def next_nearest(x, y, z):\n pos = np.stack([x, y, z], axis=1)\n dmin = graphene.a * 0.95\n dmax = graphene.a * 1.05\n kdtree = cKDTree(pos)\n coo = kdtree.sparse_distance_matrix(kdtree, dmax).tocoo()\n idx = coo.data > dmin\n return coo.row[idx], coo.col[idx]\n\n @pb.onsite_energy_modifier\n def onsite_offset(energy):\n return energy + 3 * graphene.t_nn\n\n model = pb.Model(graphene.monolayer(), next_nearest, onsite_offset, graphene.hexagon_ac(1))\n expected = pb.Model(graphene.monolayer(2), graphene.hexagon_ac(1))\n assert pytest.fuzzy_equal(model.hamiltonian, expected.hamiltonian)",
"def evpn_next_hop_unchanged(self, **kwargs):\n callback = kwargs.pop('callback', self._callback)\n ip_addr = kwargs.pop('ip_addr')\n rbridge_id = kwargs.pop('rbridge_id', '1')\n feature = '_neighbor_evpn_neighbor_ipv4'\n afi = 'l2vpn'\n if kwargs.pop('delete', False):\n args = dict(next_hop_unchanged=False)\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n afi=afi,\n op='_update',\n evpn_n_addr=ip_addr,\n args=args,\n os=self.os)\n return callback(config)\n if kwargs.pop('get', False):\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n evpn_n_addr=ip_addr,\n afi=afi,\n op='_get',\n os=self.os)\n out = callback(config, handler='get_config')\n bgp = Util(out.data)\n out = bgp.find(bgp.root, './/next-hop-unchanged')\n out = True if out == 'true' else False\n return out\n args = dict(next_hop_unchanged=True)\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n afi=afi,\n op='_update',\n evpn_n_addr=ip_addr,\n args=args,\n os=self.os)\n return callback(config)",
"def is_hop_by_hop(header_name):\r\n return _hoppish(header_name.lower())",
"def set_analysis_hop(self, analysis_hop):\n # pylint: disable=no-self-use,unused-argument\n return",
"def set_next(self, new_next):\n \tself.next = new_next"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
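The union base in the next-hop setter above accepts three member types: an IPv4 address, an IPv6 address (each with an optional zone suffix), or one of the special-next-hop identities. Below is a simplified re-implementation of that check, using the `ipaddress` module instead of the model's regex patterns (an intentional simplification, so edge cases may differ):

import ipaddress

# Identity values from the model, with and without the module prefix.
SPECIAL = {'DROP', 'LOCAL_LINK', 'oc-loc-rt:DROP', 'oc-loc-rt:LOCAL_LINK'}

def validate_next_hop(v: str) -> str:
    if v in SPECIAL:
        return v
    try:
        # Strip an optional zone suffix such as '%eth0' before parsing.
        ipaddress.ip_address(v.split('%', 1)[0])
        return v
    except ValueError:
        raise ValueError('next_hop must be of a type compatible with union')

validate_next_hop('192.0.2.1')       # IPv4 member
validate_next_hop('2001:db8::1')     # IPv6 member
validate_next_hop('DROP')            # identity member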
Setter method for metric, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config/metric (uint32)
|
def _set_metric(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """metric must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='uint32', is_config=True)""",
})
self.__metric = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_metric(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=metric.metric, is_container='container', presence=False, yang_name=\"metric\", rest_name=\"metric\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route metric', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"metric must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=metric.metric, is_container='container', presence=False, yang_name=\"metric\", rest_name=\"metric\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Route metric', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__metric = t\n if hasattr(self, '_set'):\n self._set()",
"def get_metric_value(metric, data):\n\n if len(metric) > 1:\n return get_metric_value(metric[1:], data[metric[0]])\n\n return data[metric[0]]",
"def set_metric_file(self, metric_file):\n try :\n with open(metric_file, \"r\") as read_file:\n self.metric_data = json.load(read_file) \n except Exception as error:\n self.logger.error(\"Error while opening metric_file. Please review error: %s\" % error)",
"def metric_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"metric_id\")",
"def val_metric(self, key, metric=None):\n actual_key = \"val_\" + key\n return self.metric(key, val_metric=metric, mode=\"val\")",
"def train_metric(self, key, metric=None):\n actual_key = \"train_\" + key\n return self.metric(key, train_metric=metric, mode=\"train\")",
"def show_watch_metric(self, ctxt, metric_namespace=None, metric_name=None):\r\n return self.call(ctxt, self.make_msg('show_watch_metric',\r\n metric_namespace=metric_namespace,\r\n metric_name=metric_name))",
"def writeMetric(\n self,\n path,\n metric,\n value,\n timestamp,\n metricType,\n metricId,\n min,\n max,\n hasThresholds,\n threshEventData,\n allowStaleDatapoint,\n ):",
"def remove_metric_from_folder(self, folder, metric):\n if folder is not None and metric is not None:\n LOGGER.debug(\"Removing metric '%s' from folder '%s'\" %\n (metric.name, folder.name))\n folder.metrices.remove(metric)\n LOGGER.debug(\"Metric removed from folder successfully\")\n else:\n raise ValueError(\"Parameter 'metric' and 'folder' can't be None\")",
"def delete_metric(self, metric_name):\n return self._delete('metric/%s' % metric_name)",
"def unregister(metric):\n del state.metrics[metric.name]",
"def metric_type_in(self, metric_type_in):\n\n self._metric_type_in = metric_type_in",
"def as_metric_config(self):\n return tfma.config.MetricConfig(\n class_name=self.class_name,\n module=self.module_name,\n config=json.dumps(self.config) if self.config else None)",
"def set_custom_metric(key, value):\n if not newrelic:\n return\n newrelic.agent.add_custom_parameter(key, value)",
"def update_metering_label_rule(self, metering_label_rule, **attrs):\n return self._update(\n _metering_label_rule.MeteringLabelRule,\n metering_label_rule,\n **attrs,\n )",
"def put_metric_alarm(self, req):\r\n self._enforce(req, 'PutMetricAlarm')\r\n return exception.HeatAPINotImplementedError()",
"def update_reduced_metric(self, name, value, key=None):\n if name not in self.metrics:\n Log.error(\"In update_reduced_metric(): %s is not registered in the metric\" % name)\n\n if key is None and isinstance(self.metrics[name], ReducedMetric):\n self.metrics[name].update(value)\n elif key is not None and isinstance(self.metrics[name], MultiReducedMetric):\n self.metrics[name].update(key, value)\n else:\n Log.error(\"In update_count(): %s is registered but not supported with this method\" % name)",
"def metric_count_in(self, metric_count_in):\n\n self._metric_count_in = metric_count_in",
"def metric_detail(request, name):\n try:\n r = get_redis_connection()\n except:\n return JSONResponse('Error connecting with DB', status=500)\n\n if request.method == 'GET':\n metric = r.hgetall(\"metric:\"+str(name))\n return JSONResponse(metric, status=200)\n\n if request.method == 'PUT':\n if not r.exists('metric:'+str(name)):\n return JSONResponse('Metric with name: '+str(name)+' not exists.', status=404)\n\n data = JSONParser().parse(request)\n r.hmset('metric:'+str(name), data)\n return JSONResponse('The metadata of the metric workload with name: '+str(name)+' has been updated', status=201)\n\n if request.method == 'DELETE':\n r.delete(\"metric:\"+str(id))\n return JSONResponse('Metric workload has been deleted', status=204)\n return JSONResponse('Method '+str(request.method)+' not allowed.', status=405)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
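The metric leaf above restricts its value to the uint32 range. A minimal stand-in for that restriction (illustrative only; the generated code layers two RestrictedClassType wrappers instead):

def validate_metric(v) -> int:
    # Coerce, then enforce the uint32 range 0..4294967295 from the model.
    v = int(v)
    if not 0 <= v <= 4294967295:
        raise ValueError('metric must be of a type compatible with uint32')
    return v

validate_metric(100)                 # accepted
# validate_metric(2**32)             # would raise ValueError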
Setter method for next_hop, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/state/next_hop (union)
|
def _set_next_hop(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name="next-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """next_hop must be of a type compatible with union""",
'defined-type': "openconfig-local-routing:union",
'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name="next-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)""",
})
self.__next_hop = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with union\"\"\",\n 'defined-type': \"openconfig-local-routing:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"index\",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name=\"next-hop\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"index\",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name=\"next-hop\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hops(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name=\"next-hops\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hops must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name=\"next-hops\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hops = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_nexthop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"nexthop\", rest_name=\"nexthop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='mpls-ipv4-address', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"nexthop must be of a type compatible with mpls-ipv4-address\"\"\",\n 'defined-type': \"brocade-mpls:mpls-ipv4-address\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"nexthop\", rest_name=\"nexthop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='mpls-ipv4-address', is_config=True)\"\"\",\n })\n\n self.__nexthop = t\n if hasattr(self, '_set'):\n self._set()",
"def VplsEnableNextHop(self):\n return self._get_attribute('vplsEnableNextHop')",
"def _update_nexthop_cache(self, now, vlan, eth_src, port, ip_gw):\n nexthop = NextHop(eth_src, port, now)\n nexthop_cache = self._vlan_nexthop_cache(vlan)\n nexthop_cache[ip_gw] = nexthop\n return nexthop",
"def _update_next_hop(self, want, have, opr=True):\n commands = []\n\n want_copy = deepcopy(remove_empties(want))\n have_copy = deepcopy(remove_empties(have))\n\n diff_next_hops = get_lst_diff_for_dicts(\n have_copy, want_copy, \"next_hops\"\n )\n if diff_next_hops:\n for hop in diff_next_hops:\n for element in hop:\n if element == \"forward_router_address\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n value=hop[element],\n remove=True,\n )\n )\n elif element == \"enabled\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"],\n value=\"disable\",\n remove=True,\n )\n )\n elif element == \"admin_distance\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=str(hop[element]),\n remove=True,\n )\n )\n elif element == \"interface\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=hop[element],\n remove=True,\n )\n )\n return commands",
"def _set_lsp_operational_up(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"lsp-operational-up\", rest_name=\"lsp-operational-up\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_operational_up must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"lsp-operational-up\", rest_name=\"lsp-operational-up\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\"\"\",\n })\n\n self.__lsp_operational_up = t\n if hasattr(self, '_set'):\n self._set()",
"def add_hop(self, state: Tuple[int, int], new_state: Tuple[int, int]):\n assert len(state) == 2, \"state shape must be 2D\"\n assert len(new_state) == 2, \"state shape must be 2D\"\n self.hops.append(\n {\"old_state\": np.array(state), \"new_state\": np.array(new_state)}\n )",
"def set_next(self, next_node):\r\n self.next_node = next_node",
"def is_hop_by_hop(header_name):\r\n return _hoppish(header_name.lower())",
"def update_hop_count(self, hop_count: int) -> None:\n if hop_count < self.hop_count:\n self.hop_count = hop_count",
"def _set_max_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"max_hop must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__max_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_max_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"max_hop must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..255']}), is_leaf=True, yang_name=\"max-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__max_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def go_to_state(self, next_state):\n for t in self.transitions:\n if t.next_state == None:\n t.next_state = next_state\n return self.root",
"def test_default_ipv6_route_next_hop_global_address(duthosts, tbinfo):\n duthost = find_duthost_on_role(\n duthosts, get_upstream_neigh_type(tbinfo['topo']['type']), tbinfo)\n asichost = duthost.asic_instance(0 if duthost.is_multi_asic else None)\n\n rtinfo = asichost.get_ip_route_info(ipaddress.ip_network(\"::/0\"))\n pytest_assert(len(rtinfo['nexthops']) > 0,\n \"cannot find ipv6 nexthop for default route\")\n for nh in rtinfo['nexthops']:\n pytest_assert(not nh[0].is_link_local,\n \"use link local address {} for nexthop\".format(nh[0]))",
"def set_next(self, new_next):\n \tself.next = new_next"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for recurse, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/state/recurse (boolean)
|
def _set_recurse(self, v, load=False):
    # Unwrap values that expose the _utype coercion hook before validating.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validation happens by construction: YANGDynClass raises if the value
      # cannot be coerced to a YANGBool.
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="recurse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """recurse must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="recurse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='boolean', is_config=False)""",
      })

    # Store the validated value and fire the parent change hook, if present.
    self.__recurse = t
    if hasattr(self, '_set'):
      self._set()
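
The setter above follows pyangbind's generated pattern: unwrap, validate by constructing the typed wrapper, and re-raise any failure as a ValueError carrying a structured error dict. A minimal standalone sketch of that behaviour (ToyBool and set_recurse_like are hypothetical stand-ins for YANGDynClass/YANGBool, not part of the source):

class ToyBool:
    # Hypothetical stand-in for the YANGBool coercion done by YANGDynClass.
    def __init__(self, v):
        if isinstance(v, bool):
            self.value = v
        elif isinstance(v, str) and v.lower() in ("true", "false"):
            self.value = v.lower() == "true"
        else:
            raise ValueError("not a boolean")

def set_recurse_like(v):
    # Mirror the generated setter: validate by construction, re-raise with
    # the same structured error-dict shape used above.
    try:
        t = ToyBool(v)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': "recurse must be of a type compatible with boolean",
            'defined-type': "boolean",
        })
    return t

assert set_recurse_like("true").value is True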
|
[
"def set_recursive(self, b):\n _ldns.ldns_resolver_set_recursive(self, b)\n #parameters: ldns_resolver *,bool,\n #retvals: ",
"def set_visited_right(self):\n self.visited_right = True",
"def recursion_depth(self, recursion_depth):\n\n self._recursion_depth = recursion_depth",
"def match_recurse(is_start, sections, path_elements, location):\n if sections:\n section = sections[0]\n any_match = False\n for end in section.match_iter(path_elements, location):\n any_match = True\n match = match_recurse(False, sections[1:], path_elements, end)\n if match | MatchType.MATCH:\n return match\n\n # No match found\n if is_start and self.bound_start and not any_match:\n # This this is the start of the recursion AND the pattern\n # is bound to the start of the path (\"/start/**\") AND this\n # did not match, then no subdirectories are possible either\n\n # WAS if len(path_elements) >= 1:\n\n if len(path_elements) >= len(section.elements):\n return MatchType.NO_MATCH_NO_SUBDIRECTORIES\n else:\n return MatchType.NO_MATCH\n else:\n return MatchType.NO_MATCH\n else:\n # Termination of the recursion after FINDING the match.\n if len(self.sections) == 1 and self.bound_start and self.bound_end:\n # If this pattern is of the form \"/test/*\" it matches\n # just THIS directory and no subdirectories\n return MatchType.MATCH_BUT_NO_SUBDIRECTORIES\n elif self.bound_end:\n # \"**/test/*\" matches just this directory\n # and allows subdirectories to also match\n return MatchType.MATCH\n else:\n # If the pattern is not bound to the end of the path (eg\n # NOT \"**/term/**\") the pattern matches all subdirectories\n return MatchType.MATCH_ALL_SUBDIRECTORIES\n # End of: def match_recurse(is_start, sections, path_elements, location):",
"def solveOneStep(self):\n ### Student code goes here\n curr = self.currentState\n # print(curr, end=\" \")\n # print(curr.depth)\n self.visited[self.currentState] = True\n movables = self.gm.getMovables()\n\n if self.gm.getGameState() == self.victoryCondition:\n return True\n\n else:\n if movables and not self.currentState.children:\n for x in range(len(movables)):\n\n self.gm.makeMove(movables[x])\n node = GameState(self.gm.getGameState(), curr.depth+1, movables[x])\n # self.gm.reverseMove(movables[x])\n curr.children.append(node)\n node.parent = curr\n if node not in self.visited:\n self.visited[node] = False\n self.gm.reverseMove(movables[x])\n else:\n self.gm.reverseMove(movables[x])\n else:\n if curr.parent != None:\n curr.depth -= 1\n self.gm.reverseMove(curr.requiredMovable)\n\n for child in curr.children:\n if self.visited[child] is False:\n self.currentState = child\n self.visited[child] = True\n self.gm.makeMove(child.requiredMovable)\n break",
"def set_right_visited(self):\n if self.size() == 0:\n return\n self.data[len(self.data) - 1][\"right_visited\"] = True",
"def _get_recursion_parameter(self, method_name, *args, **kwargs):\n recursion = kwargs.get('recursion', None)\n if recursion is None:\n log.info(\"%r: %s called with no recursion parameter. \"\n \"Using recursion=True by default.\",\n self._platform_id, method_name)\n recursion = True\n else:\n log.info(\"%r: %s called with recursion parameter: %r\",\n self._platform_id, method_name, recursion)\n recursion = bool(recursion)\n\n return recursion",
"def solveOneStep(self):\n ### Student code goes here\n if self.victoryCondition == self.currentState.state:\n return True\n\n if self.currentState not in self.visited:\n self.visited[self.currentState] = True\n\n if self.gm.getMovables() and not self.currentState.children:\n self.findChildren(self.currentState)\n\n path = []\n\n always = 123 ### runs until we break this loop ourselves\n while always == 123:\n next = self.search_queue.get()\n\n if not next in self.visited:\n #create the path back to the top\n while next.requiredMovable:\n path.append(next.requiredMovable)\n next = next.parent\n\n # reverse the actual states\n while self.currentState.requiredMovable:\n self.gm.reverseMove(self.currentState.requiredMovable)\n self.currentState = self.currentState.parent\n\n # navigate back down and go to the children to mark them as visited and keep searching\n num = len(path) - 1\n while path:\n\n # get last element of path and the remove it\n move = path[num]\n path.remove(path[num])\n self.gm.makeMove(move)\n new_state = self.gm.getGameState()\n num = num - 1\n\n for child in self.currentState.children:\n if child.state == new_state:\n self.currentState = child\n self.visited[self.currentState] = True # set visited flag to make sure we don't visit again\n break\n break\n\n return False",
"def search_recursively(self, node, search_node):\n\n comparison = self.compare(node, search_node)\n\n if comparison == 0:\n return True\n elif comparison == -1: # traverse left\n if node.has_left_child():\n return self.search_recursively(node.get_left_child(), search_node)\n else:\n return False\n else: #i.e. if comparison == 1, traverse right\n if node.has_right_child():\n return self.search_recursively(node.get_right_child(), search_node)\n else:\n return False",
"def RDFSrecursive():\n\n visited = [[False for x in range(width)] for y in range(height)]\n visited[pos.y][pos.x] = True\n paths = []\n\n def move(current_pos):\n\n # While the current cell has any unvisited neighbor cells\n m = movable(current_pos, visited)\n while len(m) > 0:\n \n # Chose one randomly\n next_pos = random.choice(m)\n\n if not visited[next_pos.y][next_pos.x]:\n\n # Mark the chosen cell as visited and set as movable path\n visited[next_pos.y][next_pos.x] = True\n paths.append((current_pos, next_pos))\n\n # Invoke the routine recursively\n move(next_pos)\n \n # Remove the chosen cell from neibors\n m.remove(next_pos)\n\n move(pos)\n\n # Convert to enclosed cells\n result = draw_paths(width, height, paths)\n\n # Print the result\n [print(' '.join(x)) for x in result]",
"def search(self, initial_state):\n #dictionary key state value parent state current depth and action\n self.VisitedState={}\n #first node parent is None depth is 0\n self.VisitedState[initial_state]=(None,0,'Start')\n level=0\n nextNodes=self.getNodeByDepth(level)\n #TODO:use queue\n while len(nextNodes)>0:\n for node in nextNodes:\n if self.problem.isgoal(node):\n return node\n else:\n nextActions = self.problem.actions(node)\n for action in nextActions:\n newState=self.child_node(node,action)\n if newState not in self.VisitedState.keys():\n #skip when already visit\n self.VisitedState[newState]=(node,level+1,action)\n \n #try to get next level\n level+=1\n nextNodes=self.getNodeByDepth(level)\n return None",
"def depth_first_traverse(self, start_node):\r\n # Reset the network.\r\n self.reset_network()\r\n\r\n # Keep track of the number of nodes in the traversal.\r\n num_done = 0\r\n\r\n # Push the start node onto the stack.\r\n stack = []\r\n stack.append(start_node)\r\n\r\n # Visit the start node.\r\n traversal = []\r\n traversal.append(start_node)\r\n start_node.visited = True\r\n start_node.text = f\"{num_done}\"\r\n num_done += 1\r\n\r\n # Process the stack until it's empty.\r\n while len(stack) > 0:\r\n # Get the next node from the stack.\r\n node = stack.pop()\r\n\r\n # Process the node's links.\r\n for link in node.links:\r\n to_node = link.node1\r\n\r\n # Only use the link if the destination\r\n # node hasn't been visited.\r\n if not to_node.visited:\r\n # Mark the node as visited.\r\n to_node.visited = True\r\n to_node.text = f\"{num_done}\"\r\n num_done += 1\r\n\r\n # Add the node to the traversal.\r\n traversal.append(to_node)\r\n\r\n # Add the link to the traversal.\r\n link.visited = True\r\n\r\n # Push the node onto the stack.\r\n stack.append(to_node)\r\n\r\n # See if the network is connected.\r\n is_connected = True\r\n for node in self.all_nodes:\r\n if not node.visited:\r\n is_connected = False\r\n break\r\n\r\n return traversal, is_connected",
"def the_recursive(some_item, var_path):\n new_path = var_path + \"/\" + some_item\n\n # This appends the path of a file that ends with the specified ending, the_end.\n if some_item.endswith(the_end):\n list_of_paths.append(new_path)\n #print(\"FOUNDS SOMETHING!!!!\")\n #print(\"Appended {}\".format(some_item))\n #print(\"______________________________\\n\")\n\n # This explores another directory and calls up the_recursive. If this condition is\n # not met, then this is the last of the if statements and nothing happens.\n if os.path.isdir(new_path):\n #print(\"Exploring new path {}...\".format(new_path))\n #print(\"______________________________\\n\")\n for i in os.listdir(new_path):\n the_recursive(i, new_path)",
"def dfs_recursive(self, starting_vertex, destination_vertex, visited=None, path=None):\n if visited is None:\n visited = set()\n\n if path is None:\n path = []\n\n visited.add(starting_vertex)\n path = [*path, starting_vertex]\n\n if starting_vertex == destination_vertex:\n return path\n for n in self.get_neighbors(starting_vertex):\n\n if n not in visited:\n \n new_path = self.dfs_recursive(n, destination_vertex, visited, path)\n if new_path:\n\n return new_path\n return None",
"def serialize_recursion(self, obj):\r\n raise _SkipField",
"def depthFirstSearch(problem):\n\n #This should be the starting state of the problem I believe\n startState = problem.getStartState()\n\n \n\n #print(\"Start:\", startState)\n #print(\"Is the start a goal?\", problem.isGoalState(startState))\n #print(\"Start's successors:\", problem.getSuccessors(startState))\n\n \"*** YOUR CODE HERE ***\"\n \n #This was the original algorithm. It finds the path but doesn't record it.\n #I altered it to make it so I could record it\n \"\"\"\n #This is the stack that holds all the states\n \n #It has to be a stack as it is depth first search,\n #so the last node opened is the last explored\n stateStack = Stack()\n stateStack.push(startState)\n \n #We make an empty set for visited so we can fast check if visited\n #possesses the current item\n visited = {}\n \n #Holds the currently followed path\n #We make it a stack as it needs to be able to remove the\n #most recent node visited if it's not along the path to the goal\n currentPath = []\n\n #This holds the currentNode being evaluated\n #It's not really a node but the state, however node is easier to understand\n currentNode = stateStack.pop()\n \n #This is the while loop for the dfs that allows us to access all\n #nodes until we reach the goal state\n while problem.isGoalState(currentNode) == False:\n\n #If the current node has not been visited, operate on it\n if currentNode not in visited:\n \n #Get all the children\n children = problem.getSuccessors(currentNode)\n\n #iterate over all children and handle them\n for child in children:\n \n #This is what they called it in searchAgent.py, so that's what I'm gonna call it\n nextState, action, cost = child\n \n\n # If the child's state has not been visited, visit it\n if nextState not in visited:\n \n #Add the action to the current path\n\n #Add the nextState to the state stack\n \n\n #Mark the currentNode as visited and then set the new current node\n visited.add(currentNode)\n\n currentPath, currentNode = stateStack.pop()\n\n \n\n #This converts the currentPath Stack into an array to return\n returner = []\n while currentPath.isEmpty() == False:\n returner.append(currentPath.pop())\n\n #The return statement\n return returner\n \"\"\"\n #I'm gonna hold each state in the visited stack but I will record\n #the path to the location and the cost of said path to the array\n #So each item will be (state, pathToState, costArrayForEachDirection)\n pathHolder = []\n cost = 0\n \n\n #Holds all the nodes that have been visited\n visited = []\n\n #This holds the states, path's to the state, and the cost's to the states that have been found\n nodeStack = Stack()\n \n #Add the first item to the stack\n nodeStack.push( (startState, pathHolder, cost) ) \n\n #Holds the temps that get the Nodes of the state\n while nodeStack.isEmpty() == False:\n #Get the next node in the state stack\n currentState, currentPath, currentCost = nodeStack.pop()\n \n #Check to see if the current state has been visited before\n #if has not been visited, handle it\n #else ignore it\n if currentState not in visited:\n #Add it to the visited node set\n visited.append(currentState)\n\n #If the currentNode's state is the goal state, return the path to the current node\n if problem.isGoalState(currentState):\n return currentPath\n\n #Add all of it's children with their path's and their costArrays\n #to the state stack\n for child in problem.getSuccessors(currentState):\n \n # Need to\n\n\n #Get all the values seperated\n childState, childDirection, costToChild = child\n \n #Add the new child with it's 
direction appended to the array and the cost added\n #Creates the new sub items of the nodes\n childPath = currentPath + [childDirection]\n childCost = currentCost + costToChild\n\n nodeStack.push( ( childState , childPath, childCost) )\n\n #If it gets here, that means the goalState is not accessable from the currentState and you fucked up somehow\n if debug == True:\n print(visited)\n # So return an empty path\n #return []\n\n #DEBUG ONLY\n if debug == True:\n print(visited)\n #util.raiseNotDefined()",
"def recursiveSolve(self):\n\tif not self.isValid(): return False\n\temptyCell = self.findEmpty()\n\tif emptyCell == None: return self.isSolved()\n\tfor p in self.possibleValues(emptyCell[0], emptyCell[1]):\n\t\tself.grid[emptyCell[0]][emptyCell[1]] = p\n\t\tif self.recursiveSolve():\n\t\t\treturn True\n\t# We tried everything, nothing worked.\n\tself.grid[emptyCell[0]][emptyCell[1]] = 0\n\treturn False",
"def visited(state: State) -> bool:\n global visited_states\n key = Game.state_string(state)\n seen = key in visited_states\n if not seen:\n visited_states.add(key)\n #print('solver.py visited: key =', key)\n return seen",
"def dfs(root, visited=set()):\n visited.add(root)\n print(root.data)\n for n in root.children:\n if n not in visited:\n dfs(n)\n return visited"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd/config (container)
|
def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
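
Container setters such as _set_config differ from leaf setters only in what they validate against: the incoming value must be (or coerce to) an instance of the generated container class. A hedged sketch with a hypothetical stand-in class (not the real yc_config_* binding):

class ConfigContainer:
    # Hypothetical stand-in for the generated yc_config_* container class.
    pass

def set_config_like(v):
    # Containers are validated by type rather than by value coercion.
    if not isinstance(v, ConfigContainer):
        raise ValueError({
            'error-string': "config must be of a type compatible with container",
            'defined-type': "container",
        })
    return v

set_config_like(ConfigContainer())      # accepted
# set_config_like("not a container")    # would raise ValueError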
|
[
"def _set_enable_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"enable_bfd must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__enable_bfd = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def set_config(self, value):\n try:\n self.validate(config=value)\n except (KeyError, ValueError) as e:\n print(\"Config not set, encountered error %s\" % e.msg)\n\n self.config = value",
"def enable_peer_bfd(self, **kwargs):\n rbridge_id = kwargs.pop('rbridge_id', '1')\n peer_ip = kwargs.pop('peer_ip')\n delete = kwargs.pop('delete', False)\n get = kwargs.pop('get', False)\n feature_tmp = '_neighbor{0}_bfd'\n callback = kwargs.pop('callback', self._callback)\n vrf = kwargs.pop('vrf', 'default')\n ip_addr = ip_interface(unicode(peer_ip))\n afi = 'ipv4' if ip_addr.version == 4 else 'ipv6'\n if vrf == 'default':\n if 'ipv4' == afi:\n feature = feature_tmp.format('_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_neighbor_ipv6_addr')\n afi = None\n elif 'ipv4' == afi:\n feature = feature_tmp.format('_af_ipv4_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_af_ipv6_neighbor_addr')\n if delete:\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n op='_bfd_enable_delete',\n os=self.os)\n return callback(config)\n if get:\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n resource_depth=2,\n op='_get',\n os=self.os)\n ret = callback(config, handler='get_config')\n bgp = Util(ret.data)\n ret = bgp.findall(bgp.root, './/bfd-enable')\n ret = True if ret and ret[0] == 'true' else False\n return ret\n args = dict(bfd_enable=True)\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n op='_update',\n args=args,\n os=self.os)\n return callback(config)",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_vlan__vlans_vlan_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_vlan__vlans_vlan_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _enBConfig(self, ip_version=4):\n # Using exaggerated prints makes the stdout easier to read.\n print(\"************************* Enb tester config\")\n req = s1ap_types.FwNbConfigReq_t()\n req.cellId_pr.pres = True\n req.cellId_pr.cell_id = 10\n req.ip_version = ip_version\n assert self._s1_util.issue_cmd(s1ap_types.tfwCmd.ENB_CONFIG, req) == 0\n response = self._s1_util.get_response()\n assert response.msg_type == s1ap_types.tfwCmd.ENB_CONFIG_CONFIRM.value\n res = response.cast(s1ap_types.FwNbConfigCfm_t)\n assert res.status == s1ap_types.CfgStatus.CFG_DONE.value",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_platform__components_component_port_breakout_mode_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform/port', defining_module='openconfig-platform-port', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_platform__components_component_port_breakout_mode_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform/port', defining_module='openconfig-platform-port', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def set_config(self, config):\r\n todo = deque([self])\r\n while todo:\r\n node = todo.popleft()\r\n node.config = config\r\n todo.extend(node.iter_child_nodes())\r\n return self",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_probes__probes_probe_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_probes__probes_probe_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def define_config(self, config: str) -> None:\n self.config = config",
"def configure_cjdroute_conf():\n log.info(\"Configuring cjdroute config\")\n\n mapping = get_config_mapping()\n cjdns_secret = get_cjdns_password(mapping)\n cjdroute_config = OrderedDict(load_json(CJDROUTE_CONF_PATH))\n cjdroute_config['authorizedPasswords'] = [{\n 'password': cjdns_secret,\n }]\n neighbours = parse_cjdns_neighbours(mapping)\n neighbour_items = list(neighbours.items())\n # A python3 dict is non-deterministic but not random\n shuffle(neighbour_items)\n shuffled_neighbours = OrderedDict(neighbour_items)\n cjdroute_config['interfaces']['UDPInterface'] = [{\n 'connectTo': shuffled_neighbours,\n 'bind': '0.0.0.0:{}'.format(conf().CJDNS_DEFAULT_PORT)\n }]\n cjdroute_config['interfaces']['ETHInterface'] = [{\n # Disable peer auto-discovery\n 'beacon': 0,\n 'bind': 'all',\n 'connectTo': {}\n }]\n write_json(cjdroute_config, CJDROUTE_CONF_PATH, sort_keys=False)",
"def update_drbd_config(self, context):\n return self.call(context, self.make_msg('update_drbd_config'))",
"def _get_enable_bfd(self):\n return self.__enable_bfd",
"def _set_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=bfd.bfd, is_container='container', presence=False, yang_name=\"bfd\", rest_name=\"bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD operation mode on this interface'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"bfd must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name=\"bfd\", rest_name=\"bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD operation mode on this interface'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__bfd = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd/state (container)
|
def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
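
Every setter in these records starts with the same _utype check: a value that knows how to unwrap itself is converted before validation runs. A small sketch of just that step, with a hypothetical Wrapped type standing in for pyangbind's wrapped values:

class Wrapped:
    # Hypothetical wrapper exposing the _utype unwrapping hook.
    def __init__(self, inner):
        self._inner = inner

    def _utype(self, v):
        return v._inner

def coerce(v):
    # Mirrors the first two lines of each generated setter above.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    return v

assert coerce(Wrapped(42)) == 42
assert coerce(7) == 7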
|
[
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_op_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"op-state\", rest_name=\"op-state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-nsm-operational', defining_module='brocade-nsm-operational', yang_type='boolean', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"op_state must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"op-state\", rest_name=\"op-state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-nsm-operational', defining_module='brocade-nsm-operational', yang_type='boolean', is_config=False)\"\"\",\n })\n\n self.__op_state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_enable_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"enable_bfd must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__enable_bfd = t\n if hasattr(self, '_set'):\n self._set()",
"def setstate(self, state):\n if state not in VALID_STATES:\n states = ', '.join(VALID_STATES)\n raise ValueError(\"Wrong state, allowed states {}\".format(states))\n if state in ('enable', 'disable'):\n cmd = \"{} server {}/{}\".format(state, self.backendname, self.name)\n else:\n cmd = \"set server {}/{} state {}\".format(\n self.backendname, self.name, state\n )\n\n results = cmd_across_all_procs(self._server_per_proc, 'command', cmd)\n\n return check_command(results)",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_vlan__vlans_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_vlan__vlans_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def state(self, bulb_state):\n print(bulb_state)\n print(self.BULB_STATE_ON)\n print(self.BULB_STATE_OFF)\n if bulb_state == self.BULB_STATE_ON:\n bulb_state = 1\n elif bulb_state == self.BULB_STATE_OFF:\n bulb_state = 0\n else:\n raise ValueError\n \n light_state = {\n \"on_off\": bulb_state,\n }\n return self.set_light_state(light_state)",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_rapid_pvst_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_rapid_pvst_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_port_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DISABLED': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:DISABLED': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'LISTENING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:LISTENING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'LEARNING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:LEARNING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'BLOCKING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:BLOCKING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'FORWARDING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:FORWARDING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}},), is_leaf=True, yang_name=\"port-state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='identityref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"port_state must be of a type compatible with identityref\"\"\",\n 'defined-type': \"openconfig-spanning-tree:identityref\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DISABLED': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:DISABLED': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'LISTENING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:LISTENING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'LEARNING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:LEARNING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'BLOCKING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:BLOCKING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'FORWARDING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}, 'oc-stp-types:FORWARDING': {'@module': 'openconfig-spanning-tree-types', '@namespace': 'http://openconfig.net/yang/spanning-tree/types'}},), is_leaf=True, yang_name=\"port-state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, 
register_paths=True, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='identityref', is_config=False)\"\"\",\n })\n\n self.__port_state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_dot11k_neighbors_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_dot11k_neighbors_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11r_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11r_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def set_state(self, state):\n\n if state not in Metadata.valid_states:\n structlog.getLogger(LOGGERNAME).exception(f\"{state} is not a valid Metadata state\")\n raise InvalidMetadataError()\n self.data[\"state\"] = state",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_probes__probes_probe_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_probes__probes_probe_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_rstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_rstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for subinterface, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/config/subinterface (leafref)
|
def _set_subinterface(self, v, load=False):
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """subinterface must be of a type compatible with leafref""",
      'defined-type': "leafref",
      'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)""",
    })

  self.__subinterface = t
  if hasattr(self, '_set'):
    self._set()
|
[
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def update_interfaces_interface_subinterfaces_subinterface_subinterface_by_id(name, index, subinterface): # noqa: E501\n if connexion.request.is_json:\n subinterface = SubinterfaceSchema.from_dict(connexion.request.get_json()) # noqa: E501\n return 'do some magic!'",
"def create_subinterface(\n node, interface, sub_id, outer_vlan_id=None, inner_vlan_id=None,\n type_subif=None):\n subif_types = type_subif.split()\n\n flags = 0\n if u\"no_tags\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_NO_TAGS\n if u\"one_tag\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_ONE_TAG\n if u\"two_tags\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_TWO_TAGS\n if u\"dot1ad\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_DOT1AD\n if u\"exact_match\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_EXACT_MATCH\n if u\"default_sub\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_DEFAULT\n if type_subif == u\"default_sub\":\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_INNER_VLAN_ID_ANY\\\n | SubInterfaceFlags.SUB_IF_API_FLAG_OUTER_VLAN_ID_ANY\n\n cmd = u\"create_subif\"\n args = dict(\n sw_if_index=InterfaceUtil.get_interface_index(node, interface),\n sub_id=int(sub_id),\n sub_if_flags=flags.value if hasattr(flags, u\"value\")\n else int(flags),\n outer_vlan_id=int(outer_vlan_id) if outer_vlan_id else 0,\n inner_vlan_id=int(inner_vlan_id) if inner_vlan_id else 0\n )\n err_msg = f\"Failed to create sub-interface on host {node[u'host']}\"\n with PapiSocketExecutor(node) as papi_exec:\n sw_if_index = papi_exec.add(cmd, **args).get_sw_if_index(err_msg)\n\n if_key = Topology.add_new_port(node, u\"subinterface\")\n Topology.update_interface_sw_if_index(node, if_key, sw_if_index)\n ifc_name = InterfaceUtil.vpp_get_interface_name(node, sw_if_index)\n Topology.update_interface_name(node, if_key, ifc_name)\n\n return f\"{interface}.{sub_id}\", sw_if_index",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def retrieve_interfaces_interface_subinterfaces_subinterface_subinterface_by_id(name, index): # noqa: E501\n return 'do some magic!'",
"def create_vlan_subinterface(node, interface, vlan):\n sw_if_index = InterfaceUtil.get_interface_index(node, interface)\n\n cmd = u\"create_vlan_subif\"\n args = dict(\n sw_if_index=sw_if_index,\n vlan_id=int(vlan)\n )\n err_msg = f\"Failed to create VLAN sub-interface on host {node[u'host']}\"\n\n with PapiSocketExecutor(node) as papi_exec:\n sw_if_index = papi_exec.add(cmd, **args).get_sw_if_index(err_msg)\n\n if_key = Topology.add_new_port(node, u\"vlan_subif\")\n Topology.update_interface_sw_if_index(node, if_key, sw_if_index)\n ifc_name = InterfaceUtil.vpp_get_interface_name(node, sw_if_index)\n Topology.update_interface_name(node, if_key, ifc_name)\n\n return f\"{interface}.{vlan}\", sw_if_index",
"def update_interfaces_interface_subinterfaces_subinterfaces_by_id(name, subinterfaces): # noqa: E501\n if connexion.request.is_json:\n subinterfaces = SubinterfacesSchema.from_dict(connexion.request.get_json()) # noqa: E501\n return 'do some magic!'"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for interface, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/state/interface (leafref)
|
def _set_interface(self, v, load=False):
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """interface must be of a type compatible with leafref""",
      'defined-type': "leafref",
      'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)""",
    })

  self.__interface = t
  if hasattr(self, '_set'):
    self._set()
|
[
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='base-interface-ref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with base-interface-ref\"\"\",\n 'defined-type': \"openconfig-vlan:base-interface-ref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='base-interface-ref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_ref(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name=\"interface-ref\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_ref must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name=\"interface-ref\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interface_ref = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def set_interface_state(node, interface, state, if_type=u\"key\"):\n if if_type == u\"key\":\n if isinstance(interface, str):\n sw_if_index = Topology.get_interface_sw_index(node, interface)\n iface_name = Topology.get_interface_name(node, interface)\n else:\n sw_if_index = interface\n elif if_type == u\"name\":\n iface_key = Topology.get_interface_by_name(node, interface)\n if iface_key is not None:\n sw_if_index = Topology.get_interface_sw_index(node, iface_key)\n iface_name = interface\n else:\n raise ValueError(f\"Unknown if_type: {if_type}\")\n\n if node[u\"type\"] == NodeType.DUT:\n if sw_if_index is None:\n raise ValueError(\n f\"Interface index for {interface} not assigned by VPP.\"\n )\n if state == u\"up\":\n flags = InterfaceStatusFlags.IF_STATUS_API_FLAG_ADMIN_UP.value\n elif state == u\"down\":\n flags = 0\n else:\n raise ValueError(f\"Unexpected interface state: {state}\")\n cmd = u\"sw_interface_set_flags\"\n err_msg = f\"Failed to set interface state on host {node[u'host']}\"\n args = dict(\n sw_if_index=int(sw_if_index),\n flags=flags\n )\n with PapiSocketExecutor(node) as papi_exec:\n papi_exec.add(cmd, **args).get_reply(err_msg)\n elif node[u\"type\"] == NodeType.TG or node[u\"type\"] == NodeType.VM:\n cmd = f\"ip link set {iface_name} {state}\"\n exec_cmd_no_error(node, cmd, sudo=True)\n else:\n raise ValueError(\n f\"Node {node[u'host']} has unknown NodeType: {node[u'type']}\"\n )",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"name\",yc_interface_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"name\",yc_interface_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for subinterface, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/state/subinterface (leafref)
|
def _set_subinterface(self, v, load=False):
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """subinterface must be of a type compatible with leafref""",
      'defined-type': "leafref",
      'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="subinterface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)""",
    })

  self.__subinterface = t
  if hasattr(self, '_set'):
    self._set()
|
[
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def update_interfaces_interface_subinterfaces_subinterface_subinterface_by_id(name, index, subinterface): # noqa: E501\n if connexion.request.is_json:\n subinterface = SubinterfaceSchema.from_dict(connexion.request.get_json()) # noqa: E501\n return 'do some magic!'",
"def create_subinterface(\n node, interface, sub_id, outer_vlan_id=None, inner_vlan_id=None,\n type_subif=None):\n subif_types = type_subif.split()\n\n flags = 0\n if u\"no_tags\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_NO_TAGS\n if u\"one_tag\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_ONE_TAG\n if u\"two_tags\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_TWO_TAGS\n if u\"dot1ad\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_DOT1AD\n if u\"exact_match\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_EXACT_MATCH\n if u\"default_sub\" in subif_types:\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_DEFAULT\n if type_subif == u\"default_sub\":\n flags = flags | SubInterfaceFlags.SUB_IF_API_FLAG_INNER_VLAN_ID_ANY\\\n | SubInterfaceFlags.SUB_IF_API_FLAG_OUTER_VLAN_ID_ANY\n\n cmd = u\"create_subif\"\n args = dict(\n sw_if_index=InterfaceUtil.get_interface_index(node, interface),\n sub_id=int(sub_id),\n sub_if_flags=flags.value if hasattr(flags, u\"value\")\n else int(flags),\n outer_vlan_id=int(outer_vlan_id) if outer_vlan_id else 0,\n inner_vlan_id=int(inner_vlan_id) if inner_vlan_id else 0\n )\n err_msg = f\"Failed to create sub-interface on host {node[u'host']}\"\n with PapiSocketExecutor(node) as papi_exec:\n sw_if_index = papi_exec.add(cmd, **args).get_sw_if_index(err_msg)\n\n if_key = Topology.add_new_port(node, u\"subinterface\")\n Topology.update_interface_sw_if_index(node, if_key, sw_if_index)\n ifc_name = InterfaceUtil.vpp_get_interface_name(node, sw_if_index)\n Topology.update_interface_name(node, if_key, ifc_name)\n\n return f\"{interface}.{sub_id}\", sw_if_index",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def retrieve_interfaces_interface_subinterfaces_subinterface_subinterface_by_id(name, index): # noqa: E501\n return 'do some magic!'",
"def create_vlan_subinterface(node, interface, vlan):\n sw_if_index = InterfaceUtil.get_interface_index(node, interface)\n\n cmd = u\"create_vlan_subif\"\n args = dict(\n sw_if_index=sw_if_index,\n vlan_id=int(vlan)\n )\n err_msg = f\"Failed to create VLAN sub-interface on host {node[u'host']}\"\n\n with PapiSocketExecutor(node) as papi_exec:\n sw_if_index = papi_exec.add(cmd, **args).get_sw_if_index(err_msg)\n\n if_key = Topology.add_new_port(node, u\"vlan_subif\")\n Topology.update_interface_sw_if_index(node, if_key, sw_if_index)\n ifc_name = InterfaceUtil.vpp_get_interface_name(node, sw_if_index)\n Topology.update_interface_name(node, if_key, ifc_name)\n\n return f\"{interface}.{vlan}\", sw_if_index",
"def update_interfaces_interface_subinterfaces_subinterfaces_by_id(name, subinterfaces): # noqa: E501\n if connexion.request.is_json:\n subinterfaces = SubinterfacesSchema.from_dict(connexion.request.get_json()) # noqa: E501\n return 'do some magic!'"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
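Every query/document pair in these rows targets a pyangbind-generated setter, and all of them share one validation shape: wrap the incoming value in YANGDynClass with the leaf's declared base type, and convert any TypeError/ValueError into a structured ValueError whose payload carries 'error-string', 'defined-type' and 'generated-type' keys. The following is a minimal sketch of that shape only — it is NOT pyangbind's YANGDynClass; the class name, the type check, and the trimmed error payload are illustrative stand-ins:

# Minimal sketch of the validation pattern shared by the generated
# _set_* methods in these rows. NOT pyangbind's YANGDynClass; this
# only mimics the try/except shape of the generated setters.

class LeafrefSetterSketch:
    def __init__(self):
        self.__interface = None

    def _set_interface(self, v, load=False):
        try:
            # Stand-in for YANGDynClass(v, base=six.text_type, ...):
            # accept only string-compatible values, failing loudly.
            if not isinstance(v, str):
                raise TypeError("not a string-compatible value")
            t = v
        except (TypeError, ValueError):
            # The generated code re-raises as a ValueError whose payload
            # is a dict with 'error-string', 'defined-type' and
            # 'generated-type' keys (the last is omitted in this sketch).
            raise ValueError({
                'error-string': "interface must be of a type compatible with leafref",
                'defined-type': "leafref",
            })
        self.__interface = t

s = LeafrefSetterSketch()
s._set_interface("GigabitEthernet0/0/0")   # accepted
try:
    s._set_interface(42)                   # rejected: wrong base type
except ValueError as err:
    print(err.args[0]['error-string'])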
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/config (container)
|
def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_input_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_input_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_interfaces_interface_input_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_interfaces_interface_input_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_classifiers_classifier_terms_term_conditions_ipv6_icmpv6_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_classifiers_classifier_terms_term_conditions_ipv6_icmpv6_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_input_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_input_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_classifiers_classifier_terms_term_conditions_ipv6_icmpv6_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_classifiers_classifier_terms_term_conditions_ipv6_icmpv6_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_interfaces_interface_input_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_interfaces_interface_input_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_vlan__vlans_vlan_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_vlan__vlans_vlan_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_ref(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name=\"interface-ref\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_ref must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name=\"interface-ref\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interface_ref = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_interfaces_interface_output_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_interfaces_interface_output_classifiers_classifier_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
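The next row pairs a state-container setter with the config container above: OpenConfig models split every node into a config/ subtree (intended configuration) and a state/ subtree (applied/operational data), and pyangbind emits one generated setter per container. A self-contained conceptual sketch of that split follows; the Leaf class is illustrative, not pyangbind's, and note that the generated code in these rows sometimes marks even state containers is_config=True at the container level while the leaves inside carry is_config=False:

# Conceptual sketch of the OpenConfig config/state split mirrored by
# these setters. 'Leaf' is illustrative, not pyangbind: config leaves
# accept user writes; state leaves only change via load=True (the
# system/operational side), mirroring the is_config flag.

class Leaf:
    def __init__(self, name, is_config):
        self.name = name
        self.is_config = is_config
        self._value = None

    def set(self, v, load=False):
        # State leaves (is_config=False) change only when loaded from
        # operational data, never via user configuration.
        if not self.is_config and not load:
            raise AttributeError(f"{self.name} is operational state; read-only")
        self._value = v

interface_ref_config = Leaf("interface", is_config=True)
interface_ref_state = Leaf("interface", is_config=False)

interface_ref_config.set("GigabitEthernet0/0/0")             # user intent: OK
interface_ref_state.set("GigabitEthernet0/0/0", load=True)   # system applied: OK
try:
    interface_ref_state.set("eth1")                          # user write to state: rejected
except AttributeError as err:
    print(err)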
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref/state (container)
|
def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_interfaces_interface_input_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_interfaces_interface_input_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_interfaces_interface_input_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_interfaces_interface_input_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_interfaces_interface_output_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_interfaces_interface_output_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_interfaces_interface_output_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_interfaces_interface_output_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_interfaces_interface_input_classifiers_classifier_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_interfaces_interface_input_classifiers_classifier_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_vlan__vlans_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_vlan__vlans_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_connection_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_connection_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_access_points__access_points_access_point_system_grpc_server_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_access_points__access_points_access_point_system_grpc_server_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_interfaces_interface_input_classifiers_classifier_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_interfaces_interface_input_classifiers_classifier_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for index, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/index (leafref)
|
def _set_index(self, v, load=False):
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
        raise AttributeError("Cannot set keys directly when" +
                             " within an instantiated list")

    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """index must be of a type compatible with leafref""",
            'defined-type': "leafref",
            'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)""",
        })

    self.__index = t
    if hasattr(self, '_set'):
        self._set()
|
[
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"index must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__index = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_index(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/platform/transceiver', defining_module='openconfig-platform-transceiver', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"index must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/index', caller=self._path() + ['index'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/platform/transceiver', defining_module='openconfig-platform-transceiver', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__index = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_index(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"index must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"index\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__index = t\n if hasattr(self, '_set'):\n self._set()",
"def edit_index(self, index, reindex=False, ind_kwargs=None):\n if ind_kwargs is None:\n ind_kwargs = {}\n ind_obj, name = self.__write_index(index, -1, edit=True)\n old = next(x for x in self.indexes if x.name == name)\n old.close_index()\n index_of_index = self.indexes.index(old)\n ind_obj.open_index()\n self.indexes[index_of_index] = ind_obj\n self.indexes_names[name] = ind_obj\n if reindex:\n self.reindex_index(name)\n return name",
"def getIndex(self, index: 'int const') -> \"int\":\n return _coin.SoPath_getIndex(self, index)",
"def addIndex (self, index):\n\n self.indexes.append (index)\n self.indexMap [index.name] = index",
"def getIndex(self, index: 'int const') -> \"int\":\n return _coin.SoLightPath_getIndex(self, index)",
"def set_reference_index(self, value): # pragma: no cover\n pass",
"def is_index(self, is_index):\n\n self._is_index = is_index",
"def vertexIndex(*args, **kwargs):\n \n pass",
"def _set_index(self, index):\n if index not in INDEX_NAMES:\n available_keys = \", \".join(list(INDEX_NAMES.keys()))\n msg = (f\"{index} key is not avaiable. Available keys: \"\n f\"{available_keys}\")\n logger.error(msg)\n raise KeyError(msg)\n self.index = index\n self.index_name = INDEX_NAMES[index]",
"def post_labelindex(server, uuid, instance, label, proto_index, *, session=None):\n payload = None\n assert isinstance(proto_index, (bytes, LabelIndex))\n if isinstance(proto_index, LabelIndex):\n assert proto_index.label == label\n payload = proto_index.SerializeToString()\n elif isinstance(proto_index, bytes):\n payload = proto_index\n\n r = session.post(f'{server}/api/node/{uuid}/{instance}/index/{label}', data=payload)\n r.raise_for_status()",
"def __bwaIndex(self, out): \n self.__CallCommand('mkdir', ['mkdir', '-p', out])\n self.__CallCommand('cp', ['cp', self.reference, out + \"/ref.fa\"])\n self.reference = out + \"/ref.fa\"\n self.__CallCommand('bwa index', [self.__bwa, 'index', self.reference])\n self.__CallCommand('CreateSequenceDictionary', ['java', '-jar', self.__picard, \n 'CreateSequenceDictionary', 'R='+self.reference,'O='+ out + \"/ref.dict\"])\n self.__CallCommand('samtools faidx', [self.__samtools, 'faidx', self.reference ])",
"def set_index(self):\n \n HarvestManUrlParser.IDX += 1\n self.index = HarvestManUrlParser.IDX",
"def getIndexFromTail(self, index: 'int const') -> \"int\":\n return _coin.SoPath_getIndexFromTail(self, index)",
"def insertIndex(self, parent: 'SoNode', newindex: 'int const') -> \"void\":\n return _coin.SoPath_insertIndex(self, parent, newindex)",
"def handle_set_page_index(self, index):\n pass",
"def getIndexFromTail(self, index: 'int const') -> \"int\":\n return _coin.SoFullPath_getIndexFromTail(self, index)",
"def index(self, index):\n raise NotImplementedError, \\\n \"there is no standard CREATE INDEX construct (or is there? let me know!)\""
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
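For context on the record above: the guard at the top of the generated setter is what makes a YANG list key immutable once its entry lives inside an instantiated list; keys are assigned through the list itself, and the setter only accepts a direct write during load. The following is a minimal standalone sketch of that pattern (it is not pyangbind code; the NextHop/NextHopList names and the str() coercion standing in for YANGDynClass are illustrative assumptions):

class NextHop(object):
    def __init__(self, index=None, parent=None):
        self._parent = parent          # set when the entry is held by a list
        self.__index = None
        self._set_index(index, load=True)

    def _set_index(self, v, load=False):
        # mirrors the key guard in the generated setter above
        parent = getattr(self, "_parent", None)
        if parent is not None and load is False:
            raise AttributeError("Cannot set keys directly when"
                                 " within an instantiated list")
        self.__index = str(v)          # crude stand-in for YANGDynClass coercion

    @property
    def index(self):
        return self.__index

class NextHopList(dict):
    def add(self, key):
        # keys are assigned through the list, mirroring how generated
        # bindings expose an add() on YANG lists
        entry = NextHop(index=key, parent=self)
        self[key] = entry
        return entry

hops = NextHopList()
nh = hops.add("AUTO_1")
print(nh.index)                        # -> AUTO_1
try:
    nh._set_index("AUTO_2")            # direct key write is rejected
except AttributeError as exc:
    print(exc)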
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/config (container)
|
def _set_config(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
        })

    self.__config = t
    if hasattr(self, '_set'):
        self._set()
|
[
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11r_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11r_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11v_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11v_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_rstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_rstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_wmm_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_wmm_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_dot1x_timers_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_dot1x_timers_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_probes__probes_probe_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_probes__probes_probe_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_platform__components_component_port_breakout_mode_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform/port', defining_module='openconfig-platform-port', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_platform__components_component_port_breakout_mode_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform/port', defining_module='openconfig-platform-port', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_input_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_input_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
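The record above shows the validate-then-wrap shape shared by all of these generated setters: type checking is delegated to a wrapper (YANGDynClass in pyangbind), and any TypeError/ValueError is re-raised as a ValueError carrying a structured error dict. A standalone sketch of that pattern follows; the Config/Static classes and the dyn_class stand-in are illustrative assumptions, not pyangbind internals:

class Config(object):
    """Stand-in for the generated yc_config_... container class."""
    def __init__(self, next_hop=None):
        self.next_hop = next_hop

def dyn_class(v, base):
    # crude stand-in for YANGDynClass: accept instances of `base`,
    # otherwise raise TypeError like a failed type coercion would
    if isinstance(v, base):
        return v
    raise TypeError("%r is not a %s" % (v, base.__name__))

def set_config(obj, v):
    try:
        t = dyn_class(v, base=Config)
    except (TypeError, ValueError):
        # re-raise with the structured dict the generated code uses
        raise ValueError({
            'error-string': "config must be of a type compatible with container",
            'defined-type': "container",
        })
    obj._config = t

class Static(object):
    pass

s = Static()
set_config(s, Config(next_hop="192.0.2.1"))
print(s._config.next_hop)              # -> 192.0.2.1
try:
    set_config(s, "not-a-container")   # wrong type is rejected
except ValueError as exc:
    print(exc.args[0]['error-string'])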
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/state (container)
|
def _set_state(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """state must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
        })

    self.__state = t
    if hasattr(self, '_set'):
        self._set()
|
[
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11r_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11r_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_dot11k_neighbors_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_dot11k_neighbors_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_rstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_rstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11v_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11v_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_rapid_pvst_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_rapid_pvst_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_connection_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_connection_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_access_points__access_points_access_point_system_grpc_server_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_access_points__access_points_access_point_system_grpc_server_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_wmm_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_wmm_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
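Note that the config and state setters above are structurally identical; what varies across records is the is_config flag passed to YANGDynClass, and several of the negatives (the openconfig-wifi-mac client state containers, for example) carry is_config=False for purely operational subtrees. A toy wrapper showing how such a flag could gate direct writes to operational data is sketched below; the gating itself is an assumption for illustration, not documented pyangbind behaviour:

class Container(object):
    def __init__(self, is_config=True):
        self._is_config = is_config
        self._data = {}

    def set_leaf(self, name, value, load=False):
        # load=True models the binding populating itself from the device
        if not self._is_config and not load:
            raise AttributeError("%s is operational state (config false)" % name)
        self._data[name] = value

config = Container(is_config=True)
config.set_leaf("metric", 10)            # allowed: configuration data

state = Container(is_config=False)
state.set_leaf("metric", 10, load=True)  # allowed: loaded from the device
try:
    state.set_leaf("metric", 20)         # rejected: direct client write
except AttributeError as exc:
    print(exc)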
Getter method for enable_bfd, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd (container)
|
def _get_enable_bfd(self):
return self.__enable_bfd
|
[
"def _set_enable_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"enable_bfd must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name=\"enable-bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__enable_bfd = t\n if hasattr(self, '_set'):\n self._set()",
"def enable_peer_bfd(self, **kwargs):\n rbridge_id = kwargs.pop('rbridge_id', '1')\n peer_ip = kwargs.pop('peer_ip')\n delete = kwargs.pop('delete', False)\n get = kwargs.pop('get', False)\n feature_tmp = '_neighbor{0}_bfd'\n callback = kwargs.pop('callback', self._callback)\n vrf = kwargs.pop('vrf', 'default')\n ip_addr = ip_interface(unicode(peer_ip))\n afi = 'ipv4' if ip_addr.version == 4 else 'ipv6'\n if vrf == 'default':\n if 'ipv4' == afi:\n feature = feature_tmp.format('_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_neighbor_ipv6_addr')\n afi = None\n elif 'ipv4' == afi:\n feature = feature_tmp.format('_af_ipv4_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_af_ipv6_neighbor_addr')\n if delete:\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n op='_bfd_enable_delete',\n os=self.os)\n return callback(config)\n if get:\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n resource_depth=2,\n op='_get',\n os=self.os)\n ret = callback(config, handler='get_config')\n bgp = Util(ret.data)\n ret = bgp.findall(bgp.root, './/bfd-enable')\n ret = True if ret and ret[0] == 'true' else False\n return ret\n args = dict(bfd_enable=True)\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n op='_update',\n args=args,\n os=self.os)\n return callback(config)",
"def _set_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=bfd.bfd, is_container='container', presence=False, yang_name=\"bfd\", rest_name=\"bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD operation mode on this interface'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"bfd must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name=\"bfd\", rest_name=\"bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD operation mode on this interface'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__bfd = t\n if hasattr(self, '_set'):\n self._set()",
"def bgp_next_hop_trigger_enable(self, bgp_next_hop_trigger_enable):\n\n self._bgp_next_hop_trigger_enable = bgp_next_hop_trigger_enable",
"def enable_di(self, value):\n self._set_property('enable_di', value)",
"def enable_fpga(self) -> Optional[bool]:\n return pulumi.get(self, \"enable_fpga\")",
"def get_block_device_enable_status(self):\n response = self._get(\n c.SERVICE_DEVICE_CONFIG, c.GET_BLOCK_DEVICE_ENABLE_STATUS\n )\n return h.zero_or_one_dict_to_boolean(response)",
"def configure_ospf_bfd(module, vrouter, ip, CHANGED_FLAG, task, msg):\n cli = pn_cli(module)\n clicopy = cli\n cli += ' vrouter-interface-show vrouter-name %s' % vrouter\n cli += ' ip %s format nic no-show-headers ' % ip\n nic_interface = run_command(module, cli, task, msg).split()\n nic_interface = list(set(nic_interface))\n nic_interface.remove(vrouter)\n\n cli = clicopy\n cli += ' vrouter-interface-config-show vrouter-name %s' % vrouter\n cli += ' nic %s format ospf-bfd no-show-headers ' % nic_interface[0]\n ospf_status = run_command(module, cli, task, msg).split()\n ospf_status = list(set(ospf_status))\n\n cli = clicopy\n cli += ' vrouter-show name ' + vrouter\n cli += ' format location no-show-headers '\n switch = run_command(module, cli, task, msg).split()[0]\n\n if 'Success' in ospf_status:\n cli = clicopy\n cli += ' vrouter-interface-config-add vrouter-name %s' % vrouter\n cli += ' nic %s ospf-bfd enable' % nic_interface[0]\n if 'Success' in run_command(module, cli, task, msg):\n CHANGED_FLAG.append(True)\n return ' %s: Added OSPF BFD config to %s \\n' % (switch, vrouter)\n elif 'enable' not in ospf_status:\n ospf_status.remove(vrouter)\n cli = clicopy\n cli += ' vrouter-interface-config-modify vrouter-name %s' % vrouter\n cli += ' nic %s ospf-bfd enable' % nic_interface[0]\n if 'Success' in run_command(module, cli, task, msg):\n CHANGED_FLAG.append(True)\n output = ' %s: Enabled OSPF BFD for %s \\n' % (switch, vrouter)\n return CHANGED_FLAG, output\n else:\n return CHANGED_FLAG, ''",
"def appdev_enable(self, appdev_enable):\n\n self._appdev_enable = appdev_enable",
"def r_is_ha_enabled(self):\r\n v = self.r_get(brcdapi_util.bc_ha_enabled)\r\n return False if v is None else v",
"def _set_enable(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"enable\", rest_name=\"enable\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable global sflow'}}, namespace='urn:brocade.com:mgmt:brocade-sflow', defining_module='brocade-sflow', yang_type='empty', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"enable must be of a type compatible with empty\"\"\",\n 'defined-type': \"empty\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"enable\", rest_name=\"enable\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable global sflow'}}, namespace='urn:brocade.com:mgmt:brocade-sflow', defining_module='brocade-sflow', yang_type='empty', is_config=True)\"\"\",\n })\n\n self.__enable = t\n if hasattr(self, '_set'):\n self._set()",
"def enable_handles(dwg):\r\n def has_handle(tags, handle_code):\r\n for tag in tags.noclass:\r\n if tag.code == handle_code:\r\n return True\r\n return False\r\n\r\n def put_handles_into_entity_tags():\r\n for handle, tags in dwg.entitydb.items():\r\n is_not_dimstyle = tags.noclass[0] != (0, 'DIMSTYLE')\r\n handle_code = 5 if is_not_dimstyle else 105 # legacy shit!!!\r\n if not has_handle(tags, handle_code):\r\n tags.noclass.insert(1, DXFTag(handle_code, handle)) # handle should be the 2. tag\r\n\r\n if dwg.dxfversion > 'AC1009':\r\n return\r\n put_handles_into_entity_tags()\r\n dwg.header['$HANDLING'] = 1",
"def VplsEnableNextHop(self):\n return self._get_attribute('vplsEnableNextHop')",
"def enable_oeenclave_debug(oe_enclave_addr, enclave_path):\n # Check if the magic matches.\n enclave_blob = read_from_memory(oe_enclave_addr, OE_ENCLAVE_HEADER_LENGTH)\n enclave_tuple = struct.unpack(OE_ENCLAVE_HEADER_FORMAT, enclave_blob)\n if enclave_tuple[OE_ENCLAVE_MAGIC_FIELD] != OE_ENCLAVE_MAGIC_VALUE:\n return False\n # Check if it's SGX debug mode enclave.\n flags_blob = read_from_memory(oe_enclave_addr + OE_ENCLAVE_FLAGS_OFFSET, OE_ENCLAVE_FLAGS_LENGTH)\n flags_tuple = struct.unpack(OE_ENCLAVE_FLAGS_FORMAT, flags_blob)\n\n # Check if debugging is enabled.\n if flags_tuple[0] == 0:\n print (\"oe-gdb: Debugging not enabled for enclave %s\" % enclave_path)\n return False\n\n # Check if the enclave is loaded in simulation mode.\n if flags_tuple[1] != 0:\n print (\"oe-gdb: Enclave %s loaded in simulation mode\" % enclave_path)\n\n # Load symbol.\n if load_enclave_symbol(enclave_path, enclave_tuple[OE_ENCLAVE_ADDR_FIELD]) != 1:\n return False\n # Set debug flag for each TCS in this enclave.\n thread_binding_addr = oe_enclave_addr + OE_ENCLAVE_THREAD_BINDING_OFFSET\n thread_binding_blob = read_from_memory(thread_binding_addr, THREAD_BINDING_HEADER_LENGTH)\n thread_binding_tuple = struct.unpack(THREAD_BINDING_HEADER_FORMAT, thread_binding_blob)\n while thread_binding_tuple[0] > 0 :\n # print (\"tcs address {0:#x}\" .format(thread_binding_tuple[0]))\n set_tcs_debug_flag(thread_binding_tuple[0])\n # Iterate the array\n thread_binding_addr = thread_binding_addr + THREAD_BINDING_SIZE\n thread_binding_blob = read_from_memory(thread_binding_addr, THREAD_BINDING_HEADER_LENGTH)\n thread_binding_tuple = struct.unpack(THREAD_BINDING_HEADER_FORMAT, thread_binding_blob)\n return True",
"def setEndianness(self, is_little):\n if is_little:\n self.compile_flags += ('EL',)\n self.link_flags += ('EL',)\n else:\n self.compile_flags += ('EB',)\n self.link_flags += ('EB',)",
"def flash_set_lba(self, enable):\n self._set_flag_cmd(CMD_FL_LBA, enable)",
"def enable_dhcp(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_dhcp\")",
"def get_parental_control_enable_status(self):\n response = self._get(\n c.SERVICE_PARENTAL_CONTROL, c.GET_PARENTAL_CONTROL_ENABLE_STATUS\n )\n return h.zero_or_one_dict_to_boolean(response)",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
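The getter row above is one half of a pyangbind-style property pair: a name-mangled private attribute exposed through `_get_*`/`_set_*` methods. Below is a rough, self-contained sketch of that wiring; `ContainerStub` and `NextHopStub` are invented stand-ins for illustration, not pyangbind classes.

# Invented stand-ins (not pyangbind classes) sketching the property wiring.
class ContainerStub(object):
    """Hypothetical stand-in for a YANGDynClass-wrapped container."""
    def __init__(self, enabled=False):
        self.enabled = enabled

class NextHopStub(object):
    def __init__(self):
        self.__enable_bfd = ContainerStub()

    def _get_enable_bfd(self):
        return self.__enable_bfd

    def _set_enable_bfd(self, v, load=False):
        if not isinstance(v, ContainerStub):
            raise ValueError({
                'error-string': "enable_bfd must be of a type compatible with container",
                'defined-type': "container",
            })
        self.__enable_bfd = v

    # Generated bindings expose the pair as a property named after the node.
    enable_bfd = property(_get_enable_bfd, _set_enable_bfd)

nh = NextHopStub()
assert nh.enable_bfd.enabled is False
nh.enable_bfd = ContainerStub(enabled=True)  # routed through _set_enable_bfd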
Setter method for enable_bfd, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/enable_bfd (container)
|
def _set_enable_bfd(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name="enable-bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable_bfd must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_enable_bfd_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd, is_container='container', yang_name="enable-bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__enable_bfd = t
if hasattr(self, '_set'):
self._set()
|
[
"def _get_enable_bfd(self):\n return self.__enable_bfd",
"def enable_peer_bfd(self, **kwargs):\n rbridge_id = kwargs.pop('rbridge_id', '1')\n peer_ip = kwargs.pop('peer_ip')\n delete = kwargs.pop('delete', False)\n get = kwargs.pop('get', False)\n feature_tmp = '_neighbor{0}_bfd'\n callback = kwargs.pop('callback', self._callback)\n vrf = kwargs.pop('vrf', 'default')\n ip_addr = ip_interface(unicode(peer_ip))\n afi = 'ipv4' if ip_addr.version == 4 else 'ipv6'\n if vrf == 'default':\n if 'ipv4' == afi:\n feature = feature_tmp.format('_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_neighbor_ipv6_addr')\n afi = None\n elif 'ipv4' == afi:\n feature = feature_tmp.format('_af_ipv4_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_af_ipv6_neighbor_addr')\n if delete:\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n op='_bfd_enable_delete',\n os=self.os)\n return callback(config)\n if get:\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n resource_depth=2,\n op='_get',\n os=self.os)\n ret = callback(config, handler='get_config')\n bgp = Util(ret.data)\n ret = bgp.findall(bgp.root, './/bfd-enable')\n ret = True if ret and ret[0] == 'true' else False\n return ret\n args = dict(bfd_enable=True)\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n feature=feature,\n vrf=vrf,\n afi=afi,\n n_addr=peer_ip,\n op='_update',\n args=args,\n os=self.os)\n return callback(config)",
"def _set_bfd(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=bfd.bfd, is_container='container', presence=False, yang_name=\"bfd\", rest_name=\"bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD operation mode on this interface'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"bfd must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=bfd.bfd, is_container='container', presence=False, yang_name=\"bfd\", rest_name=\"bfd\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set BFD operation mode on this interface'}}, namespace='urn:brocade.com:mgmt:brocade-ospf', defining_module='brocade-ospf', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__bfd = t\n if hasattr(self, '_set'):\n self._set()",
"def bgp_next_hop_trigger_enable(self, bgp_next_hop_trigger_enable):\n\n self._bgp_next_hop_trigger_enable = bgp_next_hop_trigger_enable",
"def enable_di(self, value):\n self._set_property('enable_di', value)",
"def flash_set_lba(self, enable):\n self._set_flag_cmd(CMD_FL_LBA, enable)",
"def appdev_enable(self, appdev_enable):\n\n self._appdev_enable = appdev_enable",
"def _set_enable(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"enable\", rest_name=\"enable\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable global sflow'}}, namespace='urn:brocade.com:mgmt:brocade-sflow', defining_module='brocade-sflow', yang_type='empty', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"enable must be of a type compatible with empty\"\"\",\n 'defined-type': \"empty\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"enable\", rest_name=\"enable\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable global sflow'}}, namespace='urn:brocade.com:mgmt:brocade-sflow', defining_module='brocade-sflow', yang_type='empty', is_config=True)\"\"\",\n })\n\n self.__enable = t\n if hasattr(self, '_set'):\n self._set()",
"def configure_ospf_bfd(module, vrouter, ip, CHANGED_FLAG, task, msg):\n cli = pn_cli(module)\n clicopy = cli\n cli += ' vrouter-interface-show vrouter-name %s' % vrouter\n cli += ' ip %s format nic no-show-headers ' % ip\n nic_interface = run_command(module, cli, task, msg).split()\n nic_interface = list(set(nic_interface))\n nic_interface.remove(vrouter)\n\n cli = clicopy\n cli += ' vrouter-interface-config-show vrouter-name %s' % vrouter\n cli += ' nic %s format ospf-bfd no-show-headers ' % nic_interface[0]\n ospf_status = run_command(module, cli, task, msg).split()\n ospf_status = list(set(ospf_status))\n\n cli = clicopy\n cli += ' vrouter-show name ' + vrouter\n cli += ' format location no-show-headers '\n switch = run_command(module, cli, task, msg).split()[0]\n\n if 'Success' in ospf_status:\n cli = clicopy\n cli += ' vrouter-interface-config-add vrouter-name %s' % vrouter\n cli += ' nic %s ospf-bfd enable' % nic_interface[0]\n if 'Success' in run_command(module, cli, task, msg):\n CHANGED_FLAG.append(True)\n return ' %s: Added OSPF BFD config to %s \\n' % (switch, vrouter)\n elif 'enable' not in ospf_status:\n ospf_status.remove(vrouter)\n cli = clicopy\n cli += ' vrouter-interface-config-modify vrouter-name %s' % vrouter\n cli += ' nic %s ospf-bfd enable' % nic_interface[0]\n if 'Success' in run_command(module, cli, task, msg):\n CHANGED_FLAG.append(True)\n output = ' %s: Enabled OSPF BFD for %s \\n' % (switch, vrouter)\n return CHANGED_FLAG, output\n else:\n return CHANGED_FLAG, ''",
"def setEndianness(self, is_little):\n if is_little:\n self.compile_flags += ('EL',)\n self.link_flags += ('EL',)\n else:\n self.compile_flags += ('EB',)\n self.link_flags += ('EB',)",
"def _set_enabled(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, default=YANGBool(\"true\"), is_leaf=True, yang_name=\"enabled\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='boolean', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"enabled must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, default=YANGBool(\"true\"), is_leaf=True, yang_name=\"enabled\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='boolean', is_config=False)\"\"\",\n })\n\n self.__enabled = t\n if hasattr(self, '_set'):\n self._set()",
"def enableConnection(self, flag: 'SbBool') -> \"void\":\n return _coin.SoField_enableConnection(self, flag)",
"def enable(cls, run_module=False):\n # type: (bool) -> None\n if cls._instance is not None:\n log.debug(\"%s already enabled\", cls.__name__)\n return\n\n log.debug(\"Enabling %s\", cls.__name__)\n\n di_config.enabled = True\n\n cls.__watchdog__.install()\n\n if di_config.metrics:\n metrics.enable()\n\n cls._instance = debugger = cls()\n\n debugger.start()\n\n forksafe.register(cls._restart)\n atexit.register(cls.disable)\n register_post_run_module_hook(cls._on_run_module)\n\n log.debug(\"%s enabled\", cls.__name__)",
"def enable_oeenclave_debug(oe_enclave_addr, enclave_path):\n # Check if the magic matches.\n enclave_blob = read_from_memory(oe_enclave_addr, OE_ENCLAVE_HEADER_LENGTH)\n enclave_tuple = struct.unpack(OE_ENCLAVE_HEADER_FORMAT, enclave_blob)\n if enclave_tuple[OE_ENCLAVE_MAGIC_FIELD] != OE_ENCLAVE_MAGIC_VALUE:\n return False\n # Check if it's SGX debug mode enclave.\n flags_blob = read_from_memory(oe_enclave_addr + OE_ENCLAVE_FLAGS_OFFSET, OE_ENCLAVE_FLAGS_LENGTH)\n flags_tuple = struct.unpack(OE_ENCLAVE_FLAGS_FORMAT, flags_blob)\n\n # Check if debugging is enabled.\n if flags_tuple[0] == 0:\n print (\"oe-gdb: Debugging not enabled for enclave %s\" % enclave_path)\n return False\n\n # Check if the enclave is loaded in simulation mode.\n if flags_tuple[1] != 0:\n print (\"oe-gdb: Enclave %s loaded in simulation mode\" % enclave_path)\n\n # Load symbol.\n if load_enclave_symbol(enclave_path, enclave_tuple[OE_ENCLAVE_ADDR_FIELD]) != 1:\n return False\n # Set debug flag for each TCS in this enclave.\n thread_binding_addr = oe_enclave_addr + OE_ENCLAVE_THREAD_BINDING_OFFSET\n thread_binding_blob = read_from_memory(thread_binding_addr, THREAD_BINDING_HEADER_LENGTH)\n thread_binding_tuple = struct.unpack(THREAD_BINDING_HEADER_FORMAT, thread_binding_blob)\n while thread_binding_tuple[0] > 0 :\n # print (\"tcs address {0:#x}\" .format(thread_binding_tuple[0]))\n set_tcs_debug_flag(thread_binding_tuple[0])\n # Iterate the array\n thread_binding_addr = thread_binding_addr + THREAD_BINDING_SIZE\n thread_binding_blob = read_from_memory(thread_binding_addr, THREAD_BINDING_HEADER_LENGTH)\n thread_binding_tuple = struct.unpack(THREAD_BINDING_HEADER_FORMAT, thread_binding_blob)\n return True",
"def enable_fpga(self) -> Optional[bool]:\n return pulumi.get(self, \"enable_fpga\")",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def enable(self, flag: 'SbBool const') -> \"void\":\n return _coin.SoEngineOutput_enable(self, flag)",
"def set_ceph_enabled(self):\n\n if 'backends' not in self.config[self.ROOT]:\n raise configerror.ConfigError('No backends configured')\n\n if 'external_ceph' in self.config[self.ROOT]['backends']:\n return\n\n if self.is_lvm_enabled():\n return\n\n try:\n self.is_ceph_enabled()\n except configerror.ConfigError:\n self.config[self.ROOT]['backends']['ceph'].update({\"enabled\": True})",
"def add_bd(node, bd_name, **kwargs):\n\n path = (\"bridge-domains\", \"bridge-domain\")\n new_bd = BridgeDomainKeywords._create_bd_structure(bd_name, **kwargs)\n bridge_domain = [new_bd, ]\n return BridgeDomainKeywords._set_bd_properties(node, bd_name, path,\n bridge_domain)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
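All of these generated setters share one validation flow: coerce the incoming value into the expected binding type, translate any TypeError/ValueError into a structured ValueError, then assign the private slot and notify the parent. A hedged sketch of that flow as a factory follows; `coerce` is a hypothetical stand-in for the `YANGDynClass(...)` call, not a real pyangbind API.

# Hedged sketch; 'coerce' is a hypothetical stand-in for YANGDynClass(...).
def make_container_setter(attr_name, coerce):
    def setter(self, v, load=False):
        try:
            t = coerce(v)
        except (TypeError, ValueError):
            # Mirror the structured error dict raised by the generated code.
            raise ValueError({
                'error-string': "%s must be of a type compatible with container" % attr_name,
                'defined-type': "container",
            })
        # Assign the name-mangled private slot, then notify the parent,
        # mirroring the 'self.__x = t' / 'self._set()' tail seen above.
        setattr(self, '_%s__%s' % (type(self).__name__, attr_name), t)
        if hasattr(self, '_set'):
            self._set()
    return setter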
Setter method for interface_ref, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop/interface_ref (container)
|
def _set_interface_ref(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name="interface-ref", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interface_ref must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_interface_ref_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref, is_container='container', yang_name="interface-ref", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__interface_ref = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='base-interface-ref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with base-interface-ref\"\"\",\n 'defined-type': \"openconfig-vlan:base-interface-ref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name', caller=self._path() + ['interface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='base-interface-ref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=interface.interface, is_container='container', presence=False, yang_name=\"interface\", rest_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface name, maximum 3 interfaces supported', u'cli-compact-syntax': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=interface.interface, is_container='container', presence=False, yang_name=\"interface\", rest_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface name, maximum 3 interfaces supported', u'cli-compact-syntax': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def logical_interface_ref(self):\n return self.data.get('logical_interface_ref')",
"def _set_interface_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"name\",yc_interface_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"name\",yc_interface_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface, yang_name=\"interface\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name=\"interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__interface = t\n if hasattr(self, '_set'):\n self._set()",
"def set_interface(self, interface: str):\n self.di = interface",
"def _set_track_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=TypedListType(allowed_type=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name',caller=self._path() + ['track-interface'], path_helper=self._path_helper, require_instance=True)), is_leaf=True, yang_name=\"track-interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"track_interface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=TypedListType(allowed_type=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface/oc-if:name',caller=self._path() + ['track-interface'], path_helper=self._path_helper, require_instance=True)), is_leaf=True, yang_name=\"track-interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__track_interface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/tunnel', defining_module='openconfig-if-tunnel', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_subinterface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"subinterface must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='/oc-if:interfaces/oc-if:interface[oc-if:name=current()/../interface]/oc-if:subinterfaces/oc-if:subinterface/oc-if:index', caller=self._path() + ['subinterface'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"subinterface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__subinterface = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
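For orientation, this is roughly how setters like `_set_interface_ref` are reached in practice; the keyed next-hop list used here is the subject of the next row. The sketch assumes bindings generated by pyangbind from openconfig-local-routing; the module name `binding`, the route prefix, the list key and the interface name are illustrative, following pyangbind's documented examples rather than any particular build.

# Assumes pyangbind-generated bindings; 'binding' is the hypothetical output
# module and 'eth0' an illustrative interface name.
from binding import openconfig_local_routing

oc = openconfig_local_routing()
rt = oc.local_routes.static_routes.static.add("192.0.2.0/24")   # keyed on prefix
nh = rt.next_hops.next_hop.add("NH1")                           # keyed on index
nh.config.next_hop = "10.0.0.1"             # plain leaf assignment
nh.interface_ref.config.interface = "eth0"  # walks the interface-ref container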
Setter method for next_hop, mapped from YANG variable /local_routes/static_routes/static/next_hops/next_hop (list)
|
def _set_next_hop(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("index",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name="next-hop", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name="next-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """next_hop must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("index",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name="next-hop", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name="next-hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)""",
})
self.__next_hop = t
if hasattr(self, '_set'):
self._set()
|
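The list wrapped by the setter above is keyed on `index` (yang_keys='index', user_ordered=False, i.e. ordered-by system). A pure-Python stand-in for that keyed-list behaviour follows; it is invented for illustration and is not pyangbind's YANGListType.

# Invented stand-in for the keyed-list behaviour; not pyangbind's YANGListType.
class KeyedList(dict):
    def __init__(self, entry_factory):
        super(KeyedList, self).__init__()
        self._entry_factory = entry_factory

    def add(self, key):
        # YANG list keys must be unique within the list.
        if key in self:
            raise KeyError("duplicate list key: %r" % (key,))
        self[key] = self._entry_factory()
        return self[key]

    def delete(self, key):
        del self[key]

next_hop = KeyedList(entry_factory=dict)
entry = next_hop.add("NH1")       # create keyed entry
entry["next-hop"] = "10.0.0.1"    # populate a leaf on it
next_hop.delete("NH1")            # remove it again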
[
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with union\"\"\",\n 'defined-type': \"openconfig-local-routing:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with union\"\"\",\n 'defined-type': \"openconfig-local-routing:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hops(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name=\"next-hops\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hops must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name=\"next-hops\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hops = t\n if hasattr(self, '_set'):\n self._set()",
"def VplsEnableNextHop(self):\n return self._get_attribute('vplsEnableNextHop')",
"def _update_next_hop(self, want, have, opr=True):\n commands = []\n\n want_copy = deepcopy(remove_empties(want))\n have_copy = deepcopy(remove_empties(have))\n\n diff_next_hops = get_lst_diff_for_dicts(\n have_copy, want_copy, \"next_hops\"\n )\n if diff_next_hops:\n for hop in diff_next_hops:\n for element in hop:\n if element == \"forward_router_address\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n value=hop[element],\n remove=True,\n )\n )\n elif element == \"enabled\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"],\n value=\"disable\",\n remove=True,\n )\n )\n elif element == \"admin_distance\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=str(hop[element]),\n remove=True,\n )\n )\n elif element == \"interface\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=hop[element],\n remove=True,\n )\n )\n return commands",
"def _set_nexthop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"nexthop\", rest_name=\"nexthop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='mpls-ipv4-address', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"nexthop must be of a type compatible with mpls-ipv4-address\"\"\",\n 'defined-type': \"brocade-mpls:mpls-ipv4-address\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}), is_leaf=True, yang_name=\"nexthop\", rest_name=\"nexthop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='mpls-ipv4-address', is_config=True)\"\"\",\n })\n\n self.__nexthop = t\n if hasattr(self, '_set'):\n self._set()",
"def test_default_ipv6_route_next_hop_global_address(duthosts, tbinfo):\n duthost = find_duthost_on_role(\n duthosts, get_upstream_neigh_type(tbinfo['topo']['type']), tbinfo)\n asichost = duthost.asic_instance(0 if duthost.is_multi_asic else None)\n\n rtinfo = asichost.get_ip_route_info(ipaddress.ip_network(\"::/0\"))\n pytest_assert(len(rtinfo['nexthops']) > 0,\n \"cannot find ipv6 nexthop for default route\")\n for nh in rtinfo['nexthops']:\n pytest_assert(not nh[0].is_link_local,\n \"use link local address {} for nexthop\".format(nh[0]))",
"def test_hopping_generator():\n from scipy.spatial import cKDTree\n\n @pb.hopping_generator(\"tnn_test\", energy=graphene.t_nn)\n def next_nearest(x, y, z):\n pos = np.stack([x, y, z], axis=1)\n dmin = graphene.a * 0.95\n dmax = graphene.a * 1.05\n kdtree = cKDTree(pos)\n coo = kdtree.sparse_distance_matrix(kdtree, dmax).tocoo()\n idx = coo.data > dmin\n return coo.row[idx], coo.col[idx]\n\n @pb.onsite_energy_modifier\n def onsite_offset(energy):\n return energy + 3 * graphene.t_nn\n\n model = pb.Model(graphene.monolayer(), next_nearest, onsite_offset, graphene.hexagon_ac(1))\n expected = pb.Model(graphene.monolayer(2), graphene.hexagon_ac(1))\n assert pytest.fuzzy_equal(model.hamiltonian, expected.hamiltonian)",
"def update_hop_count(self, hop_count: int) -> None:\n if hop_count < self.hop_count:\n self.hop_count = hop_count",
"def create_static_route(parent_mo, prefix, **args):\n args = args['optional_args'] if 'optional_args' in args.keys() else args\n\n ip_routep = RouteP(parent_mo, prefix)\n\n if is_valid_key(args, 'next_hop_address'):\n for ip in args['next_hop_address']:\n ip_nexthopp = NexthopP(ip_routep, ip)",
"def add_next_to_path():\n path = [self.start_station, self.end_station]\n if path not in self.possible_paths:\n self.possible_paths.append(path)",
"def _update_nexthop_cache(self, now, vlan, eth_src, port, ip_gw):\n nexthop = NextHop(eth_src, port, now)\n nexthop_cache = self._vlan_nexthop_cache(vlan)\n nexthop_cache[ip_gw] = nexthop\n return nexthop",
"def broadcast_next_hop(self):\n base_station = self.get_BS()\n for node in self.get_alive_nodes():\n base_station.transmit(msg_length=cf.MSG_LENGTH, destination=node)",
"def is_hop_by_hop(header_name):\r\n return _hoppish(header_name.lower())",
"def set_next(self, next_node):\r\n self.next_node = next_node",
"def is_hop_by_hop_header(header):\n return header.lower() in _hop_by_hop_headers",
"def _set_hop_limit(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..255']}), is_leaf=True, yang_name=\"hop-limit\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"hop_limit must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..255']}), is_leaf=True, yang_name=\"hop-limit\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__hop_limit = t\n if hasattr(self, '_set'):\n self._set()",
"def set_next(self, new_next):\n \tself.next = new_next"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for prefix, mapped from YANG variable /local_routes/static_routes/static/prefix (leafref)
|
def _set_prefix(self, v, load=False):
  # 'prefix' is the key leaf of the static-route list entry: once the entry
  # lives inside an instantiated list it may only be populated internally
  # (load=True), never reassigned by the caller.
  parent = getattr(self, "_parent", None)
  if parent is not None and load is False:
    raise AttributeError("Cannot set keys directly when" +
                         " within an instantiated list")
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Validate the value by wrapping it in the typed leafref wrapper.
    t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)
  except (TypeError, ValueError):
    # Re-raise as a structured error naming the expected YANG type.
    raise ValueError({
      'error-string': """prefix must be of a type compatible with leafref""",
      'defined-type': "leafref",
      'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)""",
    })
  self.__prefix = t
  if hasattr(self, '_set'):
    self._set()
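
A minimal, self-contained sketch of the key-guard this setter applies (illustrative names only, not the pyangbind API itself): once an entry is owned by a list, its key leaf accepts internal population via load=True but rejects direct reassignment by the caller.

class StaticEntry:
    def __init__(self, parent=None):
        self._parent = parent      # set when the entry is created inside a list
        self.__prefix = None

    def _set_prefix(self, v, load=False):
        # Mirror of the guard above: keys are immutable once listed.
        if getattr(self, "_parent", None) is not None and load is False:
            raise AttributeError("Cannot set keys directly when"
                                 " within an instantiated list")
        self.__prefix = str(v)     # stands in for the YANGDynClass coercion

entry = StaticEntry(parent=object())
entry._set_prefix("192.0.2.0/24", load=True)   # internal population succeeds
try:
    entry._set_prefix("198.51.100.0/24")       # caller reassignment is rejected
except AttributeError as exc:
    print(exc)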
|
[
"def _set_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_ip_prefix(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/ip-prefix', caller=self._path() + ['ip-prefix'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"ip-prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"ip_prefix must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/ip-prefix', caller=self._path() + ['ip-prefix'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"ip-prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__ip_prefix = t\n if hasattr(self, '_set'):\n self._set()",
"def set_root_as_prefix(self) -> None:\n self.root_node.prefix_flag = True\n self._prefix_nodes[0] += 1",
"def addPrefix(self, prefix):\n \n pass",
"def _set_prefix(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"ip_prefix\",yc_prefix_openconfig_routing_policy__routing_policy_defined_sets_prefix_sets_prefix_set_prefixes_prefix, yang_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-prefix', extensions=None), is_container='list', yang_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"ip_prefix\",yc_prefix_openconfig_routing_policy__routing_policy_defined_sets_prefix_sets_prefix_set_prefixes_prefix, yang_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-prefix', extensions=None), is_container='list', yang_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__prefix = t\n if hasattr(self, '_set'):\n self._set()",
"def prefix(self):\n return self.parsed.prefix",
"def _lower_prefix(self):\n return self.instance_prefix.lower()",
"def _set_prefix(self, init_supplied_val):\n if init_supplied_val is not None:\n value = init_supplied_val.rstrip('_')\n elif self.__class__.prefix is not None:\n value = self.__class__.prefix.rstrip('_')\n else:\n module_path_parts = self.__module_path_split[:-1]\n try:\n module_path_parts.remove('conf')\n except ValueError:\n pass\n value = '_'.join(module_path_parts).upper()\n self._prefix = value",
"def get_route_prefix(self):\n pass",
"def uri_with_prefix(self, prefix: str = None) -> str:\n if prefix is None:\n return self.uri\n\n uri = self.uri\n\n if isinstance(uri, URI):\n match_policy = uri.match_policy\n else:\n match_policy = None\n\n if match_policy is None and isinstance(prefix, URI):\n match_policy = prefix.match_policy\n\n uri = prefix + self.uri\n if match_policy is not None:\n return URI(uri, match_policy=match_policy)\n\n return uri",
"def set_script_prefix(prefix):\n if not prefix.endswith(\"/\"):\n prefix += \"/\"\n _prefixes.value = prefix",
"def prefix(self):\n return self._config['DEFAULT']['prefix']",
"def default_prefixer(sender, **kwargs):\n request = http.HttpRequest()\n request.META['SCRIPT_NAME'] = ''\n prefixer = urlresolvers.Prefixer(request)\n urlresolvers.set_url_prefix(prefixer)",
"async def setprefix(self, ctx, prefix: str):\n\n await ctx.bot.get_cog(\"PrefixStore\").set(ctx, prefix)\n await ctx.send(\n ctx._(\"prefix_set_success\").format(prefix=prefix)\n )",
"def _set_source_address_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"source-address-prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"source_address_prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"source-address-prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__source_address_prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_prefix(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"destination\",prefix.prefix, yang_name=\"prefix\", rest_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='destination', extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}), is_container='list', yang_name=\"prefix\", rest_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"destination\",prefix.prefix, yang_name=\"prefix\", rest_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='destination', extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}), is_container='list', yang_name=\"prefix\", rest_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)\"\"\",\n })\n\n self.__prefix = t\n if hasattr(self, '_set'):\n self._set()",
"def addLocalPrefix_IDL(self, lispHandle, eidInstanceId, prefixAF, prefix, prefixLen):\n pass",
"def addPrefixToSite_IDL(self, lispHandle, siteHandle, eidInstanceId, prefixAF, prefix, prefixLen):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for config, mapped from YANG variable /local_routes/static_routes/static/config (container)
|
def _set_config(self, v, load=False):
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Coerce the value into the generated config container class; a type
    # mismatch falls through to the structured ValueError below.
    t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """config must be of a type compatible with container""",
      'defined-type': "container",
      'generated-type': """YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
    })
  self.__config = t
  if hasattr(self, '_set'):
    self._set()
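
The try/except body above follows a single validation idiom shared by all of these generated setters. A self-contained sketch of that idiom, with a plain callable standing in for YANGDynClass (the names here are illustrative assumptions):

def validate(v, base, yang_name, yang_type):
    # Coerce through the typed wrapper; on failure, surface the same
    # structured ValueError shape the generated setters use.
    try:
        return base(v)   # stands in for YANGDynClass(v, base=..., ...)
    except (TypeError, ValueError):
        raise ValueError({
            "error-string": "%s must be of a type compatible with %s" % (yang_name, yang_type),
            "defined-type": yang_type,
        })

print(validate("42", int, "config", "container"))        # -> 42
try:
    validate("not-a-number", int, "config", "container")
except ValueError as exc:
    print(exc.args[0]["error-string"])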
|
[
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_static_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"static_routes must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__static_routes = t\n if hasattr(self, '_set'):\n self._set()",
"def static_route_configuration(self) -> pulumi.Output[Optional['outputs.InternalNetworkPropertiesResponseStaticRouteConfiguration']]:\n return pulumi.get(self, \"static_route_configuration\")",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_static(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"static\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"static must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"static\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__static = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_band_steering_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11r_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11r_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_rstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_rstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_probes__probes_probe_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_probes__probes_probe_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/probes', defining_module='openconfig-probes', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_vlan__vlans_vlan_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_vlan__vlans_vlan_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_queue_management_profiles_queue_management_profile_wred_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_queue_management_profiles_queue_management_profile_wred_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11v_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_wifi_mac__ssids_ssid_dot11v_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for state, mapped from YANG variable /local_routes/static_routes/static/state (container)
|
def _set_state(self, v, load=False):
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Same container-coercion pattern as _set_config, here for the
    # operational-state container of the static-route entry.
    t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """state must be of a type compatible with container""",
      'defined-type': "container",
      'generated-type': """YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
    })
  self.__state = t
  if hasattr(self, '_set'):
    self._set()
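
These private setters are normally reached through a generated read/write property rather than called directly. A rough sketch of that wiring, under the assumption of an optional _set() notification hook (illustrative only, not the exact generated code):

class Node:
    def __init__(self):
        self.__state = None

    def _get_state(self):
        return self.__state

    def _set_state(self, v, load=False):
        self.__state = dict(v)          # stands in for YANGDynClass coercion
        if hasattr(self, "_set"):
            self._set()                 # re-register paths in the parent tree

    state = property(_get_state, _set_state)

n = Node()
n.state = {"prefix": "192.0.2.0/24"}    # assignment routes through _set_state
print(n.state)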
|
[
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_band_steering_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_rstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_rstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_interfaces_interface_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_mst_instances_mst_instance_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_rapid_pvst_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_rapid_pvst_vlan_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_global_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_global_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_dot11k_neighbors_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_dot11k_neighbors_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_clients_client_client_rf_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_access_points__access_points_access_point_system_grpc_server_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_access_points__access_points_access_point_system_grpc_server_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11r_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11r_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11v_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_wifi_mac__ssids_ssid_dot11v_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/mac', defining_module='openconfig-wifi-mac', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for next_hops, mapped from YANG variable /local_routes/static_routes/static/next_hops (container)
|
def _set_next_hops(self, v, load=False):
    # Values wrapped by YANGDynClass carry a _utype callable; unwrap back to
    # the underlying type before re-validating below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap v with the YANG schema metadata for this node, which also
      # validates that it is compatible with the generated next-hops container.
      t = YANGDynClass(v,base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name="next-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      # Report the expected YANG type in a structured error payload.
      raise ValueError({
        'error-string': """next_hops must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_next_hops_openconfig_local_routing__local_routes_static_routes_static_next_hops, is_container='container', yang_name="next-hops", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
      })
    self.__next_hops = t
    # Notify the parent binding, if it registered a change hook.
    if hasattr(self, '_set'):
      self._set()
|
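A minimal usage sketch (not part of the generated module): how this setter is reached through the generated property, assuming pyangbind bindings compiled into a hypothetical module named `binding`; the module name and all addresses are assumptions.

from binding import openconfig_local_routing

oc = openconfig_local_routing()
# Adding a static route pre-instantiates its next-hops container;
# populating it flows through the generated _set_* methods via properties.
rt = oc.local_routes.static_routes.static.add("192.0.2.0/24")
nh = rt.next_hops.next_hop.add("AUTO")
nh.config.next_hop = "192.168.1.1"

# Assigning an incompatible value is expected to raise the structured
# ValueError built in _set_next_hops above.
try:
    rt.next_hops = 42
except ValueError as err:
    print(err.args[0]['error-string'])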
[
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=next_hop.next_hop, is_container='container', presence=False, yang_name=\"next-hop\", rest_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop address filter', u'hidden': u'full'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"index\",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name=\"next-hop\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"index\",yc_next_hop_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop, yang_name=\"next-hop\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='index', extensions=None), is_container='list', yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with union\"\"\",\n 'defined-type': \"openconfig-local-routing:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=True)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_next_hop(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop must be of a type compatible with union\"\"\",\n 'defined-type': \"openconfig-local-routing:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\\\p{N}\\\\p{L}]+)?'}),RestrictedClassType(base_type=six.text_type, restriction_type=\"dict_key\", restriction_arg={'DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:DROP': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}, 'oc-loc-rt:LOCAL_LINK': {'@module': 'openconfig-local-routing', '@namespace': 'http://openconfig.net/yang/local-routing'}},),], is_leaf=True, yang_name=\"next-hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='union', is_config=False)\"\"\",\n })\n\n self.__next_hop = t\n if hasattr(self, '_set'):\n self._set()",
"def standard_access_list_next_hops(self, standard_access_list_next_hops):\n\n self._standard_access_list_next_hops = standard_access_list_next_hops",
"def VplsEnableNextHop(self):\n return self._get_attribute('vplsEnableNextHop')",
"def set_next(self, new_next):\n \tself.next = new_next",
"def _update_next_hop(self, want, have, opr=True):\n commands = []\n\n want_copy = deepcopy(remove_empties(want))\n have_copy = deepcopy(remove_empties(have))\n\n diff_next_hops = get_lst_diff_for_dicts(\n have_copy, want_copy, \"next_hops\"\n )\n if diff_next_hops:\n for hop in diff_next_hops:\n for element in hop:\n if element == \"forward_router_address\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n value=hop[element],\n remove=True,\n )\n )\n elif element == \"enabled\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"],\n value=\"disable\",\n remove=True,\n )\n )\n elif element == \"admin_distance\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=str(hop[element]),\n remove=True,\n )\n )\n elif element == \"interface\":\n commands.append(\n self._compute_command(\n dest=want[\"dest\"],\n key=\"next-hop\",\n attrib=hop[\"forward_router_address\"]\n + \" \"\n + element,\n value=hop[element],\n remove=True,\n )\n )\n return commands",
"def get_next_shoe(self):\n self._get_next_item(self.shoe_image_path, self.shoe_images, increment=True)",
"def _next_url(self, response):\n return response.links.get(\"page-next\", {}).get(\"url\", None)",
"def set_next(self, val):\n self._next = val",
"def sls_handler(self, next_url):\n return next_url",
"def collection_next(self):\n return \"__next\"",
"def get_next_url(self):\n return None",
"def get_next(request):\r\n try:\r\n next = request.GET.get('next', '')\r\n if not next:\r\n next = request.path\r\n return next\r\n except AttributeError:\r\n return ''",
"def set_next(self, next_node):\r\n self.next_node = next_node",
"def _set_next_variable_to_session_if_found():\n\n if request.method == \"GET\" and request.args.get(\"next\"):\n Session.add(\"next\", session_value=request.args.get(\"next\"))",
"async def next(self):\n json = await self._client._get(self._url_next)\n return NeoWsFeedPage(self._client, json)",
"def next_url(request):\n next_page = request.REQUEST.get(\"next\", \"\")\n host = request.get_host()\n return next_page if next_page and is_safe_url(\n next_page, host=host) else None"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for static, mapped from YANG variable /local_routes/static_routes/static (list)
|
def _set_static(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("prefix",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name="static", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name="static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """static must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("prefix",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name="static", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name="static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)""",
})
self.__static = t
if hasattr(self, '_set'):
self._set()
|
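For orientation, a hedged sketch of driving the `static` list through pyangbind's YANGListType wrapper (hypothetical `binding` module as above): entries are created with add() keyed on `prefix`, read back dict-style, and removed with delete().

from binding import openconfig_local_routing
import pyangbind.lib.pybindJSON as pybindJSON

oc = openconfig_local_routing()
static = oc.local_routes.static_routes.static

rt = static.add("198.51.100.0/24")       # keyed on the `prefix` leaf
assert static["198.51.100.0/24"] is rt   # dict-style access by key
print(pybindJSON.dumps(oc))              # serialise the populated tree
static.delete("198.51.100.0/24")         # drop the entry again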
[
"def _set_static_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"static_routes must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__static_routes = t\n if hasattr(self, '_set'):\n self._set()",
"def inspect_static_routes(app: App) -> 'List[StaticRouteInfo]':\n routes = []\n for sr, _, _ in app._static_routes:\n info = StaticRouteInfo(sr._prefix, sr._directory, sr._fallback_filename)\n routes.append(info)\n return routes",
"def _get_static_ipv4_routes(self):\n pass",
"def treat_as_static(self):\n self._static = 1",
"def staticroutes_status(self, site_id, element_id, staticroute_id, tenant_id=None, api_version=\"v2.2\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/sites/{}/elements/{}/staticroutes/{}/status\".format(api_version,\n tenant_id,\n site_id,\n element_id,\n staticroute_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"get\")",
"def static_tunnels(self) -> List[StaticTunnel]:\n return list(self._static_tunnels.values())",
"def DelAllRouterStatic(self):\n req = self.ApiGet('cmdb/router/static/')\n data = json.loads(req.text)\n for y in range(0, len(data['results'])):\n route_id = data['results'][y]['seq-num']\n return_code = self.DelRouterStaticID(route_id)\n print('del route id:', route_id, '(', return_code, ')')\n if return_code != 200: return return_code\n return 200",
"def create_static_routes(ADDR_TYPE, input_dict, tgen, CWD, topo):\n\n try:\n global frr_cfg\n for router in input_dict.keys():\n if \"static_routes\" in input_dict[router]:\n static_routes_list = []\n\n # Reset config for routers\n frr_cfg[router].reset_it()\n\n static_routes = input_dict[router][\"static_routes\"]\n for static_route in static_routes:\n network = static_route[\"network\"]\n # No of IPs\n if \"no_of_ip\" in static_route:\n no_of_ip = static_route[\"no_of_ip\"]\n else:\n no_of_ip = 0\n\n if \"admin_distance\" in static_route:\n admin_distance = static_route[\"admin_distance\"]\n else:\n admin_distance = 1\n\n if \"tag\" in static_route:\n tag = static_route[\"tag\"]\n else:\n tag = None\n\n if \"if_name\" in static_route:\n if_name = static_route[\"if_name\"]\n else:\n if_name = None\n\n next_hop = static_route[\"next_hop\"]\n\n ip_list = generate_ips(ADDR_TYPE, network, no_of_ip)\n for ip in ip_list:\n ip = str(ipaddress.ip_network(unicode(ip)))\n if ADDR_TYPE == \"ipv4\":\n addr = Address(ADDR_TYPE_IPv4, ip, None)\n route = Route(addr)\n nh = Address(ADDR_TYPE_IPv4, next_hop, None)\n else:\n addr = Address(ADDR_TYPE_IPv6, None, ip)\n route = Route(addr)\n nh = Address(ADDR_TYPE_IPv6, None, next_hop)\n route.add_nexthop(nh, None, admin_distance, if_name, tag)\n\n static_routes_list.append(route)\n frr_cfg[router].routing_pb.static_route = static_routes_list\n\n interfaces_cfg(frr_cfg[router])\n static_rt_cfg(frr_cfg[router])\n frr_cfg[router].print_common_config_to_file(topo)\n # Load configuration to router\n load_config_to_router(tgen, CWD, router)\n\n except Exception as e:\n errormsg = traceback.format_exc()\n logger.error(errormsg)\n return errormsg\n\n return True",
"def _get_static_ipv6_routes(self):\n pass",
"def static_route_configuration(self) -> pulumi.Output[Optional['outputs.InternalNetworkPropertiesResponseStaticRouteConfiguration']]:\n return pulumi.get(self, \"static_route_configuration\")",
"def MakeStaticFilePatternsIntoHandlers(self):\n includes = self.static_file_includes\n if not includes:\n return [handler.SimpleHandler('/*', {'type': 'static'})]\n\n handler_patterns = []\n\n for include in includes:\n pattern = include.pattern.replace('**', '*')\n if pattern[0] != '/':\n pattern = '/' + pattern\n properties = {'type': 'static'}\n if include.expiration:\n properties['expiration'] = include.expiration\n if include.http_headers:\n\n properties['http_headers'] = tuple(sorted(include.http_headers.items()))\n handler_patterns.append(handler.SimpleHandler(pattern, properties))\n return handler_patterns",
"def _copy_static(self):\n if self.static_dirs is None:\n self._copy_static_from_resources('resources/site', self.out_dir)\n else:\n fscopier = PatternFsCopier()\n for dname in self.static_dirs.split(','):\n logger.info('coping static tree {} -> {}'.format(\n dname, self.out_dir))\n fscopier.copytree(dname, self.out_dir)",
"def getStaticAttrVals(self, obj):\n return [getattr(obj, attr, None) for attr in self.static_attrs]",
"def static_router(self, root, path, controller=None):\n\n log.info(\"Static file server was enabled in root: {r}.\".format(r=root))\n self.routes.append(StaticRoute(root, path, controller))",
"def set_static_value(self, value):\n # TODO Check constraint\n # self._constraint.endpointWillChangeStatic()\n\n self._set_value_with_type_check(value)",
"def StaticHostsOptions(self):\n\t\tfrom ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.statichostsoptions_rhy2svc3rhdgljsg9zdhnpchrpb25z import StaticHostsOptions\n\t\treturn StaticHostsOptions(self)",
"def add_static_ipv4_route(self, add_route):\n pass",
"def is_static(self) -> bool:\n return '*' not in self.path",
"def __init__(self, static_path=None):\n self.__static_path = static_path"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for description, mapped from YANG variable /local_routes/local_aggregates/aggregate/config/description (string)
|
def _set_description(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """description must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='string', is_config=True)""",
})
self.__description = t
if hasattr(self, '_set'):
self._set()
|
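A short sketch of the string-leaf setter in use (hypothetical `binding` module again). Since the base type is six.text_type, values that coerce cleanly to text, such as integers, are stored in string form rather than rejected, in our reading of the generated code.

from binding import openconfig_local_routing

oc = openconfig_local_routing()
ag = oc.local_routes.local_aggregates.aggregate.add("203.0.113.0/24")
ag.config.description = "customer aggregate block"
print(ag.config.description)   # -> customer aggregate block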
[
"def set_description(self, description):\n if not isinstance(description, str):\n raise ValueError(\"Description must be a string.\")\n try:\n self._set_config_value(\n _SERVICE_INFO_SECTION_NAME, \"Description\", description\n )\n except Exception as e:\n logger.error(f\"Unable to set description: {e}\")",
"def config_setting_description(self, config_setting_description):\n\n self._config_setting_description = config_setting_description",
"def set_description(self):\n if \"description\" not in self.data:\n logger.debug(\"Adding empty descriptions to root\")\n self.data[\"description\"] = \"\"",
"def description(self, value):\r\n if self.description is not None:\r\n if isinstance(value, basestring):\r\n output = self._update(\r\n backupset_name=self.backupset_name,\r\n backupset_description=value,\r\n default_backupset=self.is_default_backupset\r\n )\r\n\r\n if output[0]:\r\n return\r\n else:\r\n o_str = 'Failed to update the description of the backupset\\nError: \"{0}\"'\r\n raise SDKException('Backupset', '102', o_str.format(output[2]))\r\n else:\r\n raise SDKException(\r\n 'Backupset', '102', 'Backupset description should be a string value'\r\n )\r\n else:\r\n raise SDKException('Backupset', '102', 'Description cannot be modified')",
"def _setup_externals_description(config):\n\n config.add_section(DESCRIPTION_SECTION)\n config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.1')",
"def set_description(description):",
"def add_description(self, description: str):\n self.response[DESCRIPTION_KEY] = description\n return self",
"def parse_description(self):\n string = self.description\n name, host_genus = \\\n basic.parse_names_from_record_field(string)\n self._description_name = name\n self._description_host_genus = host_genus",
"def set_description(module):\n name = module.attributes['name']\n value = module.attributes['description']\n module.node.api('interfaces').set_description(name, value)",
"def description(self, value):\n if value is None or value == \"\":\n value = self.name\n CCAPI.set_product_description(product_ids=[self.id], description=value)\n self._description = value",
"def description_url(self, description_url):\n\n self._description_url = description_url",
"def _description(cls, description):\n descriptions = description.asDict()\n return descriptions.get(None, descriptions.values()[0])",
"def add_description(self, description):\n\n new_desc = json.loads(description)\n model_desc = json.loads(self.description)\n\n for new_item in new_desc:\n model_desc[new_item] = new_desc[new_item]\n\n self.description = json.dumps(model_desc, sort_keys=True)",
"def _description_string(self) -> str:",
"def setDescription(self, description):\n\n prod = self.productClass()\n\n if prod:\n prod.description = description",
"def get_description_of_rule(config_rule_name):\n description = \"\"\n try:\n response = CONFIG.describe_config_rules(\n ConfigRuleNames=[config_rule_name]\n )\n if 'Description' in response['ConfigRules'][0]:\n description = response['ConfigRules'][0]['Description']\n else:\n description = response['ConfigRules'][0]['ConfigRuleName']\n return description\n except Exception as error:\n print(\"Error: \", error)\n raise",
"def description(event_id):\n return DESCRIPTIONS.get(event_id, event_id)",
"def format_desc(self):\n return '\\nDescription:\\n{}\\n'.format(\n C(\n FormatBlock(get_pkg_description(self.package)).format(\n width=76,\n newlines=True,\n prepend=' '\n ),\n fore='green'\n )\n )",
"def description(self, new_description):\n if new_description == \"\":\n raise ActivityValidatorError(\"The description cannot be empty!\\n\")\n self.__description = new_description"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for discard, mapped from YANG variable /local_routes/local_aggregates/aggregate/state/discard (boolean)
|
def _set_discard(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="discard", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """discard must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="discard", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='boolean', is_config=False)""",
})
self.__discard = t
if hasattr(self, '_set'):
self._set()
|
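Because `discard` is declared with is_config=False it models operational state, so it is normally populated while decoding retrieved data (load=True) rather than set by callers. Its default wrapper is YANGBool("false"); a quick sketch of YANGBool truthiness:

from pyangbind.lib.yangtypes import YANGBool

print(bool(YANGBool("false")))   # False -- the declared default
print(bool(YANGBool("true")))    # True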
[
"def is_discarded(self):\n return self._discarded",
"def discard(self) :\n\t\tassert self._holded is not None, \\\n\t\t\t\"Discarding a configuration requires to be held\"\n\t\tself._dict = self._holded\n\t\tself._holded = None",
"def discard():\n player = current_player._get_current_object()\n if not player:\n abort(400)\n\n args = request.get_json()\n card_id = args.get('card', None)\n if card_id is None:\n abort(400)\n\n card = Card.query.get(card_id)\n if card is not None and card in player.cards:\n player.cards.remove(card)\n db.session.commit()\n return player_state()",
"def begin_discard(\n self,\n resource_group_name: str,\n move_collection_name: str,\n body: Optional[_models.DiscardRequest] = None,\n *,\n content_type: str = \"application/json\",\n **kwargs: Any\n ) -> LROPoller[_models.OperationStatus]:",
"def drop_packet(state):\n state.id = StateId.NO_PACKET\n state.packet = bytearray()\n return state",
"def unflag(self):\n self._flagged = False",
"def ignore(self):\n return \"ignore\" in self.attributes and self.attributes[\"ignore\"] == \"true\"",
"def discard_tile(self, player_id, tile_code):\n tile_states = self.get_state(tile_code)\n if player_id == 0: # discard our tile\n first_in_hand_index = tile_states.index(TileState.InMyHand)\n self.set_tile_status(tile_code, first_in_hand_index, TileState.InMyDiscardTiles)\n else: # other player discarded a tile\n first_unknown_index = tile_states.index(TileState.Unknown)\n self.set_tile_status(tile_code, first_unknown_index, TileState(1 + player_id))",
"def discard(self, shadowattribute):\n assert shadowattribute is not MispShadowAttribute\n self.server.POST('/shadow_attributes/discard/%d' % shadowattribute.id, '')",
"def discard(self):\n assert self\n self.__discard.add(self.__items[-1])\n self.__skip_discarded()",
"def get_rating_discard(state: State, c: Color, n: int) -> float:\n return state.get_discard_score(Card(n, c))",
"def set_auto_discard_for_device(self, name, controller_port, device, discard):\n if not isinstance(name, basestring):\n raise TypeError(\"name can only be an instance of type basestring\")\n if not isinstance(controller_port, baseinteger):\n raise TypeError(\"controller_port can only be an instance of type baseinteger\")\n if not isinstance(device, baseinteger):\n raise TypeError(\"device can only be an instance of type baseinteger\")\n if not isinstance(discard, bool):\n raise TypeError(\"discard can only be an instance of type bool\")\n self._call(\"setAutoDiscardForDevice\",\n in_p=[name, controller_port, device, discard])",
"def mark_cards_for_discard(self, player):\n cards_to_discard = [False, False, False, False, False]\n discarding = True\n while discarding:\n # Display cards with those marked for discard\n print(\"Player\", player.hand.name, \"Trump suit\", self.trump_suit)\n print(\"Index Discard Card\")\n for card_index in range(0, len(player.hand.cards)):\n print(\"{:5} {:7} {}\".\n format(str(card_index + 1),\n str(cards_to_discard[card_index]),\n str(player.hand.cards[card_index])))\n discard_string = input(\n \"Enter card to discard, 1-5. Enter 0 when done.\")\n try:\n discard_value = int(discard_string)\n except ValueError:\n discard_value = -1\n if 0 < discard_value < 6:\n cards_to_discard[discard_value - 1] = \\\n not cards_to_discard[discard_value - 1]\n if discard_value == 0:\n discarding = False\n return cards_to_discard",
"def discard_pcard(self, card):\n move(card, self.phand, self.discard)",
"def discard_settings(self):\n self._call(\"discardSettings\")",
"def _draw_discard(self) -> Card:\n return self._discards.draw()",
"def SetEpoIgnore(self, state):\n if state:\n level = COMMAND_VALUE_ON\n else:\n level = COMMAND_VALUE_OFF\n\n try:\n self.RawWrite(COMMAND_SET_EPO_IGNORE, [level])\n except KeyboardInterrupt:\n raise\n except:\n self.Print('Failed sending EPO ignore state!')",
"def reject(self) -> None:\n\n assert self.state == 'submitted'\n self.state = 'rejected'",
"def clear_capture_state(self):\n\n # discard current state by retrieving the state and not using it\n response = request.get(self.__capture_url)\n return response.status_code == 200"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for prefix, mapped from YANG variable /local_routes/local_aggregates/aggregate/prefix (leafref)
|
def _set_prefix(self, v, load=False):
    # `prefix` is this list entry's key: once the entry has a parent list,
    # the key may only be set while loading (load=True), never by the caller.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
        " within an instantiated list")

    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """prefix must be of a type compatible with leafref""",
        'defined-type': "leafref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="prefix", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='leafref', is_config=True)""",
      })
    self.__prefix = t
    if hasattr(self, '_set'):
      self._set()
|
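The guard at the top of this setter is observable from user code: once an aggregate entry lives inside its parent list, reassigning its key raises the AttributeError above (hypothetical `binding` module).

from binding import openconfig_local_routing

oc = openconfig_local_routing()
ag = oc.local_routes.local_aggregates.aggregate.add("203.0.113.0/24")
try:
    ag.prefix = "198.51.100.0/24"    # keys are immutable once instantiated
except AttributeError as err:
    print(err)   # Cannot set keys directly when within an instantiated list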
[
"def _set_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../../../../../../../../defined-sets/prefix-sets/prefix-set/config/name', caller=self._path() + ['prefix-set'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def set_root_as_prefix(self) -> None:\n self.root_node.prefix_flag = True\n self._prefix_nodes[0] += 1",
"def prefix(self):\n return self.parsed.prefix",
"def _set_ip_prefix(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/ip-prefix', caller=self._path() + ['ip-prefix'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"ip-prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"ip_prefix must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/ip-prefix', caller=self._path() + ['ip-prefix'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"ip-prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__ip_prefix = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_prefix(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"ip_prefix\",yc_prefix_openconfig_routing_policy__routing_policy_defined_sets_prefix_sets_prefix_set_prefixes_prefix, yang_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-prefix', extensions=None), is_container='list', yang_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"ip_prefix\",yc_prefix_openconfig_routing_policy__routing_policy_defined_sets_prefix_sets_prefix_set_prefixes_prefix, yang_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='ip-prefix', extensions=None), is_container='list', yang_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/routing-policy', defining_module='openconfig-routing-policy', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__prefix = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_source_address_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"source-address-prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"source_address_prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"source-address-prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__source_address_prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def addPrefix(self, prefix):\n \n pass",
"def add_prefix(dir_path=None, prefix=None, sep=None, filter_ext=None):\n return __add_affix(dir_path=dir_path, affix=prefix, affix_type='prefix', sep=sep, filter_ext=filter_ext)",
"def prefix_with(self, prefix: str) -> 'MetricDataCollection':\n for child in self.values:\n child.prefix_with(prefix)",
"def _set_destination_address_prefix_set(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name=\"destination-address-prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"destination_address_prefix_set must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=six.text_type, is_leaf=True, yang_name=\"destination-address-prefix-set\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=False)\"\"\",\n })\n\n self.__destination_address_prefix_set = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_prefix(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"destination\",prefix.prefix, yang_name=\"prefix\", rest_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='destination', extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}), is_container='list', yang_name=\"prefix\", rest_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"prefix must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"destination\",prefix.prefix, yang_name=\"prefix\", rest_name=\"prefix\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='destination', extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}), is_container='list', yang_name=\"prefix\", rest_name=\"prefix\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-fec-prefix', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)\"\"\",\n })\n\n self.__prefix = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_prefix(self, init_supplied_val):\n if init_supplied_val is not None:\n value = init_supplied_val.rstrip('_')\n elif self.__class__.prefix is not None:\n value = self.__class__.prefix.rstrip('_')\n else:\n module_path_parts = self.__module_path_split[:-1]\n try:\n module_path_parts.remove('conf')\n except ValueError:\n pass\n value = '_'.join(module_path_parts).upper()\n self._prefix = value",
"def prefix(self):\n return self._config['DEFAULT']['prefix']",
"def addLocalPrefix_IDL(self, lispHandle, eidInstanceId, prefixAF, prefix, prefixLen):\n pass",
"def _lower_prefix(self):\n return self.instance_prefix.lower()",
"def prefix(self):\n # Task 1.3\n final_repr = self.root\n if hasattr(self, 'first'):\n final_repr += self.first.prefix()\n if hasattr(self, 'second'):\n final_repr += self.second.prefix()\n return final_repr",
"def uri_with_prefix(self, prefix: str = None) -> str:\n if prefix is None:\n return self.uri\n\n uri = self.uri\n\n if isinstance(uri, URI):\n match_policy = uri.match_policy\n else:\n match_policy = None\n\n if match_policy is None and isinstance(prefix, URI):\n match_policy = prefix.match_policy\n\n uri = prefix + self.uri\n if match_policy is not None:\n return URI(uri, match_policy=match_policy)\n\n return uri",
"def set_prefix(self):\n try:\n if self.prefix_column is not None:\n self.uids = self.prefix_column.str.cat(self.uids, self.delimiter)\n elif self.prefix:\n self.uids = '{}{}'.format(self.prefix, self.delimiter) + self.uids.astype(str)\n except AttributeError as e:\n print(e)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for config, mapped from YANG variable /local_routes/local_aggregates/aggregate/config (container)
|
def _set_config(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
|
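A final sketch: config containers are pre-instantiated by the bindings, so the usual pattern is to set leaves inside them; replacing the container itself with an incompatible value should be rejected by this setter (hypothetical `binding` module).

from binding import openconfig_local_routing

oc = openconfig_local_routing()
ag = oc.local_routes.local_aggregates.aggregate.add("203.0.113.0/24")
ag.config.description = "set leaves inside the existing container"

try:
    ag.config = "not-a-container"    # expected to be rejected by _set_config
except ValueError as err:
    print(err.args[0]['defined-type'])   # container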
[
"def _set_aggregate(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name=\"aggregate\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"aggregate\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"aggregate must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"prefix\",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name=\"aggregate\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"aggregate\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__aggregate = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_local_aggregates(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_aggregates must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__local_aggregates = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_telemetry__telemetry_system_subscriptions_persistent_subscriptions_persistent_subscription_destination_groups_destination_group_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_telemetry__telemetry_system_subscriptions_persistent_subscriptions_persistent_subscription_destination_groups_destination_group_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_queue_management_profiles_queue_management_profile_red_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_queue_management_profiles_queue_management_profile_red_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_elements__qos_queue_management_profiles_queue_management_profile_red_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_elements__qos_queue_management_profiles_queue_management_profile_red_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_buffer_allocation_profiles_buffer_allocation_profile_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_buffer_allocation_profiles_buffer_allocation_profile_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_queue_management_profiles_queue_management_profile_wred_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_queue_management_profiles_queue_management_profile_wred_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_spanning_tree__stp_global_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_access_points__access_points_access_point_system_grpc_server_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/wifi/access-points', defining_module='openconfig-access-points', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_elements__qos_queue_management_profiles_queue_management_profile_wred_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_elements__qos_queue_management_profiles_queue_management_profile_wred_uniform_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_qos_interfaces__qos_interfaces_interface_output_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for state, mapped from YANG variable /local_routes/local_aggregates/aggregate/state (container)
|
def _set_state(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
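A minimal usage sketch of how this setter is reached in practice, assuming bindings generated with pyangbind from openconfig-local-routing; the binding module name oc_local_routing below is a hypothetical placeholder, not part of the generated code above:

# Sketch only: the import path is illustrative.
from oc_local_routing import openconfig_local_routing

oc = openconfig_local_routing()
# List entries are created via .add(key); 'prefix' keys the aggregate list.
agg = oc.local_routes.local_aggregates.aggregate.add("192.0.2.0/24")
# Reading the generated 'state' property returns the container; assigning
# to it invokes _set_state, which type-checks the value via YANGDynClass.
print(agg.state)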
|
[
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_interfaces__interfaces_interface_subinterfaces_subinterface_ipv6_addresses_address_vrrp_vrrp_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_persistent_subscriptions_persistent_subscription_destination_groups_destination_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_persistent_subscriptions_persistent_subscription_destination_groups_destination_group_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_queue_management_profiles_queue_management_profile_red_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_queue_management_profiles_queue_management_profile_red_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_enable_bfd_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_queue_management_profiles_queue_management_profile_red_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_queue_management_profiles_queue_management_profile_red_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_interface_ref_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_queue_management_profiles_queue_management_profile_wred_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_queue_management_profiles_queue_management_profile_wred_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_next_hops_next_hop_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_buffer_allocation_profiles_buffer_allocation_profile_queues_queue_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_global_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_global_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_buffer_allocation_profiles_buffer_allocation_profile_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_queue_management_profiles_queue_management_profile_wred_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_queue_management_profiles_queue_management_profile_wred_uniform_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_static_routes_static_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_mem_mgmt__qos_buffer_allocation_profiles_buffer_allocation_profile_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_mem_mgmt__qos_buffer_allocation_profiles_buffer_allocation_profile_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_spanning_tree__stp_mstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_spanning_tree__stp_mstp_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/spanning-tree', defining_module='openconfig-spanning-tree', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos_interfaces__qos_queue_management_profiles_queue_management_profile_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos_interfaces__qos_queue_management_profiles_queue_management_profile_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_telemetry__telemetry_system_subscriptions_dynamic_subscriptions_dynamic_subscription_sensor_paths_sensor_path_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/telemetry', defining_module='openconfig-telemetry', yang_type='container', is_config=False)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_qos__qos_interfaces_interface_input_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_qos__qos_interfaces_interface_input_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for aggregate, mapped from YANG variable /local_routes/local_aggregates/aggregate (list)
|
def _set_aggregate(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("prefix",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name="aggregate", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name="aggregate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """aggregate must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("prefix",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name="aggregate", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name="aggregate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)""",
})
self.__aggregate = t
if hasattr(self, '_set'):
self._set()
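As a hedged illustration (reusing the hypothetical oc object from the sketch above): keyed YANG lists wrapped in YANGListType are normally populated through .add(key) rather than by assigning a whole list, so _set_aggregate mainly guards programmatic replacement of the list:

# Entries are keyed by 'prefix'; .add() is pyangbind's standard list interface.
entry = oc.local_routes.local_aggregates.aggregate.add("10.0.0.0/8")
entry.config.set_tag = 100  # 'set-tag' leaf; the value 100 is illustrative
# Assigning anything YANGDynClass cannot coerce to the list type raises the
# structured ValueError constructed in the except branch above.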
|
[
"def _set_local_aggregates(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_aggregates must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__local_aggregates = t\n if hasattr(self, '_set'):\n self._set()",
"def aggregate(self, agg, name=None):\n\n if name is None:\n name = str(agg)\n\n self.gxview.aggregate(agg.gxagg, name)",
"def ListAggregatedFunc(self):\n return self.api.addresses.aggregatedList",
"def aggregation(self, value):\n self._set_property('aggregation', value)",
"def cluster_aggregate(\n self,\n collection_name: str,\n aggregation_query: Dict,\n page: int = 1,\n page_size: int = 10,\n asc: bool = False,\n flatten: bool = True,\n return_curl=False,\n **kwargs\n ):\n params = {\n \"username\": self.username,\n \"api_key\": self.api_key,\n \"collection_name\": collection_name,\n \"aggregation_query\": aggregation_query,\n \"page\": page,\n \"page_size\": page_size,\n \"asc\": asc,\n \"flatten\" : flatten\n }\n params.update(kwargs)\n response = requests.post(\n url=\"{}/collection/cluster_aggregate\".format(self.url),\n json=params,\n )\n return return_curl_or_response(response, return_type='json', return_curl=return_curl)",
"def _getVariableAggregate(self, aggregate=None):\n\n if aggregate not in [\"avg\", \"min\", \"max\", None]:\n raise TSIQueryError(\n \"TSIClient: Aggregation method not supported, must be \\\"avg\\\", \\\"min\\\" or \\\"max\\\".\"\n )\n\n if aggregate != None:\n aggregate = {\"tsx\": \"{0!s}($value)\".format(aggregate)}\n requestType = \"aggregateSeries\"\n else:\n requestType = \"getSeries\"\n\n return (aggregate, requestType)",
"def _find_matching_aggregates(self):\n aggregate_names = self._client.list_aggregates()\n pattern = self.configuration.netapp_aggregate_name_search_pattern\n return [aggr_name for aggr_name in aggregate_names\n if re.match(pattern, aggr_name)]",
"def update_aggregates(self, ctxt, aggregates):\n # NOTE(sbauza): We're dropping the user context now as we don't need it\n self.host_manager.update_aggregates(aggregates)",
"def set_aggregation_function(self, function):\n for node in self:\n node.aggregation_function = function",
"def isAggregate():",
"def _update_aggstatus(self, aggregate_type, new_status, alerts_list=None):\n log.debug(\"%r: _update_aggstatus called\", self._agent._platform_id)\n # alerts_list passed per OOIION-1275\n self._agent._status_manager.set_aggstatus(aggregate_type, new_status, alerts_list)",
"def get_aggregate_request(self, aggregate_name, **kwargs):\n try:\n return self.split_and_replace(\n self.aggregates[aggregate_name.upper()]\n )\n except KeyError:\n raise KeyError(\n '{} aggregate does not exist. Must be one of {}'.format(\n aggregate_name,\n ', '.join(self.aggregates.keys())\n )\n )",
"def aggregate(self, values):\n pass",
"def aggregate(cls, items, mode):\n if mode == cls.AGG_MODE_MAX:\n return max(items)\n elif mode == cls.AGG_MODE_MIN:\n return min(items)\n elif mode == cls.AGG_MODE_AVG:\n return cls.round(sum(items) / len(items))\n elif mode == cls.AGG_MODE_DIFF:\n return cls.round(max(items) - min(items))\n else:\n raise ValueError('unknown mode: \\'{}\\''.format(mode))",
"def process_aggregates(gprod):\n cols = [\n _coerce_to_series(\n ctx,\n c.resolve_expression(ctx, gprod)\n ).reset_index(drop=True)\n for c in frame_columns]\n\n\n for c in cols:\n if c.name == 'aggregate':\n break\n else:\n return cols\n\n return [\n list(c)[0]\n if c.name != 'aggregate'\n else c\n for c in cols\n ]",
"def rewrite_aggregates(tree, symtab):\n class Rewriter(S.QueryRewriter):\n expand = True\n \n def rewrite_aggr(self, symbol, name, aggr):\n # Only operate on min and max nodes.\n if isinstance(aggr.op, L.Min):\n func = 'min2'\n elif isinstance(aggr.op, L.Max):\n func = 'max2'\n else:\n return\n \n parts = L.get_setunion(aggr.value)\n if len(parts) <= 1:\n return\n multiple_queries = \\\n len([p for p in parts if not isinstance(p, L.Set)]) > 1\n \n i = 2\n done_first_query = False\n new_parts = []\n for p in parts:\n if isinstance(p, L.Set):\n # Flatten the literal elements as arguments to\n # min2/max2.\n new_parts.extend(p.elts)\n else:\n new_query_node = aggr._replace(value=p)\n if done_first_query:\n # Create a new query symbol and node for this\n # non-literal argument.\n new_name = name + '_aggrop' + str(i)\n i += 1\n new_parts.append(L.Query(new_name, new_query_node,\n None))\n symtab.define_query(new_name, node=new_query_node,\n impl=symbol.impl)\n else:\n # Push the Query node down to the first non-literal\n # argument.\n new_parts.append(L.Query(name, new_query_node, None))\n symbol.node = new_query_node\n done_first_query = True\n \n return L.Call(func, new_parts)\n \n tree = Rewriter.run(tree, symtab)\n return tree",
"def test_aggregates_list(self):\n pass",
"def rewrite_aggregates(tree, symtab):\n class Rewriter(S.QueryRewriter):\n def rewrite_aggr(self, symbol, name, expr):\n operand = expr.value\n \n if (isinstance(operand, L.Name) and\n operand.id in symtab.get_relations()):\n return\n if isinstance(operand, L.Query):\n return\n \n oper_name = symbol.name + '_oper'\n elem = next(symtab.fresh_names.vars)\n t_oper = symtab.analyze_expr_type(operand)\n # We're tolerant of type misinformation here, since our object\n # type inference isn't in place at the moment.\n if not t_oper.issmaller(T.Set(T.Top)):\n t_oper = T.Set(T.Top)\n \n comp = L.Comp(L.Name(elem), [L.Member(L.Name(elem), operand)])\n oper_query = L.Query(oper_name, comp, None)\n symtab.define_query(oper_name, node=comp, type=t_oper,\n impl=symbol.impl)\n expr = expr._replace(value=oper_query)\n return expr\n \n tree = Rewriter.run(tree, symtab)\n return tree",
"def mm_aggregate(mmprox, agg='mun', from_agg='postalcode'):\n # validate\n mmprox_shape = mmprox.shape\n if from_agg != 'arr' and mmprox_shape != (1148, 1148):\n raise Exception(f\"The input dataframe is of the shape {mmprox_shape}, not (1148, 1148) which is all 1147 postal codes + destinations/origins abroad. Fix this first.\")\n if agg not in ['mun', 'arr', 'prov']:\n raise Exception(\"The aggregation level must be either municipality ('mun'), arrondissements ('arr') or provinces ('prov').\")\n \n if agg=='prov' and from_agg=='arr':\n # Rename columns\n for nis in mmprox.columns:\n if nis not in ['ABROAD', '21000', '23000', '24000', '25000']: # Brussels is '11th province'\n new_nis = nis[:-4] + '0000'\n mmprox = mmprox.rename(columns={nis : new_nis})\n if nis in ['23000', '24000']:\n new_nis = '20001'\n mmprox = mmprox.rename(columns={nis : new_nis})\n if nis == '25000':\n new_nis = '20002'\n mmprox = mmprox.rename(columns={nis : new_nis})\n\n # Rename rows\n for nis in mmprox.index:\n if nis not in ['Foreigner', '21000', '23000', '24000', '25000']:\n new_nis = nis[:-4] + '0000'\n mmprox = mmprox.rename(index={nis : new_nis})\n if nis in ['23000', '24000']:\n new_nis = '20001'\n mmprox = mmprox.rename(index={nis : new_nis})\n if nis == '25000':\n new_nis = '20002'\n mmprox = mmprox.rename(index={nis : new_nis})\n\n # Collect rows and columns with the same NIS code, and automatically order column/row names\n mmprox_agg = mmprox.groupby(level=0, axis=1).sum()\n mmprox_agg = mmprox.groupby(level=0, axis=0).sum()#.astype(int)\n \n else:\n # copy dataframe and load the postal-code-to-NIS-value translator\n mmprox_agg = mmprox.copy()\n pc_to_nis = load_pc_to_nis()\n\n rename_abroad = 'ABROAD'\n rename_foreigner = 'Foreigner'\n\n # initiate renaming dictionaries\n rename_col_dict = dict({})\n rename_idx_dict = dict({})\n for pc in mmprox_agg.columns:\n if pc != 'ABROAD':\n NIS = str(pc_to_nis[pc_to_nis['Postcode']==int(pc)]['NISCode'].values[0])\n rename_col_dict[pc] = NIS\n rename_col_dict['ABROAD'] = rename_abroad\n for pc in mmprox_agg.index:\n if pc != 'Foreigner':\n NIS = str(pc_to_nis[pc_to_nis['Postcode']==int(pc)]['NISCode'].values[0])\n rename_idx_dict[pc] = NIS\n rename_idx_dict['Foreigner'] = rename_foreigner\n\n # Rename the column names and indices to prepare for merging\n mmprox_agg = mmprox_agg.rename(columns=rename_col_dict, index=rename_idx_dict)\n\n mmprox_agg = mmprox_agg.groupby(level=0, axis=1).sum()\n mmprox_agg = mmprox_agg.groupby(level=0, axis=0).sum()#.astype(int)\n\n if agg in ['arr', 'prov']:\n # Rename columns\n for nis in mmprox_agg.columns:\n if nis != 'ABROAD':\n new_nis = nis[:-3] + '000'\n mmprox_agg = mmprox_agg.rename(columns={nis : new_nis})\n\n # Rename rows\n for nis in mmprox_agg.index:\n if nis != 'Foreigner':\n new_nis = nis[:-3] + '000'\n mmprox_agg = mmprox_agg.rename(index={nis : new_nis})\n\n # Collect rows and columns with the same NIS code, and automatically order column/row names\n mmprox_agg = mmprox_agg.groupby(level=0, axis=1).sum()\n mmprox_agg = mmprox_agg.groupby(level=0, axis=0).sum()#.astype(int)\n\n if agg == 'prov':\n # Rename columns\n for nis in mmprox_agg.columns:\n if nis not in ['ABROAD', '21000', '23000', '24000', '25000']: # Brussels is '11th province'\n new_nis = nis[:-4] + '0000'\n mmprox_agg = mmprox_agg.rename(columns={nis : new_nis})\n if nis in ['23000', '24000']:\n new_nis = '20001'\n mmprox_agg = mmprox_agg.rename(columns={nis : new_nis})\n if nis == '25000':\n new_nis = '20002'\n mmprox_agg = 
mmprox_agg.rename(columns={nis : new_nis})\n\n # Rename rows\n for nis in mmprox_agg.index:\n if nis not in ['Foreigner', '21000', '23000', '24000', '25000']:\n new_nis = nis[:-4] + '0000'\n mmprox_agg = mmprox_agg.rename(index={nis : new_nis})\n if nis in ['23000', '24000']:\n new_nis = '20001'\n mmprox_agg = mmprox_agg.rename(index={nis : new_nis})\n if nis == '25000':\n new_nis = '20002'\n mmprox_agg = mmprox_agg.rename(index={nis : new_nis})\n\n # Collect rows and columns with the same NIS code, and automatically order column/row names\n mmprox_agg = mmprox_agg.groupby(level=0, axis=1).sum()\n mmprox_agg = mmprox_agg.groupby(level=0, axis=0).sum()#.astype(int)\n \n return mmprox_agg"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setter method for static_routes, mapped from YANG variable /local_routes/static_routes (container)
|
def _set_static_routes(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name="static-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """static_routes must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name="static-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__static_routes = t
if hasattr(self, '_set'):
self._set()
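A final sketch tying the three setters together, under the same hypothetical bindings; pybindJSON is pyangbind's serialisation helper, while the route values remain illustrative:

from pyangbind.lib import pybindJSON

# Static routes follow the same pattern: a 'static' list keyed by 'prefix',
# with a nested 'next-hop' list keyed by 'index'.
sr = oc.local_routes.static_routes.static.add("0.0.0.0/0")
nh = sr.next_hops.next_hop.add("DEFAULT")
nh.config.next_hop = "192.0.2.1"
print(pybindJSON.dumps(oc.local_routes))  # dump the populated subtree as JSON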
|
[
"def _set_static(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"static\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"static must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"static\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__static = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_local_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name=\"local-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_routes must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name=\"local-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__local_routes = t\n if hasattr(self, '_set'):\n self._set()",
"def inspect_static_routes(app: App) -> 'List[StaticRouteInfo]':\n routes = []\n for sr, _, _ in app._static_routes:\n info = StaticRouteInfo(sr._prefix, sr._directory, sr._fallback_filename)\n routes.append(info)\n return routes",
"def set_routes(routes):\r\n self.routes = routes",
"def static_router(self, root, path, controller=None):\n\n log.info(\"Static file server was enabled in root: {r}.\".format(r=root))\n self.routes.append(StaticRoute(root, path, controller))",
"def _get_static_ipv4_routes(self):\n pass",
"def ipv6_routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['StaticRoutePropertiesArgs']]]]:\n return pulumi.get(self, \"ipv6_routes\")",
"def _get_static_ipv6_routes(self):\n pass",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def static_route_configuration(self) -> pulumi.Output[Optional['outputs.InternalNetworkPropertiesResponseStaticRouteConfiguration']]:\n return pulumi.get(self, \"static_route_configuration\")",
"def staticroutes_status(self, site_id, element_id, staticroute_id, tenant_id=None, api_version=\"v2.2\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/sites/{}/elements/{}/staticroutes/{}/status\".format(api_version,\n tenant_id,\n site_id,\n element_id,\n staticroute_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"get\")",
"def create_static_routes(ADDR_TYPE, input_dict, tgen, CWD, topo):\n\n try:\n global frr_cfg\n for router in input_dict.keys():\n if \"static_routes\" in input_dict[router]:\n static_routes_list = []\n\n # Reset config for routers\n frr_cfg[router].reset_it()\n\n static_routes = input_dict[router][\"static_routes\"]\n for static_route in static_routes:\n network = static_route[\"network\"]\n # No of IPs\n if \"no_of_ip\" in static_route:\n no_of_ip = static_route[\"no_of_ip\"]\n else:\n no_of_ip = 0\n\n if \"admin_distance\" in static_route:\n admin_distance = static_route[\"admin_distance\"]\n else:\n admin_distance = 1\n\n if \"tag\" in static_route:\n tag = static_route[\"tag\"]\n else:\n tag = None\n\n if \"if_name\" in static_route:\n if_name = static_route[\"if_name\"]\n else:\n if_name = None\n\n next_hop = static_route[\"next_hop\"]\n\n ip_list = generate_ips(ADDR_TYPE, network, no_of_ip)\n for ip in ip_list:\n ip = str(ipaddress.ip_network(unicode(ip)))\n if ADDR_TYPE == \"ipv4\":\n addr = Address(ADDR_TYPE_IPv4, ip, None)\n route = Route(addr)\n nh = Address(ADDR_TYPE_IPv4, next_hop, None)\n else:\n addr = Address(ADDR_TYPE_IPv6, None, ip)\n route = Route(addr)\n nh = Address(ADDR_TYPE_IPv6, None, next_hop)\n route.add_nexthop(nh, None, admin_distance, if_name, tag)\n\n static_routes_list.append(route)\n frr_cfg[router].routing_pb.static_route = static_routes_list\n\n interfaces_cfg(frr_cfg[router])\n static_rt_cfg(frr_cfg[router])\n frr_cfg[router].print_common_config_to_file(topo)\n # Load configuration to router\n load_config_to_router(tgen, CWD, router)\n\n except Exception as e:\n errormsg = traceback.format_exc()\n logger.error(errormsg)\n return errormsg\n\n return True",
"def build_routes(self):\n mapper = self.get_mapper()\n routes = mapper.get_routes(include_static=False)\n\n for route in routes:\n data = _get_route_data(route, self.request.registry)\n for name, pattern, view_data in data:\n\n if self.is_good_route_item(name, pattern, view_data):\n self.add_route_item(name, pattern, view_data)",
"def DelAllRouterStatic(self):\n req = self.ApiGet('cmdb/router/static/')\n data = json.loads(req.text)\n for y in range(0, len(data['results'])):\n route_id = data['results'][y]['seq-num']\n return_code = self.DelRouterStaticID(route_id)\n print('del route id:', route_id, '(', return_code, ')')\n if return_code != 200: return return_code\n return 200",
"def add_static_ipv4_route(self, add_route):\n pass",
"def hostRoutes(minHosts, maxHosts, minDests, maxDests, gateway, interface):\n # Create/open xml file\n with open(\"StaticRoute.xml\", \"a\") as f:\n # Route for every host\n for i in range(minDests, maxDests):\n f.write(\"<route hosts='host[%i..%i]' destination='host[%i]' gateway='%s' netmask = '255.255.255.255' interface='%s'/>\\n\" %(minHosts,maxHosts, i, gateway, interface));\n\n return;",
"def add_static_ipv6_route(self, add_route):\n pass",
"def modify_admin_distance_for_static_routes(input_dict, CWD, tgen, topo):\n logger.info(\"Entering lib API: modify_admin_distance_for_static_routes()\")\n\n try:\n for router in input_dict.keys():\n # Reset config for routers\n frr_cfg[router].reset_it()\n\n for static_route in input_dict[router].keys():\n next_hop = input_dict[router][static_route]['next_hop']\n admin_distance = input_dict[router][static_route]['admin_distance']\n\n for st in frr_cfg[router].routing_pb.static_route:\n st_ip_prefix = IpAddressMsg_to_str(st.prefix)\n for nh in st.nexthops:\n if st_ip_prefix == static_route and \\\n IpAddressMsg_to_str(nh.ip) == next_hop:\n nh.admin_distance = admin_distance\n\n interfaces_cfg(frr_cfg[router])\n static_rt_cfg(frr_cfg[router])\n frr_cfg[router].print_common_config_to_file(topo)\n # Load config to router\n load_config_to_router(tgen, CWD, router)\n\n except Exception as e:\n errormsg = traceback.format_exc()\n logger.error(errormsg)\n return errormsg\n\n logger.info(\"Exiting lib API: modify_admin_distance_for_static_routes\")\n return True",
"def add_static_route(self, gateway, destination, network=None):\n routing_node_gateway = RoutingNodeGateway(gateway,\n destinations=destination) \n return self._add_gateway_node('router', routing_node_gateway, network)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
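A note on the generated setter above: it is rarely invoked directly. pyangbind binds it into a class property, so ordinary attribute access runs the same validation. A minimal usage sketch, assuming bindings generated from the openconfig-local-routing module (the binding class and prefix value are illustrative assumptions):

def demo_static_route(binding_cls):
    # binding_cls: assumed pyangbind-generated root class for openconfig-local-routing.
    oc = binding_cls()
    # Attribute writes go through _set_static_routes() and the nested setters,
    # so an incompatible value raises the structured ValueError shown above.
    entry = oc.local_routes.static_routes.static.add("192.0.2.0/24")
    return entry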
Setter method for local_aggregates, mapped from YANG variable /local_routes/local_aggregates (container)
|
def _set_local_aggregates(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name="local-aggregates", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """local_aggregates must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name="local-aggregates", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__local_aggregates = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_aggregate(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name=\"aggregate\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"aggregate\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"aggregate must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"prefix\",yc_aggregate_openconfig_local_routing__local_routes_local_aggregates_aggregate, yang_name=\"aggregate\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"aggregate\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__aggregate = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_local_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name=\"local-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_routes must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name=\"local-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__local_routes = t\n if hasattr(self, '_set'):\n self._set()",
"def max_local_total_logical_configurations(self, max_local_total_logical_configurations):\n\n self._max_local_total_logical_configurations = max_local_total_logical_configurations",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_local_aggregates_aggregate_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def max_local_total_physical_configurations(self, max_local_total_physical_configurations):\n\n self._max_local_total_physical_configurations = max_local_total_physical_configurations",
"def max_local_analyses(self, max_local_analyses):\n\n self._max_local_analyses = max_local_analyses",
"def update_aggregates(self, ctxt, aggregates):\n # NOTE(sbauza): We're dropping the user context now as we don't need it\n self.host_manager.update_aggregates(aggregates)",
"def max_local_logical_configurations_per_physical(self, max_local_logical_configurations_per_physical):\n\n self._max_local_logical_configurations_per_physical = max_local_logical_configurations_per_physical",
"def addLocalPrefixRlocMap_IDL(self, lispHandle, eidInstanceId, prefixAF, prefix, prefixLen, localRlocs):\n pass",
"def read_location_aggregates(self, summary_filename_list):\n for sum_fname in summary_filename_list:\n try:\n csv_rdr, inf = get_csv_dict_reader(sum_fname, GBIF.DWCA_DELIMITER)\n for rec in csv_rdr:\n species_key = rec[SPECIES_KEY]\n self._add_record_to_location_summaries(\n rec[LOCATION_KEY], species_key, count=rec[COUNT_KEY])\n try:\n self._canonicals[species_key]\n except KeyError:\n self._canonicals[species_key] = rec[SPECIES_NAME_KEY]\n except Exception as e:\n raise Exception(f\"Failed to open or read {sum_fname}: {e}\")\n finally:\n inf.close()",
"def monitor_aiops_aggregates(self, tenant_id=None, api_version=\"v2.1\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.cdl_url\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/monitor/aiops/aggregates\".format(api_version,\n tenant_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"get\")",
"def ListAggregatedFunc(self):\n return self.api.addresses.aggregatedList",
"def rewrite_aggregates(tree, symtab):\n class Rewriter(S.QueryRewriter):\n expand = True\n \n def rewrite_aggr(self, symbol, name, aggr):\n # Only operate on min and max nodes.\n if isinstance(aggr.op, L.Min):\n func = 'min2'\n elif isinstance(aggr.op, L.Max):\n func = 'max2'\n else:\n return\n \n parts = L.get_setunion(aggr.value)\n if len(parts) <= 1:\n return\n multiple_queries = \\\n len([p for p in parts if not isinstance(p, L.Set)]) > 1\n \n i = 2\n done_first_query = False\n new_parts = []\n for p in parts:\n if isinstance(p, L.Set):\n # Flatten the literal elements as arguments to\n # min2/max2.\n new_parts.extend(p.elts)\n else:\n new_query_node = aggr._replace(value=p)\n if done_first_query:\n # Create a new query symbol and node for this\n # non-literal argument.\n new_name = name + '_aggrop' + str(i)\n i += 1\n new_parts.append(L.Query(new_name, new_query_node,\n None))\n symtab.define_query(new_name, node=new_query_node,\n impl=symbol.impl)\n else:\n # Push the Query node down to the first non-literal\n # argument.\n new_parts.append(L.Query(name, new_query_node, None))\n symbol.node = new_query_node\n done_first_query = True\n \n return L.Call(func, new_parts)\n \n tree = Rewriter.run(tree, symtab)\n return tree",
"def local_mappings(self):\n ret = self._get_attr(\"localMappings\")\n return ret",
"def initLocalCollection(self):\n\n # Get the local plugins information\n self.local = LocalPluginCollection(self.c)\n self.local.initFrom(self.local_path)\n\n # Get the active status of the plugins\n self.enable = EnableManager()\n self.enable.initFrom(self.local_path)\n self.local.setEnabledStateFrom(self.enable)",
"def _set_state(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"state must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_state_openconfig_local_routing__local_routes_local_aggregates_aggregate_state, is_container='container', yang_name=\"state\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__state = t\n if hasattr(self, '_set'):\n self._set()",
"def has_local_as(self, local_as):\n for as_path_seg in self._path_seg_list:\n for as_num in as_path_seg:\n if as_num == local_as:\n return True\n return False",
"def test_add_local():\n with patch(\n \"salt.utils.path.which\",\n MagicMock(return_value=\"/bin/lgroupadd\"),\n ) as which_mock:\n mock = MagicMock(return_value={\"retcode\": 0})\n with patch.dict(groupadd.__salt__, {\"cmd.run_all\": mock}):\n assert groupadd.add(\"test\", 100, local=True) is True\n which_mock.assert_called_once_with(\"lgroupadd\")\n mock.assert_called_once_with(\n [\"/bin/lgroupadd\", \"-g 100\", \"test\"], python_shell=False\n )",
"def _set_lsps(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(\n v,\n base=lsps.lsps,\n is_container=\"container\",\n yang_name=\"lsps\",\n parent=self,\n path_helper=self._path_helper,\n extmethods=self._extmethods,\n register_paths=True,\n extensions=None,\n namespace=\"http://openconfig.net/yang/network-instance\",\n defining_module=\"openconfig-network-instance\",\n yang_type=\"container\",\n is_config=True,\n )\n except (TypeError, ValueError):\n raise ValueError(\n {\n \"error-string\": \"\"\"lsps must be of a type compatible with container\"\"\",\n \"defined-type\": \"container\",\n \"generated-type\": \"\"\"YANGDynClass(base=lsps.lsps, is_container='container', yang_name=\"lsps\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)\"\"\",\n }\n )\n\n self.__lsps = t\n if hasattr(self, \"_set\"):\n self._set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
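Worth noting for all of these setters: YANGDynClass raises TypeError or ValueError when the value is incompatible, and the except branch re-raises a ValueError whose single argument is the dict carrying 'error-string', 'defined-type', and 'generated-type'. A rough sketch of catching it, reusing the hypothetical binding instance oc from the sketch above:

try:
    oc.local_routes.local_aggregates = 42  # a scalar is not container-compatible
except ValueError as err:
    detail = err.args[0]  # the dict constructed inside the setter
    print(detail['error-string'])   # local_aggregates must be of a type compatible with container
    print(detail['defined-type'])   # container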
Setter method for local_routes, mapped from YANG variable /local_routes (container)
|
def _set_local_routes(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name="local-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """local_routes must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=yc_local_routes_openconfig_local_routing__local_routes, is_container='container', yang_name="local-routes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)""",
})
self.__local_routes = t
if hasattr(self, '_set'):
self._set()
|
[
"def _set_static_routes(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"static_routes must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_static_routes_openconfig_local_routing__local_routes_static_routes, is_container='container', yang_name=\"static-routes\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__static_routes = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_local_aggregates(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_aggregates must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_local_aggregates_openconfig_local_routing__local_routes_local_aggregates, is_container='container', yang_name=\"local-aggregates\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__local_aggregates = t\n if hasattr(self, '_set'):\n self._set()",
"def set_routes(routes):\r\n self.routes = routes",
"def addLocalPrefixRlocMap_IDL(self, lispHandle, eidInstanceId, prefixAF, prefix, prefixLen, localRlocs):\n pass",
"def add_local(route_dist, prefix, next_hop, route_family=VRF_RF_IPV4):\n try:\n # Create new path and insert into appropriate VRF table.\n tm = CORE_MANAGER.get_core_service().table_manager\n label = tm.add_to_vrf(route_dist, prefix, next_hop, route_family)\n # Currently we only allocate one label per local_prefix,\n # so we share first label from the list.\n if label:\n label = label[0]\n\n # Send success response with new label.\n return [{ROUTE_DISTINGUISHER: route_dist, PREFIX: prefix,\n VRF_RF: route_family, VPN_LABEL: label}]\n except BgpCoreError as e:\n raise PrefixError(desc=e)",
"def local_path(self, local_path: str):\n\n self._local_path = local_path",
"def _initialize_routes(self):\n\n for i,stop_id in enumerate(list(self.manager_stop.keys())):\n stop = self.manager_stop[stop_id]\n\n assert stop.demand <= self.env.capacity, stop.demand\n\n new_guid = \"route_\" + str(i)\n short_sequence = [self.manager_stop.depot,stop, self.manager_stop.depot]\n self.current_route[new_guid] = route.Route(sequence_stop=short_sequence,guid=new_guid)",
"def _set_local_ldp_lblspc(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"local-ldp-lblspc\", rest_name=\"local-ldp-lblspc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_ldp_lblspc must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"local-ldp-lblspc\", rest_name=\"local-ldp-lblspc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)\"\"\",\n })\n\n self.__local_ldp_lblspc = t\n if hasattr(self, '_set'):\n self._set()",
"def started_on_local(self, started_on_local):\n\n self._started_on_local = started_on_local",
"def addRemotePrefixRlocMap_IDL(self, lispHandle, eidInstanceId, prefixAF, prefix, prefixLen, state, remoteRlocs):\n pass",
"def _set_static(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"static\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"static must be of a type compatible with list\"\"\",\n 'defined-type': \"list\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGListType(\"prefix\",yc_static_openconfig_local_routing__local_routes_static_routes_static, yang_name=\"static\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='prefix', extensions=None), is_container='list', yang_name=\"static\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='list', is_config=True)\"\"\",\n })\n\n self.__static = t\n if hasattr(self, '_set'):\n self._set()",
"def ResetRoutes(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ResetRoutes\", params, headers=headers)\n response = json.loads(body)\n model = models.ResetRoutesResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def get_self_origin(self, routes):\n outroutes = []\n\n if len(routes) <= 1 or not any(route[SORG] is True for route in routes):\n outroutes = routes\n\n if any(route[SORG] is True for route in routes):\n outroutes = [route for route in routes if route[SORG] is True]\n\n return outroutes",
"def local_mappings(self):\n ret = self._get_attr(\"localMappings\")\n return ret",
"def _set_config(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"config must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_config_openconfig_local_routing__local_routes_static_routes_static_config, is_container='container', yang_name=\"config\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/local-routing', defining_module='openconfig-local-routing', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__config = t\n if hasattr(self, '_set'):\n self._set()",
"def has_local_as(self, local_as):\n for as_path_seg in self._path_seg_list:\n for as_num in as_path_seg:\n if as_num == local_as:\n return True\n return False",
"def local_start_time_starts_with(self, local_start_time_starts_with):\n\n self._local_start_time_starts_with = local_start_time_starts_with",
"def ReplaceRoutes(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ReplaceRoutes\", params, headers=headers)\n response = json.loads(body)\n model = models.ReplaceRoutesResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def __set_route(self, type_route, route):\n if type_route in self.__routes:\n if not self.verify_route_already_bound(type_route, route):\n self.__routes[type_route].append(route)\n else:\n self.__routes[type_route] = [route]\n return RouteMapping"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
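One shared detail: the hasattr(v, "_utype") guard at the top of each setter unwraps union-typed values before validation by calling the value's own converter on itself. A purely hypothetical stand-in showing the shape such a value takes:

class UnionValue:
    # The setter executes v = v._utype(v) before passing v to YANGDynClass,
    # so _utype must be a callable that coerces the wrapper to the target type.
    def __init__(self, value, target_type):
        self.value = value
        self._utype = lambda v: target_type(v.value)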
Convert this instance into a dict, including detailed information about the target/source build, i.e. build version and file name.
|
def to_dict_detail(self, target_lib, offset=0):
detail_info = asdict(self)
try:
with open(self.stdout, 'r') as fout:
detail_info['stdout'] = fout.read()
with open(self.stderr, 'r') as ferr:
detail_info['stderr'] = ferr.read()
except FileNotFoundError:
detail_info['stdout'] = 'NO STD OUTPUT IS FOUND'
detail_info['stderr'] = 'NO STD ERROR IS FOUND'
target_info = target_lib.get_build_by_path(self.target)
detail_info['target_name'] = target_info.file_name
detail_info['target_build_version'] = target_info.build_version
if self.incremental:
incremental_info = target_lib.get_build_by_path(
self.incremental)
detail_info['incremental_name'] = incremental_info.file_name
detail_info['incremental_build_version'] = incremental_info.build_version
return detail_info
|
[
"def build_info(self):\n \n path='/build_info'\n res = self.client.call(path, 'GET', data='', token=self.manager.identity.token)\n self.logger.debug('Openstack build info: %s' % \\\n truncate(res))\n return res[0]",
"def _info(self, identity=None):\n from collections import OrderedDict\n\n d = super(BuildBundle, self)._info(identity)\n\n d['source'] = OrderedDict(bundle=self.bundle_dir)\n\n deps = self.config.build.get('dependencies')\n d['build'] = OrderedDict(dependencies=deps if deps else '')\n\n if self.is_built:\n d['build'].update(self._build_info())\n\n return d",
"def getBuildInformation() -> retval:\n ...",
"def getDetailsDict(self):\n return {'url' : self.downloadLink,\n 'description' : self.description,\n 'date' : self.uploadedDate,\n 'struct_time' : self.struct_time,\n 'version' : self.version,\n 'filesize' : self.filesize,\n 'filename' : self.filename}",
"def _extractReleaseInfo(self):\n print(\"Extracting Version Info ... \", end=\"\")\n foundV = False\n foundD = False\n with open(self._extPath / \"novelwriter\" / \"__init__.py\") as inFile:\n for aLine in inFile:\n if aLine.startswith(\"__version__\"):\n self._nwRelease = aLine.split('\"')[1].strip()\n foundV = True\n elif aLine.startswith(\"__date__\"):\n self._nwDate = aLine.split('\"')[1].strip()\n foundD = True\n if foundV and foundD:\n break\n else:\n raise Exception(\"Could not find release version and date\")\n\n version, _, dev = self._nwRelease.partition(\"-\")\n self._nwDev = dev\n bits = version.split(\".\")\n if len(bits) > 0:\n self._nwMajor = int(bits[0])\n if len(bits) > 1:\n self._nwMinor = int(bits[1])\n if len(bits) > 2:\n self._nwPatch = int(bits[2])\n\n print(\"Done\")\n\n return",
"def collect(self):\n\n # NOTE: we could run each command inside a try/except block to have a\n # more granular protection and be able to save data from those commands\n # that didn't fail. Otherwise, if one command fails, all the data for\n # this Build is lost.\n\n data = {}\n data[\"config\"] = {\"user\": self.config.source_config}\n data[\"os\"] = self._get_operating_system()\n data[\"python\"] = self._get_python_version()\n\n user_apt_packages, all_apt_packages = self._get_apt_packages()\n conda_packages = (\n self._get_all_conda_packages() if self.config.is_using_conda else {}\n )\n data[\"packages\"] = {\n \"pip\": {\n \"user\": self._get_user_pip_packages(),\n \"all\": self._get_all_pip_packages(),\n },\n \"conda\": {\n \"all\": conda_packages,\n },\n \"apt\": {\n \"user\": user_apt_packages,\n \"all\": all_apt_packages,\n },\n }\n data[\"doctool\"] = self._get_doctool()\n\n return data",
"def info_dict(self):\n\n paths = self.paths()\n if not paths:\n return {\n 'reference_path': self.reference_path(),\n 'online': False,\n 'nb_frames_available': 0,\n 'is_sequence': True,\n }\n\n nb_frames = len(paths)\n missing_frames = []\n\n all_frames = set(self._frames)\n for frame in range(self.start_frame(), self.end_frame()):\n if frame not in all_frames:\n missing_frames.append(frame)\n\n seq_label = '%s.[%s-%s]%s' % (\n self.basename(),\n self.start_frame(),\n self.end_frame(),\n self.extension()\n )\n\n return {\n 'reference_path': self.reference_path(),\n 'online': True,\n 'start_frame': self.start_frame(),\n 'end_frame': self.end_frame(),\n 'nb_frames_available': nb_frames,\n 'nb_missing_frames': len(missing_frames),\n 'missing_frames': missing_frames,\n 'available_frames': list(all_frames - set(missing_frames)),\n 'is_sequence': True,\n 'label': seq_label\n }",
"def as_dict(self):\n return {\n 'entry_id': self.entry_id,\n 'version': self.version,\n 'domain': self.domain,\n 'title': self.title,\n 'data': self.data,\n 'source': self.source,\n }",
"def get_node_metadata(self):\n return {\n class_json_consts.PACKAGE: self.package,\n class_json_consts.CLASS: self.class_name,\n class_json_consts.BUILD_TARGETS: sorted(self.build_targets),\n class_json_consts.NESTED_CLASSES: sorted(self.nested_classes),\n }",
"def setup_build_properties(self):",
"def getInfo(self, filename):\n self.info = {\n 'video': self.getVideoInfo(filename),\n 'audio': self.getAudioInfo(filename)\n }\n return self.info",
"def get_iphone_build_version(self):\n return self.parsed_info_file['Build Version']",
"def metadata(self) -> dict[str, Any]:",
"def _collect_python_info(self):\n if self._info is None:\n if self.root_dir == sys.prefix:\n # optimize for current python environment by directly using\n # the current interpreter\n import site\n import activestate\n pyver = '%d.%d' % sys.version_info[:2]\n apyver = activestate.version\n user_site = site.USER_SITE\n user_base = site.USER_BASE\n else:\n expr = [\n '\"%d.%d\" % sys.version_info[:2]',\n 'activestate.version',\n 'site.USER_SITE',\n 'site.USER_BASE',]\n with_modules = ['sys', 'site', 'activestate']\n output = self.eval(expr, with_modules).strip().splitlines()\n pyver, apyver, user_site, user_base = output\n self._info = dict(\n pyver = pyver,\n apyver = apyver,\n user_site = user_site,\n user_base = user_base,\n )",
"def get_koji_build_info(build_id, session, config):\n\n print(\"Retriewing build metadata from: \", config.koji_host)\n build = session.getBuild(build_id)\n if not build:\n raise Exception(\"Build with id '{id}' has not been found.\".format(id=build_id))\n\n print(\"Build with the ID\", build_id, \"found.\")\n\n return build",
"def SilicaAbout(self):\n\n compile_date = datetime.datetime.now().isoformat()\n try:\n upstream_url = check_output([\"git\", \"config\", \"--get\", \"remote.origin.url\"], cwd=self.root).decode(\"utf-8\")\n except Exception:\n upstream_url = \"undefined\"\n return {\n \"software\": \"Silica\",\n \"version\": self.version,\n \"compile_date\": compile_date,\n \"upstream_url\": upstream_url\n }",
"def build_file(self):\n return self._build_file",
"def get_build_info(json_file):\n try:\n fp = open(json_file, 'rU')\n text = fp.read()\n fp.close()\n text = re.sub(COMMENT_RE, '', text, flags=re.M)\n build_info = json.loads(text)\n except Exception as e:\n print \"Error in {0}:\\n{1}\".format(json_file, str(e))\n return None\n if not check_build_info(build_info):\n return None\n # if necessary add a value for \"visualizer_url\"\n if \"visualizer_url\" not in build_info:\n build_info[\"visualizer_url\"] = DEFALUT_VIZ_URL\n # merge DEFAULT_VIZ_CONFIG with the supplied \"default_viz_config\"\n config = DEFAULT_VIZ_CONFIG\n config[\"editCodeBaseURL\"] = build_info[\"visualizer_url\"]\n config.update(build_info.get(\"default_viz_config\", {}))\n build_info[\"default_viz_config\"] = config\n # update all the \n for key, value in build_info.iteritems():\n if key.endswith('.html'):\n for py_key, py_dict in value[1].iteritems():\n update_dict(py_dict, build_info.get(\"default_viz_config\", {}))\n return build_info",
"def get_target_extraction_context(self, build_file_path: str) -> dict:\n extraction_context = {}\n for name, builder in Plugin.builders.items():\n extraction_context[name] = extractor(name, builder,\n build_file_path, self)\n return extraction_context"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
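A short caller sketch for to_dict_detail above. The stand-ins are assumptions inferred from the attributes it uses: the owning class must be a dataclass (asdict() requires one) with stdout/stderr log paths plus target/incremental build paths, and target_lib must resolve a path to an object carrying file_name and build_version:

from dataclasses import dataclass

@dataclass
class BuildRecord:
    file_name: str
    build_version: str

class FakeTargetLib:
    # Hypothetical lookup used only to exercise the method.
    def get_build_by_path(self, path):
        return BuildRecord(file_name=path.rsplit('/', 1)[-1], build_version='1.0')

detail = job.to_dict_detail(FakeTargetLib())  # job: an existing instance
print(detail['target_name'], detail['target_build_version'])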
Insert the job_info into the database
|
def insert_database(self, job_info):
with sqlite3.connect(self.path) as connect:
cursor = connect.cursor()
cursor.execute("""
            INSERT INTO Jobs (ID, TargetPath, IncrementalPath, Verbose, Partial, OutputPath, Status, Downgrade, OtherFlags, STDOUT, STDERR, StartTime, FinishTime)
VALUES (:id, :target, :incremental, :verbose, :partial, :output, :status, :downgrade, :extra, :stdout, :stderr, :start_time, :finish_time)
""", job_info.to_sql_form_dict())
|
[
"def _create_job_info(self, job_dir):\n meta = self._build_job_meta(job_dir)\n\n self.logger.debug(\"Create job: %s\" % meta)\n\n job_record = JobRecord.from_json(meta)\n job_record.save()",
"def insert_jobexp(exp_args, jobman_args):\n table_name = jobman_args.get(\"table_name\", \"experiment\")\n db = api0.open_db('postgres://jobuser:whatever@127.0.0.1/jobbase?table='+table_name)\n for arg in jobman_args:\n sql.insert_job(predictive_rl.rlglueexp.jobexp.run, flatten(arg), db)",
"def store_job_meta(self, mol_db_id):\n logger.info('Storing job metadata')\n rows = [(mol_db_id, self._ds.id, JobStatus.RUNNING, datetime.now().strftime('%Y-%m-%d %H:%M:%S'))]\n self._job_id = self._db.insert_return(JOB_INS, rows=rows)[0]",
"def save_job(instance_url, job_name, job_status):\n\n # initialize SQLite\n connection = sqlite3.connect(DB_NAME)\n cursor = connection.cursor()\n\n try:\n # Try to create \"jobs\" table if it hasn't been created\n cursor.execute(\"CREATE TABLE jobs (instance_url, name, status, date_checked)\")\n except sqlite3.OperationalError:\n # Do nothing since table has already been created\n pass\n\n query = (\"INSERT INTO jobs VALUES ('{instance}', '{job}', '{status}', '{checkin}')\"\n .format(\n instance=instance_url, job=job_name, status=job_status, checkin=datetime.now()\n ))\n\n # Execute INSERT query and conclude operation\n cursor.execute(query)\n connection.commit()\n connection.close()",
"def addjobinfo(self):\n self.job = {}\n if self.sid:\n try:\n response, content = rest.simpleRequest('search/jobs/%s' % self.sid,\n sessionKey=self.session_key,\n getargs={'output_mode': 'json'})\n if response.status == 200:\n self.job = json.loads(content)['entry'][0]['content']\n self.message('Successfully retrieved search job info')\n self.logger.debug(self.job)\n else:\n self.message('Could not retrieve search job info', level=logging.WARN)\n except Exception as e:\n self.message('Could not retrieve search job info', level=logging.WARN)",
"def apply_for_job(db, job_id, user_id):\n\n cursor = db.cursor()\n sql = \"INSERT INTO jobApplication(jobID, userID, status) VALUES (?, ?, ?);\"\n\n cursor.execute(sql, (job_id, user_id, 0))\n db.commit()",
"def save_details(self, jb_dict, top):\n\n jb_dict['job_id'] = self.job_id\n\n company_name = self.entry_company_name.get()\n jb_dict['company_name'] = company_name\n\n job_title = self.entry_job_title.get()\n jb_dict['job_title'] = job_title\n\n job_location = self.entry_job_location.get()\n jb_dict['job_location'] = job_location\n\n contact_person = self.entry_contact_person.get()\n jb_dict['contact_person'] = contact_person\n\n type_of_employment = self.entry_type_of_employment.get()\n jb_dict['type_of_employment'] = type_of_employment\n\n type_of_contract = self.entry_type_of_contract.get()\n jb_dict['type_of_contract'] = type_of_contract\n\n job_description = self.entry_job_description.get(\"1.0\", tk.END)\n jb_dict['job_description'] = job_description\n\n application_deadline = str(self.entry_application_deadline.get())\n jb_dict['application_deadline'] = application_deadline\n\n try:\n self.no_of_hires = int(self.entry_no_of_hires.get())\n jb_dict['no_of_hires'] = self.no_of_hires\n except (ValueError, AttributeError):\n mb.showerror(\"Error\", \"Please give valid number in No. of hires field!\", parent=self.top)\n self.entry_no_of_hires.focus()\n\n try:\n self.phone_no = self.entry_phone_no.get()\n jb_dict['phone_no'] = self.no_of_hires\n except (ValueError, AttributeError):\n mb.showerror(\"Error\", \"Please enter a valid Phone No.!\", parent=self.top)\n self.entry_phone_no.focus()\n\n try:\n if self.no_of_hires and self.phone_no:\n #jd().add_issue_key(jb_dict)\n #jd().update_job_id_tracker(self.id)\n \"\"\"thread module - starts\"\"\"\n self.check_tag = \"add_job_save\"\n self.issue_key = 0\n self.update_jb_dict = []\n self.master = tk.Toplevel()\n\n tpau(self.master, self.check_tag, jb_dict, self.id, self.issue_key, self.update_jb_dict)\n \"\"\"thread module - ends\"\"\"\n\n mb.showinfo('Saved', 'Saved New JOB into Jira Database', parent=self.top)\n self.cancel()\n if jt.selected_job_id:\n x = jt.selected_job_id.pop()\n y = jt.issue_key_list.pop()\n z = jt.update_values.pop()\n print(\"Pop elements\", x, y, z)\n jt(self.root).jobs_table(self.first_frame)\n else:\n mb.showwarning('Warning', 'Please fill in the all the details', parent=self.top)\n except (ValueError, AttributeError) as e:\n print(e)",
"def insert_job():\n seed = str(int(time.time())) # round it\n total_shards = tasa.conf.shards\n jobs = []\n for shard in range(1, total_shards + 1):\n jobs.append(\n [seed, '%d/%d' % (shard, total_shards), tasa.conf.ports])\n MasscanWorker.qinput.send(*jobs)",
"def insert_result(job_info, job_result, log_dict):\n job_id = job_info.job_id\n model_name = job_info.model_name\n indexs = sorted([int(x) for x in (str(job_info.report_index).split(','))], reverse=True)\n for index in indexs:\n mr = bm.JobResults()\n mr.job_id = job_id\n mr.model_name = model_name\n mr.report_index_id = index\n if index == 0:\n values = {}\n for key in job_result[\"performance\"].keys():\n values[key] = job_result[\"performance\"][key][1]\n mr.report_result = values\n mr.result_log = log_dict\n elif index == 1:\n mr.report_result = job_result[\"speed\"]\n elif index == 2:\n mr.report_result = job_result[\"gpu_train_mem_max\"]\n elif index == 3:\n mr.report_result = job_result[\"train_time\"]\n elif index == 4:\n mr.report_result = job_result[\"infer_speed\"]\n elif index == 5:\n mr.report_result = job_result[\"gpu_infer_mem_max\"]\n else:\n logging.error(\"error!\")\n mr.save()",
"def insert_job_to_currents(jobid, start_time):\n with open(current_job_file,'w') as outf:\n try:\n details = get_job_details(jobid)\n outf.write(jobid + \"=>\" + start_time.strftime(DateFormat.COMPACT) + \"\\n\")\n return details[0] # job name\n except KeyError:\n msg = \"There is no job corresponding to this jobid %s\" % jobid\n raise ValueError(msg) # find a better exception",
"def insert(self, sql):",
"def add_jobs():\n logger.info('Working with Job class')\n logger.info('Creating Job records: just like Person. We use the foreign key')\n\n JOB_NAME = 0\n START_DATE = 1\n END_DATE = 2\n SALARY = 3\n PERSON_EMPLOYED = 4\n DEPARTMENT = 5\n\n jobs = [\n ('Y1 Hogwarts Student', '1990-09-01', '1991-05-05', 0, 'Harry', 'STU'),\n ('Y2 Hogwarts Student', '1991-09-02', '1992-05-06', 0, 'Harry', 'STU'),\n ('Hogwarts Headmaster', '1970-09-01', '1997-06-30', 100000, 'Albus', 'ADM'),\n ('Evil Villain', '1938-09-04', '1998-05-02', 500000, 'Tom', 'DA'),\n ('Teacher', '1980-09-12', '1997-05-16', 75000, 'Sybill', 'EDU')\n ]\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for job in jobs:\n with database.transaction():\n new_job = Job.create(\n job_name=job[JOB_NAME],\n start_date=job[START_DATE],\n end_date=job[END_DATE],\n duration=date_deltas(job[START_DATE], job[END_DATE]),\n salary=job[SALARY],\n emplid=job[PERSON_EMPLOYED],\n job_department=job[DEPARTMENT])\n new_job.save()\n logger.info('Database add successful')\n\n logger.info(\n 'Reading and print all Job rows (note the value of person)...')\n for job in Job:\n logger.info(f'{job.job_name} : {job.start_date} to {job.end_date} for {job.emplid}')\n\n except Exception as e:\n logger.info(f'Error creating = {job[JOB_NAME]}')\n logger.info(e)\n\n finally:\n logger.info('database closes')\n database.close()",
"def create_jobs_and_queue(self):\n new_job_exists = False\n\n #c = self.db.cursor(cursor_factory=psycopg2.extras.DictCursor)\n #c.execute(\"SELECT * FROM deepstyle_job WHERE job_status='Q'\")\n c = self.safe_execute_sql(\"SELECT * FROM deepstyle_job WHERE job_status='Q'\", curs_fact=True)\n\n row = c.fetchone()\n\n while row is not None:\n try:\n self.job_queue.put(job(j_id=row['id'],\n im_name1= row['input_image'],\n im_name2= row['style_image'],\n output_name= row['output_image'],\n content_weight=row['content_weight'],\n content_blend=row['content_weight_blend'],\n style_weight=row['style_weight'],\n style_scale=row['style_scale'],\n style_blend=row['style_blend_weights'],\n style_layer_weight_exp=row['style_layer_weight_exp'],\n iterations=row['iterations'],\n preserve_color=row['preserve_color'],\n width=row['output_width'])\n )\n\n # Set queue status of current row's id to be 'queued'\n self.safe_execute_sql(\"UPDATE deepstyle_job SET job_status='P' WHERE id = (%s)\", True, (row['id'],))\n # c.execute(\"UPDATE deepstyle_job SET job_status='P' WHERE id = (%s)\", (row['id'],))\n new_job_exists = True\n self.logger.log.info(\"Job %d set In Progress\" % row['id'])\n\n\n except Exception as e:\n self.logger.log.error(\"Job %d could not be set In Progress\" % row['id'])\n self.logger.log.exception(e)\n\n #z = self.db.cursor()\n #z.execute(\"UPDATE deepstyle_job SET job_status='F' WHERE id = (%s)\", (row['id'],))\n self.safe_execute_sql(\"UPDATE deepstyle_job SET job_status='F' WHERE id = (%s)\", True, (row['id'],))\n\n try:\n row = c.fetchone()\n except:\n break\n\n c.close()\n\n if new_job_exists:\n self.db.commit()",
"def populate_job_details(workflow_stats , workflow_info):\n\ttotal_jobs =0\n\ttotal_jobs = workflow_stats.get_total_jobs_status()\n\tworkflow_info.total_jobs = total_jobs",
"def edit_job(self,query_data):\n self.cur.execute(query_data)\n self.conn.commit()",
"def insert_info(self):\n\t\tself.get_connection()\n\t\tc = self.conn.cursor()\n\t\t#For every USER in the dictionary\n\t\tfor i in range(len(self.info['USER'])):\n\t\t\t#insert them into the database (I <3 SQL INJECTIONS)\n\t\t\tc.execute(\"INSERT OR REPLACE INTO '{tn}' ('{user}', '{dp}', '{g}', '{f}', '{l}', '{e}') VALUES ('{idv}', '{dpv}', '{gv}', '{fv}', '{lv}', '{ev}');\".\\\n\t\t\t\tformat(tn=self.tn, user=self.user, dp=self.dp, g=self.g, f=self.f, l=self.l, e=self.e,\n\t\t\t\t\tidv=self.info['USER'][i], dpv=self.info['pass'][i], gv=self.info['group'][i],\n\t\t\t\t\t fv=self.info['first'][i], lv=self.info['last'][i], ev=self.info['email'][i]))\n\t\t#Log this datbase manipulation\n\t\tself.log_users_creation()\n\t\t#commit to database and close connection\n\t\tself.commit_db()",
"def add_Data(self, k, v):\n\n if v['site'] not in self.all_site.values():\n self.all_site[self.all_site.__len__()] = v['site']\n\n current_site = self.site(v['site'])\n\n if self.session.query(Job).filter(Job.name==k).first() is None :\n current_job = Job(k)\n\n for input_Element in v[\"inputs\"]:\n #Check if this input object is already in the database or not\n if self.session.query(Input).filter(Input.name==input_Element).first() is None :\n #if not we add it\n current_input = Input(input_Element)\n current_job.inputs.append(current_input)\n\n for connection in v[\"connections\"]:\n result = re.split(r'(\\S*)$', connection)\n #Check if this connection object is already in the database or not\n if self.session.query(Connection).filter(Connection.time==result[0]).first() is None :\n #if not we add it\n current_connection = Connection(result[0],result[1])\n current_job.connections.append(current_connection)\n\n current_site.jobs.append(current_job)\n\n self.session.add(current_site)\n\n self.session.commit()",
"def insert_job(self, process_id, camera, logname, version='1.0'):\n\n camera = self.insert_camera(camera)\n\n job = Job(\n process_id=process_id,\n camera_id=camera,\n logname=logname,\n version=version\n )\n job.save()\n\n return job",
"def _update_job_info(cls, job_dir):\n meta_file = os.path.join(job_dir, JOB_META_FILE)\n meta = parse_json(meta_file)\n\n if meta:\n logging.debug(\"Update job info for %s\" % meta[\"job_id\"])\n JobRecord.objects \\\n .filter(job_id=meta[\"job_id\"]) \\\n .update(end_time=timestamp2date(meta[\"end_time\"]))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
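The INSERT above assumes job_info.to_sql_form_dict() returns keys matching the named placeholders (:id, :target, and so on) and that a Jobs table already exists. A schema sketch that would satisfy the statement; only the column names are taken from the query, the types are assumptions:

import sqlite3

def ensure_jobs_table(path):
    with sqlite3.connect(path) as connect:
        connect.execute("""
            CREATE TABLE IF NOT EXISTS Jobs (
                ID TEXT PRIMARY KEY, TargetPath TEXT, IncrementalPath TEXT,
                Verbose INTEGER, Partial INTEGER, OutputPath TEXT, Status TEXT,
                Downgrade INTEGER, OtherFlags TEXT, STDOUT TEXT, STDERR TEXT,
                StartTime REAL, FinishTime REAL
            )
        """)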
Return the status of all jobs as a list of JobInfo
|
def get_status(self):
with sqlite3.connect(self.path) as connect:
cursor = connect.cursor()
cursor.execute("""
SELECT ID, TargetPath, IncrementalPath, Verbose, Partial, OutputPath, Status, Downgrade, OtherFlags, STDOUT, STDERR, StartTime, FinishTime
FROM Jobs
""")
rows = cursor.fetchall()
statuses = [JobInfo(*row) for row in rows]
return statuses
|
[
"def list_jobs(self, status=True):",
"def check_job_status_all(self):\r\n\t\ttry:\r\n\t\t\t# r = requests.get(self.base_url + \"jobs\")\r\n\t\t\tr = requests.get(self.base_url + \"joboverview\")\r\n\t\t\tr.raise_for_status()\r\n\t\t\t# print('Response from check_job_status_all: %s'%(r.text))\r\n\t\t\tresponse = json.loads(r.text)\r\n\t\t\tret = {}\r\n\t\t\t# put the latest status in the ret\r\n\t\t\tfor x in response['finished']:\r\n\t\t\t\talert_name = x['name']\r\n\t\t\t\tlatest_time_seen = float('inf') if alert_name not in ret else ret[alert_name]['start-time']\r\n\t\t\t\tif x['start-time'] < latest_time_seen:\r\n\t\t\t\t\tret[alert_name] = {'start-time':x['start-time'], 'status':x['state']}\r\n\t\t\t# remove the start-time as we need just the state\r\n\t\t\tfor alert_name in ret:\r\n\t\t\t\tret[alert_name] = 'FINISHED - ' + ret[alert_name]['status']\r\n\t\t\tfor x in response['running']:\r\n\t\t\t\tret[x['name']] = 'RUNNING'\r\n\t\t\treturn ret\r\n\t\texcept Exception as e:\r\n\t\t\traise Exception('Failed to check job statuses. Error: %s, %s'%(str(type(e)),str(e)))",
"def get_jobs_list(self):\n return [j['job'] for j in self._running_jobs.values()]",
"def jobs(self):\n return self.get_jobs()",
"def _job_list(self,printer):\n\n\t\t# *** NOTE *** we don't set language to 'neutral' since it is useful\n\t\t#\t\t\t\tto get localized date/time strings.\n\n\t\tresult = []\n\t\t(stdout,stderr,status) = self._shell_command(['/usr/bin/lpstat','-o',printer])\n\t\texpr = re.compile('\\s*(\\S+)\\s+(\\S+)\\s+(\\d+)\\s*(.*?)$')\n\t\tif status == 0:\n\t\t\tfor line in stdout.split(\"\\n\"):\n\t\t\t\tmobj = expr.match(line)\n\t\t\t\tif mobj:\n\t\t\t\t\tentry = {\n\t\t\t\t\t\t'job':\t\tmobj.group(1),\n\t\t\t\t\t\t'owner':\tmobj.group(2),\n\t\t\t\t\t\t'size':\t\tmobj.group(3),\n\t\t\t\t\t\t'date':\t\tmobj.group(4)\n\t\t\t\t\t}\n\t\t\t\t\tresult.append(entry)\n\t\treturn result",
"def get_viable_status():\n stats = []\n for key in Job.possible_status.keys():\n stats.append(Job.possible_status[key])\n return stats",
"def get_all_jobs() -> List[Type[base_jobs.JobBase]]:\n return base_jobs.JobMetaclass.get_all_jobs()",
"def jobs(self):\n return self.job_set.all()",
"def getJobStatus( self, jobIDList ):\n\n workingDirectory = self.ceParameters['WorkingDirectory']\n fd, name = tempfile.mkstemp( suffix = '.list', prefix = 'StatJobs_', dir = workingDirectory )\n jobListFile = os.fdopen( fd, 'w' )\n \n jobTmpList = list( jobIDList )\n if type( jobIDList ) in StringTypes:\n jobTmpList = [ jobIDList ]\n\n\n jobList = []\n for j in jobTmpList:\n if \":::\" in j:\n job = j.split(\":::\")[0] \n else:\n job = j\n jobList.append( job )\n jobListFile.write( job+'\\n' ) \n \n cmd = ['arcstat','-c',self.ceHost,'-i',name,'-j',self.ceParameters['JobListFile']]\n result = executeGridCommand( self.proxy, cmd, self.gridEnv )\n os.unlink( name )\n \n resultDict = {}\n if not result['OK']:\n self.log.error( 'Failed to get job status', result['Message'] )\n return result\n if result['Value'][0]:\n if result['Value'][2]:\n return S_ERROR(result['Value'][2])\n else:\n return S_ERROR('Error while interrogating job statuses')\n if result['Value'][1]:\n resultDict = self.__parseJobStatus( result['Value'][1] )\n \n if not resultDict:\n return S_ERROR('No job statuses returned')\n\n # If CE does not know about a job, set the status to Unknown\n for job in jobList:\n if not resultDict.has_key( job ):\n resultDict[job] = 'Unknown'\n return S_OK( resultDict )",
"def jobs(self):\n return self.Job.query",
"def job_array_status(self, classname, method, jobs):\n states = []\n for taskid, job in enumerate(jobs):\n state = self._query(job)\n if state in ['TIMEOUT']:\n print msg.TimoutError % (classname, method, job, PAR.TASKTIME)\n sys.exit(-1)\n elif state in ['FAILED', 'NODE_FAIL']:\n print ' task %d failed, retrying' % taskid\n jobs = self.resubmit_failed_job(classname, method, jobs, taskid)\n states += [0]\n\n elif state in ['COMPLETED']:\n states += [1]\n else:\n states += [0]\n\n isdone = all(states)\n\n return isdone, jobs",
"def list_bigjobs(self):\n return [i[\"bigjob\"] for i in self.bigjob_list]",
"def jobs(self):\n with TRN:\n sql = \"\"\"SELECT job_id FROM qiita.analysis_job\n WHERE analysis_id = %s\"\"\".format(self._table)\n TRN.add(sql, [self._id])\n return TRN.execute_fetchflatten()",
"def findFinishedJobs(self):\n jobList = []\n\n jobListAction = self.daoFactory(classname=\"Jobs.GetAllJobs\")\n jobList1 = jobListAction.execute(state=\"success\", limitRows=self.numberOfJobsToArchive)\n jobList2 = jobListAction.execute(state=\"exhausted\", limitRows=self.numberOfJobsToArchive)\n jobList3 = jobListAction.execute(state=\"killed\", limitRows=self.numberOfJobsToArchive)\n\n jobList.extend(jobList1)\n jobList.extend(jobList2)\n jobList.extend(jobList3)\n\n if len(jobList) == 0:\n # Then nothing is ready\n return []\n\n # Put together a list of job IDs\n binds = []\n for jobID in jobList:\n binds.append({\"jobid\": jobID})\n\n results = self.loadAction.execute(jobID=binds)\n\n if not isinstance(results, list):\n results = [results]\n\n doneList = []\n\n for entry in results:\n # One job per entry\n tmpJob = Job(id=entry['id'])\n tmpJob.update(entry)\n doneList.append(tmpJob)\n\n return doneList",
"def overview(self):\n return _execute_rest_request(url=f\"{self.prefix}/overview\")[\"jobs\"]",
"def filter_by_status(job_list, status='AVAILABLE'):\n jobs = [j for j in job_list if j.get_status() == status]\n print(f'Number of {status} jobs: {len(jobs)}.')\n return jobs",
"def get_jobs(self) -> t.Dict[str, Job]:\n with JM_LOCK:\n return self._jobs.completed",
"def __parseJobList__(self, text):\n jobList = []\n et = ET.fromstring(text.encode('utf-8'))\n for xmlJobStatus in et.find(\"jobs\"):\n jobList.append(JobStatus(client=self, xml=xmlJobStatus))\n return jobList",
"def list_jobs(self, process):\n logging.info(\"Listing\")\n r = requests.get(self.jobs.format(self.uri)+'/'+process)\n r.raise_for_status()\n return r.json()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
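JobInfo(*row) implies a constructor accepting the thirteen selected columns positionally. A minimal stand-in consistent with that call; the field names are assumptions mirroring the named placeholders used by insert_database above:

from dataclasses import dataclass

@dataclass
class JobInfo:
    # Fields in the same order as the SELECT column list.
    id: str
    target: str
    incremental: str
    verbose: bool
    partial: bool
    output: str
    status: str
    downgrade: bool
    extra: str
    stdout: str
    stderr: str
    start_time: float
    finish_time: float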
Change the status and finish time of a job in the database
|
def update_status(self, id, status, finish_time):
with sqlite3.connect(self.path) as connect:
cursor = connect.cursor()
cursor.execute("""
UPDATE Jobs SET Status=(?), FinishTime=(?)
WHERE ID=(?)
""",
(status, finish_time, id))
|
[
"def update_final_job_logstatus():\n list = [\"deleted\", \"killed\"]\n pcjs = bm.Job.objects.filter(log_extracted='no')\n pcjs = pcjs.filter(status__in=list)\n dt = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n for job in pcjs:\n bm.Job.objects.filter(job_id=job.job_id).update(\n log_extracted='fail')\n bm.Job.objects.filter(job_id=job.job_id).update(\n update_time=dt)",
"def job_set_status(self, job, status, setextra=None):\n now = int(time.time())\n cur = self.conn.cursor()\n if setextra:\n cmd = \"update idx set status='%s', date_status=%d, %s where xtcfn = '%s'\" % (\n status, now, setextra, job.fn)\n else:\n cmd = \"update idx set status = '%s', date_status = %d where xtcfn = '%s'\" % (\n status, now, job.fn)\n print cmd\n cur.execute(cmd)\n self.conn.commit()",
"def update_job_status(jid, newstatus): # update the status of the job by altering the dictionary\n# jid, status, start, end = rd.hmget(generate_job_key(jid), 'id', 'status', 'start', 'end')\n olddict = rd.hgetall(generate_job_key(jid))\n print(olddict) \n job = _instantiate_job(jid, olddict, 'status', newstatus) # returns a job dictionary\n if job:\n _save_job(generate_job_key(jid), job)\n else:\n raise Exception()",
"def update_job_status(jid, new_status):\n jid, status, store_input, start_date, end_date = rd_jobs.hmget(_generate_job_key(jid), 'id', 'status', 'store_input', 'start_date', 'end_date')\n job = _instantiate_job(jid, status, store_input, start_date, end_date)\n \n if(new_status == 'in progress'):\n worker_IP = os.environ.get('WORKER_IP')\n print(worker_IP)\n rd_jobs.hset(_generate_job_key(jid), 'worker', worker_IP)\n\n if job:\n job['status'] = new_status\n _save_job(_generate_job_key(job['id']), job)\n else:\n raise Exception()",
"def clean(self):\n\n self.cursor.execute(\"UPDATE Jobs SET status=? WHERE status=?\", (\"pending\", \"running\"))",
"def update(self):\n self._job = pyslurm.job().find_id(str(self.id))[0]",
"def finish_process(self):\n self.percent_done = 100\n self.end_time = strftime(\"%Y-%m-%d %H:%M:%S\", gmtime())\n self.endtime_num = time.time()",
"def edit_job(self,query_data):\n self.cur.execute(query_data)\n self.conn.commit()",
"def do_job(self):\n session = DBSession()\n daywork = func.sum(func.julianday(Task.end_time) - func.julianday(Task.start_time)) * 86400\n day = func.date(Task.start_time)\n query = session.query(day, daywork) \\\n .group_by(day) \\\n .filter(func.date(Task.start_time) > func.date('now', '-%s day' % self.args.days)) \\\n .filter(Task.end_time != None) \\\n .order_by(Task.start_time)\n\n print()\n\n table = PrettyTable(['Day', 'Work Time', 'Graph'])\n table.align[\"Graph\"] = \"l\"\n total_hours = timedelta(0)\n last_day = None\n for row in query:\n day = datetime.strptime(row[0], '%Y-%m-%d').date()\n if last_day:\n diff = (day - last_day)\n for i in range(diff.days - 1):\n table.add_row([last_day + timedelta(i + 1), 0, ''])\n\n worktime = timedelta(seconds=round(row[1]))\n total_hours += worktime\n table.add_row([day, worktime, '#' * int(round((row[1] * 60 / 86400)))])\n last_day = day\n\n print(table)\n print('Total Work time: %s\\n' % total_hours)",
"def save(self, *args, **kwargs):\n if self.status == self.STATUS_NOT_STARTED:\n self.started = None\n self.completed = None\n self.open = False\n elif self.status == self.STATUS_STARTED:\n self.started = timezone.now()\n self.completed = None\n self.open = True\n elif self.status in [self.STATUS_COMPLETED, self.STATUS_ABANDONED]:\n self.completed = timezone.now()\n self.open = False\n\n super().save(*args, **kwargs)",
"def update_task(conn, task):\n sql = ''' UPDATE tasks\n SET status_id = ? ,\n end_date = ? \n WHERE id = ?'''\n c = conn.cursor()\n c.execute(sql, task)\n conn.commit()",
"def test_api_can_update_a_job(self):\r\n jobs = ReplicationJobs.objects.get(jobStatus='TESTING5')\r\n change_bucketlist = {'jobStatus': 'TESTING6',\r\n 'cronStr': 'required',\r\n 'destField': 'TST', \r\n 'destTableName': 'dummytab', \r\n 'destSchema': 'dummySchema'}\r\n res = self.client.put(\r\n reverse('job_details', kwargs={'jobid': jobs.jobid}),\r\n change_bucketlist, format='json'\r\n )\r\n # print 'update result', res.content\r\n\r\n self.assertEqual(res.status_code, status.HTTP_200_OK)",
"def updateTimeWaited(self, long: int) -> None:\n ...",
"def test_queue_job_timestamps(self):\n\n data_dict = convert_data_to_v6_json(Data()).get_dict()\n job = job_test_utils.create_job(num_exes=1, status='CANCELED', input=data_dict, started=timezone.now(),\n ended=timezone.now())\n\n Job.objects.update_jobs_to_queued([job], timezone.now(), requeue=True)\n job = Job.objects.get(pk=job.id)\n\n self.assertEqual(job.status, 'QUEUED')\n self.assertIsNotNone(job.queued)\n self.assertIsNone(job.started)\n self.assertIsNone(job.ended)",
"def export_setJobStatus( self, jobID, status, minorStatus, source = 'Unknown', datetime = None ):\n jobReport = RPCClient( 'WorkloadManagement/JobStateUpdate' )\n jobStatus = jobReport.setJobStatus( int( jobID ), status, minorStatus, source, datetime )\n return jobStatus",
"def mark_as_running(self):\n self.status = self.STATUS_RUNNING\n self.started_running_datetime = timezone.now()\n self.clean()\n self.save()",
"async def update_status_work(description, status = None):\n try:\n async with aiomysql.connect(host=settings.settings.abb_db_host, port=settings.settings.abb_db_port, db=settings.abb_db, user=settings.settings.abb_db_user, password=settings.settings.abb_db_password) as conn:\n cur = await conn.cursor()\n print(description)\n if status:\n await cur.execute(f\"update work_parser set status_work = '{description}', status = {status}; commit;\")\n else:\n await cur.execute(f\"update work_parser set status_work = '{description}'; commit;\")\n await cur.close()\n except Exception as exc:\n print(exc)",
"def update_task(connection, task):\n sql = ''' UPDATE tasks\n SET priority = ? ,\n begin_date = ? ,\n end_date = ?\n WHERE id = ?'''\n cur = connection.cursor()\n cur.execute(sql, task)\n connection.commit()",
"def form_finish_project_info(self):\n start_time_str = self.running_project[0]['start_time']\n start_time_tuple = time.strptime(start_time_str, \"%Y-%m-%d %H:%M:%S\")\n start_time = time.mktime(start_time_tuple)\n use_time = self.finish_time - start_time\n using_hour = int(use_time / 3600)\n using_min = int((use_time % 3600) / 60)\n using_seconds = int(use_time % 60)\n self.use_time = \"%sH%sM%sS\" % (using_hour, using_min, using_seconds)\n self.finished_project = self.running_project[0]\n self.finished_project[\"start_time\"] = self.start_time_str\n self.finished_project[\"using_time\"] = self.use_time\n self.finished_project[\"complete_status\"] = self.complete_status"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Enumerates quotes from a filename or a stream. filename: filename or stream. encoding: applicable only if filename is a path. empty_name: replaces an empty author name. Yields quotes one by one; a quote is defined as a dictionary.
|
def enumerate_quotes(filename, encoding="utf-8", empty_name="Inconnu"):
if isinstance(filename, str):
with open(filename, "r", encoding=encoding) as f:
            for q in enumerate_quotes(f, empty_name=empty_name):  # forward empty_name when recursing on the opened stream
yield q
else:
re1 = re.compile("chapter[{]([0-9]+)[}]")
re2 = re.compile(
"[\\]begin[{]xcitt?[}][{](.*?)[}][{](.*?)[}][{](.*?)[}][{](.+?)[}]"
)
re3 = re.compile(
"[\\]begin[{]xcita[}][{](.*?)[}][{](.*?)[}][{](.+?)[}][{](.*?)[}][{](.*?)[}][{](.+?)[}]"
)
re4 = re.compile(
"[\\]begin[{]xcitenfant[}][{](.*?)[}][{](.*?)[}][{](.*?)[}][{](.+?)[}]"
)
re5 = re.compile(
"[\\]begin[{]xcitw[}][{](.*?)[}][{](.*?)[}][{](.*?)[}][{](.+?)[}][{](.+?)[}]"
)
re6 = re.compile(
"[\\]begin[{]xcita3[}][{](.*?)[}][{](.*?)[}][{](.+?)[}][{](.*?)[}][{](.+?)[}][{](.*?)[}][{](.*?)[}][{](.+?)[}]"
)
def process_content(il, content):
find = re2.search(content[0])
if find:
author, name, book, index = find.groups()
obs = dict(
author="{0} {1}".format(name, author),
book=book,
index=index,
year=year,
)
else:
find = re3.search(content[0])
if find:
author1, name1, author2, name2, book, index = find.groups()
obs = dict(
author="{0} {1}, {2} {3}".format(
name1, author1, name2, author2
),
book=book,
index=index,
year=year,
)
else:
find = re4.search(content[0])
if find:
author, name, book, index = find.groups()
obs = dict(
author="{0} {1}".format(name, author),
book=book,
index=index,
year=year,
tag="enfant",
)
else:
find = re5.search(content[0])
if find:
author, name, book, index, date = find.groups()
obs = dict(
author="{0} {1}".format(name, author),
book=book,
index=index,
year=year,
date=date,
)
else:
find = re6.search(content[0])
if find:
(
author,
name,
a2,
n2,
a3,
n3,
book,
index,
) = find.groups()
obs = dict(
author="{} {}, {} {}, {} {}".format(
name, author, n2, a2, n3, a3
),
book=book,
index=index,
year=year,
)
else:
raise FormatException( # pragma: no cover
"Unable to interpret line {0}: '{1}'".format(
il, content[0]
)
)
content = "\n".join(content[1:-1])
content = content.replace("~", " ")
content = content.replace("\\quad", "...")
obs["content"] = content
if not obs["author"]:
obs["author"] = empty_name
return obs
year = None
content = []
for il, line in enumerate(filename):
sline = line.strip()
if sline.startswith("\\chapter{"):
chap = re1.search(sline)
if chap:
year = chap.groups()[0]
else:
raise FormatException( # pragma: no cover
"Unable to process line {0}: '{1}'".format(il, sline)
)
else:
if sline.startswith("\\begin{xcit"):
content.append(sline)
elif sline.startswith("\\end{xcit"):
content.append(sline)
yield process_content(il, content)
content.clear()
else:
if content:
content.append(sline)
else:
# between quotes
pass
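# Usage sketch (hypothetical LaTeX source made of \chapter{YYYY} headings and
# \begin{xcit...} blocks, as parsed above):
#   for quote in enumerate_quotes("citations.tex", encoding="utf-8"):
#       print(quote["year"], quote["author"], quote["content"][:40])
# Requires `import re`; `FormatException` is defined elsewhere in the source module.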
|
[
"def __read_quotes(self, quote_path):\n with codecs.open(quote_path, mode=\"r\", encoding=\"utf-8\", errors='ignore') as f:\n lines = f.readlines()\n \n for line in lines:\n splitted = line.replace('\\n','').replace('\\r', '').split(': ')\n if (len(splitted) > 1):\n if (splitted[0] not in self.quotes.keys()):\n self.quotes[splitted[0]] = \"\"\n self.quotes[splitted[0]] += ' ' + splitted[1]\n\n for character, sentences in self.quotes.items():\n self.quotes[character] = sent_tokenize(sentences)",
"def __init__(self, file_name, encoding):\n self.reader = codecs.getreader(encoding)(file_name)",
"def get_quote_from_file(self):\n with open('data/mot_quotes.txt', 'r') as read_file:\n data = json.load(read_file)\n self.quote = random.choice(data[\"quotes\"])\n while not self.is_quote_valid():\n self.quote = random.choice(data[\"quotes\"])",
"def exfile_open(filename, *args, **kwargs):\n if filename is None:\n from itertools import repeat\n\n _file = repeat(None)\n else:\n import codecs\n\n _file = codecs.open(filename, *args, **kwargs)\n yield _file\n if filename is not None and _file:\n _file.close()",
"def get_quote(file=\"addons/quotes.csv\"):\n # get length of file\n num_lines = sum(1 for line in open(file))\n # select random row\n index = random.randint(0, num_lines)\n with open(file, 'r', errors='ignore') as f:\n reader = csv.reader(f)\n row = [row for idx, row in enumerate(reader) if idx == index][0]\n return {\"author\": row[0], \"quote\": row[1]}",
"def processQuotes(strSyms, sym, xmlFile, rawlineFile):\n strUrl='http://finance.yahoo.com/d/quotes.csv?f=sd1t1l1bawmc1vj2&e=.csv'\n strUrl = strUrl + strSyms\n try:\n f = urllib.request.urlopen(strUrl)\n\n except:\n # catch the expection if cant read url\n print(\"URL access failed:\\n\" + strUrl)\n return\n\n for line in f.readlines():\n line = line.decode().strip(); # convert byte array to string\n print(line, file = rawlineFile)\n if line == \"\\\"\" + sym + \"\\\"\" + \",N/A,N/A,N/A,N/A,N/A,N/A,N/A,N/A,N/A,N/A\": # if stock symbol is unknown\n print(\"Unknown symbol: match failed\\n\")\n else:\n # print the stock quote data\n print(\"<stockquote>\", file=xmlFile)\n symbol(line, xmlFile)\n date(line, xmlFile)\n time(line, xmlFile)\n lastSalePrice(line, xmlFile)\n bidPrice(line, xmlFile)\n askPrice(line, xmlFile)\n weekLow(line, xmlFile)\n weekHigh(line, xmlFile)\n todayLow(line, xmlFile)\n todayHigh(line, xmlFile)\n netChangePrice(line, xmlFile)\n shareVolumeQty(line, xmlFile)\n totalShares(line, xmlFile)\n print(\"</stockquote>\", file=xmlFile)",
"def parse_quotes(cls, path: str) -> List[QuoteModel]:\n if not cls.can_ingest(path):\n raise Exception('incompatible file type')\n\n quotes = []\n\n with open(path, 'r') as infile:\n doc = infile.read()\n\n for row in doc.split('\\n'):\n body, author = row.split('-')\n quote = QuoteModel(author=author.strip(), body=body.strip())\n quotes.append(quote)\n\n return quotes",
"def get_quote():\n data = loads(open('quotes.json').read())\n selected = choice(data)\n text = selected['text']\n author = selected['author']\n return f'{text[:30]}.. — {author}'",
"def read_ascii_file(str_filename):\n if str_filename.endswith('.csv'):\n return read_csv_file(str_filename)\n elif str_filename.endswith('.json'):\n return read_json_file(str_filename)\n else:\n print(\"Err: Unknown file type: {}\".format(str_filename.rsplit(os.sep, 1)))\n return list()",
"def grabQuotes(quoteList):\n\n with open(\"quotes.csv\", mode='r', newline='') as file1:\n\n csv_reader = csv.reader(file1)\n for row in csv_reader:\n quoteList.append(row)",
"def __init__(self, *filenames, **kwargs):\n encoding = kwargs.get(\"encoding\", ENCODING)\n super(CantoneseCHATReader, self).__init__(*filenames, encoding=encoding)",
"def read_songs(file_name):\n songs = []\n with open(file_name) as f:\n reader = csv.reader(f, delimiter=\",\")\n for row in reader:\n artist_raw = row[1]\n track_raw = row[2]\n date = row[0]\n artist = artist_raw.replace(\"Artist: \", \"\")\n track = track_raw.replace(\"Track: \", \"\")\n song = Song(artist, track, date)\n yield song",
"def initialize_reader():\n eidos_reader.process_text('')",
"def author_title_from_filename(self,filename):\n filename = filename.replace('.mp3','')\n filename = filename.replace('_',' ')\n parts = filename.split(' - ')\n self.author = parts[0]\n self.title = parts[1]",
"def load(self, filename, escape=False):\n raise NotImplementedError()",
"def unicode_csv_dictreader(path, *args, **kwargs):\n # utf-8-sig strips off a BOM if it's present\n stream = codecs.open(path, encoding='utf-8-sig')\n return UnicodeCSVDictReader(stream, *args, **kwargs)",
"def __init__(self, fh):\n\n self.sentences = csv.reader(fh)",
"def get_csv_reader(filename, delimiter, quotechar):\n return csv.reader(open(filename), delimiter=delimiter, quotechar=quotechar)",
"def read(cls, filename):\n for item in cls().parse(filename):\n yield item"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test that it outputs an empty dict if the type of transaction is invalid
|
def test_invalid_type(self):
dataDict = {
'type': 'gibberish'
}
    target = __import__('')  # module name left blank in the source
THParse = target.TransactionHistoryParse(dataDict)
result = THParse.main()
self.assertEqual(result, {})
|
[
"def test_to_dictionary(self):\n self.assertDictEqual(self.payment.to_dictionary(), {\n \"trader_id\": \"32\" * 20,\n \"transaction_number\": 2,\n \"transferred\": {\n \"amount\": 3,\n \"type\": \"BTC\"\n },\n \"payment_id\": 'aaa',\n \"address_from\": 'a',\n \"address_to\": 'b',\n \"timestamp\": 4000,\n \"success\": True\n })",
"def test_item_not_a_dict(self):\n item_args = [\"this is not a dict\"]\n operation = \"dummy\"\n with pytest.raises(MWSError):\n parse_item_args(item_args, operation)",
"def test_create_transaction(self):\n ta = self.transaction\n\n # Make sure the data fields have been filled\n self.assertEqual(len(ta.key), 32)\n self.assertNotEqual(ta.time_created, None)\n self.assertEqual(ta.firstname, \"Donald\")\n self.assertEqual(ta.lastname, \"Duck\")\n self.assertEqual(ta.company, \"None\")\n self.assertEqual(ta.email, \"donald.duck@duckburg.inv\")\n self.assertEqual(ta.telephone, \"991234567\")\n self.assertEqual(ta.mobile, \"+358991234567\")\n self.assertEqual(ta.street, \"1313 Webfoot Walk\")\n self.assertEqual(ta.postalcode, \"00000\")\n self.assertEqual(ta.city, \"Duckburg\")\n self.assertEqual(ta.country, \"US\")\n self.assertEqual(ta.information, \"Quack, damn you!\")\n self.assertEqual(ta.token, '')\n self.assertEqual(ta.time_pending, None)\n self.assertEqual(ta.time_cancelled, None)\n self.assertEqual(ta.time_paid, None)\n self.assertEqual(ta.payment_method_name, '')\n\n # Test properties\n self.assertEqual(ta.is_cancelled, False)\n self.assertEqual(ta.is_delivered, False)\n self.assertEqual(ta.is_pending, False)\n self.assertEqual(ta.is_paid, False)\n self.assertEqual(ta.full_name, \"Donald Duck\")\n\n # Make sure this doesn't crash\n self.assertEqual(ta.qr_code.startswith(\"http\"), True)\n\n # Check price functions\n self.assertEqual(ta.get_transaction_items().count(), 6)\n self.assertEqual(ta.get_total_price(), 70) # Note discounts\n self.assertEqual(ta.get_storeitem_count(self.items[0]), 1)\n self.assertEqual(ta.get_storeitem_count(self.items[2]), 5)\n self.assertEqual(ta.get_storeitem_count(self.items[1]), 0)\n\n # Make sure transaction items went through\n for item in ta.get_transaction_items():\n self.assertIn(item.item.id, [self.items[0].id, self.items[2].id])\n self.assertNotEqual(item.variant, None)\n self.assertEqual(item.time_delivered, None)\n self.assertEqual(len(item.key), 32)\n self.assertEqual(item.is_delivered, False)\n self.assertEqual(item.qr_code.startswith(\"http\"), True)\n\n # Check amounts (manually)\n self.assertEqual(TransactionItem.objects.filter(transaction=ta, item=self.items[0]).count(), 1)\n self.assertEqual(TransactionItem.objects.filter(transaction=ta, item=self.items[2]).count(), 5)\n\n # Check discount(s)\n discount_items = TransactionItem.objects.filter(transaction=ta, item=self.items[2])\n for item in discount_items:\n self.assertEqual(item.original_price, 20)\n self.assertEqual(item.purchase_price, 10)\n non_discount_item = TransactionItem.objects.get(transaction=ta, item=self.items[0])\n self.assertEqual(non_discount_item.original_price, 20)\n self.assertEqual(non_discount_item.purchase_price, 20)",
"def test_get_daily_data_req_empty(self):\n output = self.main.get_daily_data(self.request_empty)\n self.assertIsInstance(\n json.loads(output)[0],\n dict,\n )",
"def test_invalid_datatype(self):\r\n serializer = self.message_serializer(data=[{\"text\": \"Some test text\"}])\r\n assert not serializer.is_valid()\r\n assert serializer.validated_data == {}\r\n assert serializer.data == {}\r\n assert serializer.errors == {\r\n \"non_field_errors\": [\r\n \"Invalid data. Expected a dictionary, but got list.\"\r\n ]\r\n }",
"def test_erroneous_input(self):\n with self.assertRaisesMessage(\n TypeError, \"Expected `cart` to be Dict but found <class 'str'>\"\n ):\n manager.add_to_cart(\"\", 1)",
"def test_default_required_output_for_dict(self):\n class ExampleSerializer(serializers.Serializer):\n omitted = serializers.CharField(default='abc')\n included = serializers.CharField()\n\n serializer = ExampleSerializer({'included': 'abc'})\n with pytest.raises(KeyError):\n serializer.data",
"def testGetDictTransactionManagerTransaction(self):\n class Request: pass\n self._manager.reset()\n handle = Request()\n handle.transaction_id = self._manager.getNextTID()\n handle.message = b\"testing\"\n self._manager.addTransaction(handle)\n result = self._manager.getTransaction(handle.transaction_id)\n self.assertEqual(handle.message, result.message)",
"def test_deposit_confirm_no_txid(client):\n response = client.get(f\"/deposit/confirm_transaction?amount=0\", follow=True)\n content = json.loads(response.content)\n assert response.status_code == 400\n assert content == {\n \"error\": \"no 'transaction_id' provided\",\n \"status_code\": 400\n }",
"def test_get_bill_with_empty_result(self):\n res = self.client().get('/api/bill', headers={'Content-Type': 'application/json'})\n json_data = json.loads(res.data)\n\n self.assertEqual(0, json_data.get('price_sub_total'))\n self.assertEqual(0, json_data.get('tax_sub_total'))\n self.assertEqual(0, json_data.get('grand_total'))",
"def test_division_manufacturing_time_transactions_get(self):\n pass",
"def test_emptydict_json(self):\n dic = Base.to_json_string([{}])\n self.assertEqual(dic, \"[{}]\")",
"def test_blank_transaction_initialized(blank_tester):\n assert blank_tester.orig_usd is None",
"def test_process_json_dict_no_apply(self):\r\n result = json_processor.process_json_dict(self.test_dict_no_apply)\r\n self.assertEqual(None, result)",
"def test_data_without_match(self):\n actual = self.alert.saved_data\n expected = {}\n self.assertEqual(actual, expected)",
"def test_to_dictionary(self):\n self.assertTrue(type(self.equad.to_dictionary()) is dict)",
"def test_missing_keys(self):\n self.assertEqual(None, tsig_keys.check({}))",
"def test_deposit_confirm_invalid_txid(client):\n incorrect_transaction_id = uuid.uuid4()\n response = client.get(\n f\"/deposit/confirm_transaction?amount=0&transaction_id={incorrect_transaction_id}\",\n follow=True,\n )\n content = json.loads(response.content)\n assert response.status_code == 400\n assert content == {\n \"error\": \"no transaction with matching 'transaction_id' exists\",\n \"status_code\": 400\n }",
"def test_get_receipt_type_invalid(api_client, apple_receipt_client):\n # Assuming some receipt data maps to an invalid receipt...\n receipt_data = \"foobar\"\n apple_receipt_client.enqueue_status(receipt_data, {\"status\": 21010})\n\n # ...then the type check endpoint should return an error response.\n response = api_client.post(URL, {\"receipt_data\": receipt_data})\n\n assert response.status_code == status.HTTP_400_BAD_REQUEST\n assert response.json() == {\n \"receipt_data\": [\"The provided receipt is invalid.\"]\n }"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This decorator dumps out the arguments passed to a function before calling it.
|
def dump_args(func):
    argnames = func.__code__.co_varnames[:func.__code__.co_argcount]
    fname = func.__name__
    def echo_func(*args, **kwargs):
        # zip() is lazy in Python 3, so materialise both parts before concatenating
        entries = list(zip(argnames, args)) + list(kwargs.items())
        print(fname, ":", ', '.join('%s=%r' % entry for entry in entries))
        return func(*args, **kwargs)
    return echo_func
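# Usage sketch (a minimal example):
#   @dump_args
#   def add(a, b=2):
#       return a + b
#   add(1, b=3)   # prints: add : a=1, b=3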
|
[
"def log_argumrnts(logger):\n def decorator(func):\n @wraps\n def wraped_func(*args, **kwargs):\n args = inspect.getcallargs(func, *args, **kwargs)\n msg = \"call `{}` with arguments: {}\".format(func.__name__, args)\n logger.info(msg)\n return func(*args, **kwargs)\n return wraped_func\n return decorator",
"def print_arg_list(f):\n @functools.wraps(f)\n def wrapper(args):\n print(\"reprozip-tests$ \" +\n \" \".join(a if isinstance(a, unicode)\n else a.decode('utf-8', 'replace')\n for a in args))\n return f(args)\n return wrapper",
"def kw_and_pos_args_from_func(func):",
"def log_call(func):\n @wraps(func)\n def logged(*args, **kawrgs):\n header = \"-\" * len(func.__name__)\n print(green(\"\\n\".join([header, func.__name__, header]), bold=True))\n return func(*args, **kawrgs)\n return logged",
"def expandedcallargs(fn, *args, **kwargs):\n return tuple(flat_from_doc(orderedcallargs(fn, *args, **kwargs)))",
"def log_metadata(func):\n @wraps(func)\n def wrapped(*a, **kw):\n\n # Fetch function metadata.\n current_params = locals()\n func_name = func.__name__\n\n # Order the current_params dictionary\n # Because I like stuff alphabetical. \n current_params = OrderedDict(sorted(current_params.items(), key=lambda t: t[0]))\n\n logging.info(\"\")\n logging.info(\"FUNCTION: {}\".format(func_name.upper()))\n logging.info(\" PARAMETER : VALUE \")\n #for param, value in current_params['kw'].iteritems(): #python 2\n for param, value in current_params['kw'].items():\n logging.info(\" {} : {}\".format(param, value))\n logging.info(\"\")\n\n return func(*a, **kw)\n\n return wrapped",
"def _set_args(func, *args, **kwargs):\n\n def wrapped():\n return func(*args, **kwargs)\n\n wrapped.args = args\n wrapped.kwargs = kwargs\n wrapped.__name__ = func.__name__\n\n return wrapped",
"def argsfunc(*args):",
"def stable_serialize_call_arguments(execution, args):\n serialized_args = {}\n for key, value in sorted(args.iteritems()):\n serialized_args[key] = execution.serialize(value)\n return serialized_args",
"def print_arguments(inputs=[], outputs=[]):\n # NOTE : any functions decorated with this go here first when you set a breakpoint in the debugger.\n # To debug the function itself, step into the line that calls the decorated function (f(*args, **kwargs)).\n\n def outer(f):\n @functools.wraps(f)\n def inner(*args, **kwargs):\n print(\"\\n\")\n all_args = list(args) + list(kwargs.values())\n\n if inputs:\n print(f\"Checking inputs for {f.__name__} ...\")\n for input_ in inputs:\n p = all_args[input_]\n if not os.path.exists(p):\n raise FileNotFoundError(f\"Input {p} does not exist.\")\n print(f\"Input {p} found.\")\n\n for output_ in outputs:\n p = all_args[output_]\n print(f\"Output will exist at {p}.\")\n\n print(f\"Calling {f.__name__}.\")\n function_out = f(*args, **kwargs)\n print(\"\\n\")\n return function_out\n\n return inner\n\n return outer",
"def decorator(func):\n @wraps(func)\n def decorated(args_tuple, *args, **kwargs):\n \"\"\"Return concatenation of omitted items (``args_tuple[:count]``) and\n result of original function called without omitted items\n (``args_tuple[count:]``).\n\n \"\"\"\n return args_tuple[:count] + (\n func(\n *(tuple(args_tuple[count:]) + tuple(args)),\n **kwargs\n ),\n )\n return decorated",
"def argument(*args, **kwargs):\r\n\r\n def decorator(function):\r\n if isinstance(function, Command):\r\n func = function.function\r\n else:\r\n func = function\r\n\r\n if not hasattr(func, 'djboss_arguments'):\r\n func.djboss_arguments = []\r\n func.djboss_arguments.append((args, kwargs))\r\n\r\n return function\r\n return decorator",
"def disassemble(func):\n\n @functools.wraps(func)\n def inner(*args, **kwargs):\n result = func(*args, **kwargs)\n\n print(t.format_function_header(func, args, kwargs))\n dis.dis(func)\n print(t.BLUE_LINES)\n\n return result\n\n return inner",
"def format_forward_call_args(self):\n _format_arg = self.format_forward_call_arg\n\n # Build the list of positional and keyword arguments.\n result = [\n _format_arg(arg_name)\n for arg_name in self.arg_names\n ] + [\n '%s=%s' % (arg_name, _format_arg(arg_name))\n for arg_name in self.kwarg_names\n ]\n\n # Add the variable arguments.\n if self.args_param_name:\n result.append('*%s' % _format_arg(self.args_param_name))\n\n if self.kwargs_param_name:\n result.append('**%s' % _format_arg(self.kwargs_param_name))\n\n return ', '.join(result)",
"def dump_func_name(func):\n def echo_func(*func_args, **func_kwargs):\n logging.debug('### Start func: {}'.format(func.__name__))\n return func(*func_args, **func_kwargs)\n return echo_func",
"def create_arguments(func):\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n return functools.partial(func, *args, **kwargs)\n return wrapper",
"def kwargsfunc(**kwargs):",
"def log_func(function):\n @wraps(function)\n def do(*args, **kwargs):\n logger.debug('[%s]: ', str(function.__name__))\n return function(*args, **kwargs)\n return do",
"def kwargs_decorator(deco):\n return update_wrapper(curry(deco), deco)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return base url from article row.
|
def get_url_base(row):
url = row['url']
domain_ending = re.compile(r'\.[a-z]{2,3}$')
domain_prefix = re.compile(r'^[a-z]+\.')
domains_to_keep = {'wsj', 'cnn', 'cbs', 'nbc', 'bbc', 'de'}
net_loc = urlparse(url).netloc
while net_loc.startswith('www.'):
net_loc = net_loc[4:]
while True:
match = domain_ending.search(net_loc)
        if not match or match.group(0).lstrip('.') in domains_to_keep:  # group(0) includes the leading dot
break
net_loc = domain_ending.sub('', net_loc)
net_loc = domain_prefix.sub('', net_loc)
return net_loc
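# Usage sketch (requires `import re` and `from urllib.parse import urlparse`):
#   get_url_base({'url': 'https://edition.cnn.com/2021/politics/story.html'})  # -> 'cnn'
#   get_url_base({'url': 'http://www.bbc.co.uk/news'})                         # -> 'bbc'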
|
[
"def extract_link(row):\n return row[len(row) - 4]",
"def __generateFlickrURL(self, row):\n return \"http://farm{0}.staticflickr.com/{1}/{2}_{3}_{4}.jpg\".format(\n row[0], row[1], row[2], row[3], self._image_size)",
"def _get_urls(row):\n all = row.findAll('th') + row.findAll('td')\n return [t.find('a')['href'] if t.find('a') else np.nan for t in all]",
"def page_url(self, page_pk): \n self.c.execute(\"SELECT url FROM pages WHERE id=%s\", (page_pk,))\n return self.c.fetchone()[0]",
"def make_entity_base_url(url):\n return url if url.endswith(\"/\") else url + \"/\"",
"def _get_url(self, table, sys_id=None):\n base = \"%s/%s\" % (self.base, \"table\")\n\n url_str = 'https://%(fqdn)s/%(base)s/%(table)s' % (\n {\n 'fqdn': self.fqdn,\n 'base': base,\n 'table': table\n }\n )\n\n if sys_id:\n return \"%s/%s\" % (url_str, sys_id)\n\n return url_str",
"def get_article_url(self, canonical_title: CanonicalTitle) -> str:\n # TODO Handle interwiki links.\n title = quote(canonical_title.link, safe=\"/:~\")\n return f\"{self._base_url}/{title}\"",
"def entry2url(row):\n url = \"https://gallica.bnf.fr/ark:/12148/\"\n\n directory = row['directory']\n page = row['page'] - doc2start[directory]\n url += f\"{row['directory']}/f{row['page'] - doc2start[row['directory']]}\"\n\n r_strings = []\n if 'name' in row and pd.notna(row['name']):\n r_strings.append(quote(row['name'].replace('.', ' ')))\n if 'job' in row and pd.notna(row['job']):\n r_strings.append(quote(row['job'].replace('.', ' ')))\n if 'street' in row and pd.notna(row['street']):\n r_strings.append(quote(row['street'].replace('.', ' ')))\n if 'number' in row and pd.notna(row['number']):\n r_strings.append(quote(row['number'].replace('.', ' ')))\n\n if len(r_strings) > 0:\n url += f\".item.r={'%20'.join(r_strings)}.zoom\"\n\n return url",
"def item_url(self):\n return self.get_url(item=True)",
"def get_url(self, index):\n\n\t\treturn 'https://www.cardmarket.com/en/YuGiOh/Products/Singles/' + self.db.loc[index, 'set_url_name'] + '/' + self.db.loc[index, 'url_name']",
"def _fetch_uri(self) -> str:\n result = self.__conn.find_versions([QueryDesc().id(self.id)], limit=1)\n if result:\n return result[0].uri\n return \"\"",
"def baseURL():",
"def _getURL(self):\n return \"http://%s.%s\" % (self.key, self.baseurl)",
"def get_external_url():",
"def get_uri(self):\n return self.host + '/' + self.get_path().lstrip('/')",
"def get_item_url(self, item):\n config = {}\n context = self.get_context\n if context and self.has_development_collection(context):\n config = copy.copy(self.get_config(context))\n\n url = u'{0}{1}'.format(self.view_url(context), item.id.value)\n if config.get('modify_url', True):\n url = u'{0}___{1}-{2}'.format(\n url,\n item.title.value,\n item.location.value,\n )\n return url",
"def blog_items_url(url, ebi, page_no):\n url = url.rstrip('/')\n return \"{}/action/v_frag-ebi_{}-pg_{}/entry/\".format(url, ebi, page_no)",
"def base_url_from_inventory_url(inventory_url: str) -> str:\n return inventory_url.removesuffix(\"/\").rsplit(\"/\", maxsplit=1)[0] + \"/\"",
"def _resolve_url_base(self, url):\n return url[:url.rfind('/')]",
"def ResolveBase(self):\n baser = self\n baseURI = None\n while baser:\n rebase = baser.GetBase()\n if baseURI:\n baseURI = urlparse.urljoin(rebase, baseURI)\n else:\n baseURI = rebase\n if isinstance(baser, Element):\n baser = baser.parent\n else:\n break\n return baseURI"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Count words in text from article row.
|
def count_words(row):
text = row['text']
return len(text.split())
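# Usage sketch:
#   count_words({'text': 'one two three'})  # -> 3
# Note: naive whitespace split, so punctuation stays attached to tokens.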
|
[
"def countClauseWord(self, article):\n num = 0\n wordList = article.split(\" \")\n for word in wordList:\n if word in self.clauseWordsList:\n num += 1\n return num",
"def _countWords(self, level, value):\n text = ' '.join(self.dataframe.xs(value, level=level).text.tolist())\n numWords = len(re.findall('\\w+', text))\n return numWords",
"def count_words(line):\n return len(line.split())",
"def word_count(self):\n\n # Split by non-alphanumerical boundaires\n split_text = re.split('\\W',self.text.lower())\n\n # Count occurences\n counts = {}\n for word in split_text:\n if word:\n if word in counts:\n counts[word] += 1\n else:\n counts[word] = 1\n\n return counts",
"def word_count(db, word, dates):\n counts = []\n for date in dates:\n for _, count in data(db, \"word_ondate\", date, word):\n if count:\n counts.append(count)\n else:\n counts.append(0)\n return counts",
"def __count_words(self) -> None:\n self.n_words = len(self.data.split())",
"def count_words(line):\n r = re.findall(\"[a-zA-Z_]+\", line)\n return len(r)",
"def get_word_counts(docs):\n pass",
"def get_wordcount(page):\n soup = BeautifulSoup(page.text, 'html.parser')\n words = soup.get_text().split()\n return len(words)",
"def cmd_wordscount(msg):\n\n global WORDSCOUNT\n\n return state.done(\"Actual word count is: %d words\" % WORDSCOUNT)",
"def generate_word_count(self, row):\n\tword_counter = Counter(row[1])\n\treturn [ (word, [ (row[0], word_counter[word]) ] ) \\\n for word in word_counter ]",
"def counts_per_word(df,word):\n # create a table which is summing all the counts of a given word with\n # colums being cause of death and rows being the sites.\n piv = pd.pivot_table(df, values=word,\n index=['site'], columns=['gs_text34'],\n aggfunc=sum)\n \n # make a list of the causes\n causes = list(piv.columns)\n \n # create a new column from the index\n piv['site'] = piv.index\n \n # reorganize table by creating columns for the cause of death and the counts.\n counts_table = pd.melt(piv, id_vars=['site'], value_vars=causes,\n var_name='Cause of death',\n value_name='Times '+word+ ' is mentioned.')\n return(counts_table)",
"def word_counts(text):\n # Could use syllable_counts, return length of lists; faster to do manually\n result = []\n counter = 0\n for (word, tag) in tag_text(text):\n if tag in PUNCTUATION_TAGS:\n result.append((counter, word))\n counter = 0\n else:\n counter += 1\n if counter:\n result.append((counter, None))\n return result",
"def words (file_name):\n\n file_contents = read_file (file_name)\n count_words = 0\n for line in file_contents:\n line_split = line.split ()\n count_words += len (line_split)\n return count_words",
"def count(self, word):\n self.nWords += 1",
"def test_count_words():\n resp = count_words(test_data)\n assert type(resp) == str",
"def wordCount(wordListDF):\r\n wordsDF= wordListDF.groupBy('word').count()\r\n return wordsDF",
"def word_count(text):\r\n allwords = text.split() # a list of words\r\n words = set(allwords) # that is a set, therefore we avoid duplicates.\r\n return {word:allwords.count(word) for word in words}",
"def get_num_words_spoken_by_character_per_episode(content):\n \n\n counts = defaultdict(Counter)\n \n\n for line in csv.DictReader(StringIO(content)):\n counts[line['Character']][line['Episode']] += len(line['Line'].split())\n\n return counts",
"def process(self, element):\n text_line = element.strip()\n if not text_line:\n self.empty_line_counter.inc(1)\n words = re.findall(r'[\\w\\']+', text_line, re.UNICODE)\n for w in words:\n self.words_counter.inc()\n self.word_lengths_counter.inc(len(w))\n self.word_lengths_dist.update(len(w))\n return words"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Print all rows in Pandas DataFrame x.
|
def print_full(x):
pd.set_option('display.max_rows', len(x))
print(x)
pd.reset_option('display.max_rows')
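# Usage sketch:
#   df = pd.DataFrame({'a': range(100)})
#   print_full(df)   # prints all 100 rows, then restores pandas' default limit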
|
[
"def print_full(self, dataframe):\n pandas.set_option('display.max_rows', len(x))\n print(dataframe)\n pandas.reset_option('display.max_rows')",
"def print_full_df(df):\n\n pd.set_option('display.max_rows', len(df))\n pd.set_option('display.max_columns', len(df.columns))\n print(df)\n pd.reset_option('display.max_rows')\n pd.reset_option('display.max_columns')",
"def print_all_records(self):",
"def pprint_rows_df(cols: List[str], rows: List[List[Any]]) -> None:\n table = pd.DataFrame(list(generate_rows(cols, rows)))\n print(tabulate(table, headers=table.keys(), tablefmt='fancy_grid'))",
"def display_data_frame(self):\n header_text = ''\n col_keys = []\n for key in self.columns:\n header_text += (key + '\\t')\n col_keys.append(key)\n\n print(header_text)\n for i in range(0, len(self.columns[col_keys[0]].column_data)):\n print_string = ''\n for key in self.columns:\n print_string += (str(self.columns[key].column_data[i]) + '\\t\\t')\n print(print_string)",
"def print_dataframe(self, data: pd.DataFrame, rows: int = 5):\n if (data.shape[1]) > 5:\n colum_start = 0\n colum_end = 5\n ntables = data.shape[1] // 5\n for _ in range(0, ntables):\n data_split = data.iloc[:, colum_start:colum_end]\n self.report_file.write(self.parse_dataframe(data_split, rows))\n colum_start = copy.copy(colum_end)\n colum_end += 5\n colums_left = data.shape[1] - ntables * 5\n colum_end = colum_end + colums_left - 5\n data_split = data.iloc[:, colum_start:colum_end]\n self.report_file.write(self.parse_dataframe(data_split, rows))\n else:\n self.report_file.write(self.parse_dataframe(data, rows))",
"def print_rows(rows):\n # All of the function body is a todo task\n for row in rows:\n print(row)",
"def frame_print ( frame ) :\n ## \n if isinstance ( frame , ROOT.TTree ) : frame = DataFrame ( frame )\n ## \n node = as_rnode ( frame ) \n res = \"DataFrame Enries/#%d\" % len ( frame ) \n ##\n cols = frame_columns ( node ) \n res += \"\\nColumns:\\n%s\" % multicolumn ( cols , indent = 2 , pad = 1 )\n return res",
"def rows(df):\n logging.info(\"Number of rows = {:,}\".format(len(df)))",
"def displayHead(df: pyspark.sql.DataFrame, nrows: int = 5):\n return df.limit(nrows).toPandas()",
"def print_row(row: Row):\n if row is None:\n print(\"MISSING\")\n return\n print(\"|\", end=\"\")\n for cell in row:\n print(cell.value, end=\"|\")\n print()",
"def display(self):\n\n with self.Session.begin() as session:\n inspector = inspect(self.engine)\n schemas = inspector.get_schema_names()\n main = [{table_name: inspector.get_columns(table_name, schema=schema) for table_name in inspector.get_table_names(schema=schema)} for schema in schemas]\n for i in main[0]:\n print(i)\n display(pd.read_sql_table(i, session.bind))\n print(\"\\n\\n\")",
"def display_info(df_name , df):\n \n print(\"Data: {}\".format(df_name))\n print(\"---------------------------------------------\")\n print(\"Columns and Rows: {}\".format(df.shape))\n print(\"Few coloumns of data frame\")\n print(df.head())",
"def display_column_data(self):\n print(self.column_name)\n for data in self.column_data:\n print(str(data))",
"def display_df(df):\r\n\r\n console = Console()\r\n table = Table(\r\n Column(\"source_text\", justify=\"center\"),\r\n Column(\"target_text\", justify=\"center\"),\r\n title=\"Sample Data\",\r\n pad_edge=False,\r\n box=box.ASCII,\r\n )\r\n\r\n for i, row in enumerate(df.values.tolist()):\r\n table.add_row(row[0], str(row[1]))\r\n\r\n console.print(table)",
"def display_df_info(df_name, my_df, v=False):\n\n print(\"Data: {}\".format(df_name))\n print(\"Shape (rows, cols) = {}\".format(my_df.shape))\n print(\"++++++First few rows +++++++++\")\n print(my_df.head())\n\n # Optional: Display other optional information with the (v)erbose flag\n if v:\n print(\"Dataframe Info:\")\n print(my_df.info())",
"def show_df(df):\n if type(df) is pd.Series:\n display(HTML(df.to_frame().to_html()))\n else:\n display(HTML(df.to_html()))",
"def rawData(df):\n\n totalRows = len(df.index)\n for i in range(1, totalRows, 5):\n begin = i\n end = begin + 5\n if end > totalRows:\n totalRows = end\n print('Bikeshare Raw Data: \\nYou are viewing rows {0} to {1} out of {2}.'.format(begin, end, totalRows))\n print(df[begin:end])\n time.sleep(1)\n\n question = 'Would you like to see 5 more rows?'\n proceed = proceedBool(question)\n \n if proceed == False:\n return",
"def display_frame(self):\r\n x_train, x_test, y_train, y_test = self.train_test()\r\n print(self.df)\r\n print(\"x_train: \",x_train.shape)\r\n print(\"x_test: \",x_test.shape)\r\n print(\"y_train: \",y_train.shape)\r\n print(\"y_test: \",y_test.shape)\r\n\r\n #optional function\r"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calculate global stats on article db.
|
def global_stats(articles: pd.DataFrame):
print(f'Number of articles: {len(articles):,}')
    num_sources = articles['base_url'].nunique()  # number of distinct news sources
print(f'Number of news sources: {num_sources}')
mean_wc = articles['word_count'].mean()
print(f'Global mean word count: {mean_wc:.1f}')
missing_authors = (articles['authors'] == '').sum()
print(f'Missing authors: {missing_authors:,}')
missing_titles = (articles['title'] == '').sum()
print(f'Missing titles: {missing_titles}')
missing_texts = (articles['text'] == '').sum()
print(f'Missing texts: {missing_texts:,}')
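# Usage sketch (assumes columns base_url, word_count, authors, title and text,
# with missing values stored as empty strings):
#   articles = pd.read_csv('articles.csv').fillna('')   # hypothetical file
#   global_stats(articles)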
|
[
"def stats():\n db = ingest.load_database()\n ingest.database_stats(db)",
"def calc_statistics(self):\n pass",
"def articles_total():",
"def init_stats(self):\n self.relation_stats = {\n \"sentences\": collections.defaultdict(int),\n \"paragraphs\": collections.defaultdict(int),\n \"entity negatives\": collections.defaultdict(int)\n }",
"def get_stats(self):\n\n self.collect_stats()\n\n #Query the DB\n\n\n return self.stat_fields",
"def fetch_count(self):\n self.counters_db.connect(self.counters_db.COUNTERS_DB)\n self.static_nat_entries = 0\n self.dynamic_nat_entries = 0\n self.static_napt_entries = 0\n self.dynamic_napt_entries = 0\n self.static_twice_nat_entries = 0\n self.dynamic_twice_nat_entries = 0\n self.static_twice_napt_entries = 0\n self.dynamic_twice_napt_entries = 0\n self.snat_entries = 0\n self.dnat_entries = 0\n\n\n exists = self.counters_db.exists(self.counters_db.COUNTERS_DB, 'COUNTERS_GLOBAL_NAT:Values')\n if exists:\n counter_entry = self.counters_db.get_all(self.counters_db.COUNTERS_DB, 'COUNTERS_GLOBAL_NAT:Values')\n if 'STATIC_NAT_ENTRIES' in counter_entry:\n self.static_nat_entries = counter_entry['STATIC_NAT_ENTRIES']\n if 'DYNAMIC_NAT_ENTRIES' in counter_entry:\n self.dynamic_nat_entries = counter_entry['DYNAMIC_NAT_ENTRIES']\n if 'STATIC_NAPT_ENTRIES' in counter_entry:\n self.static_napt_entries = counter_entry['STATIC_NAPT_ENTRIES']\n if 'DYNAMIC_NAPT_ENTRIES' in counter_entry:\n self.dynamic_napt_entries = counter_entry['DYNAMIC_NAPT_ENTRIES']\n if 'STATIC_TWICE_NAT_ENTRIES' in counter_entry:\n self.static_twice_nat_entries = counter_entry['STATIC_TWICE_NAT_ENTRIES']\n if 'DYNAMIC_TWICE_NAT_ENTRIES' in counter_entry:\n self.dynamic_twice_nat_entries = counter_entry['DYNAMIC_TWICE_NAT_ENTRIES']\n if 'STATIC_TWICE_NAPT_ENTRIES' in counter_entry:\n self.static_twice_napt_entries = counter_entry['STATIC_TWICE_NAPT_ENTRIES']\n if 'DYNAMIC_TWICE_NAPT_ENTRIES' in counter_entry:\n self.dynamic_twice_napt_entries = counter_entry['DYNAMIC_TWICE_NAPT_ENTRIES']\n if 'SNAT_ENTRIES' in counter_entry:\n self.snat_entries = counter_entry['SNAT_ENTRIES']\n if 'DNAT_ENTRIES' in counter_entry:\n self.dnat_entries = counter_entry['DNAT_ENTRIES']",
"def calculate_stats(self):\n self._stats_analysis('occupancy', self.occupancy)\n self._stats_analysis('bandwidth', self.bandwidth)",
"def recalculate_statistics(self):\n self.statistics = Statistics()\n for a in self.alleles:\n self.statistics.add_allele(a)",
"def get_stats(self):\n employees = self.object.employees_employed.all()\n total_employees = employees.count()\n\n birthplace_known_count = Employee.objects.birthplace_known(bureau_states=self.object).count()\n\n # Employees with date of birth filled\n employees_with_dob = employees.exclude(date_of_birth='')\n # Age in 1865\n ages = get_ages_in_year(employees_with_dob, 1865)\n\n stats = [\n ('Avg. age in 1865', get_float_format(get_mean(ages), places=1)),\n ('Median age in 1865', get_float_format(get_median(ages), places=0)),\n ('% VRC', get_float_format(self.object.percent_vrc_employees())),\n ('% USCT', get_float_format(\n get_percent(part=Employee.objects.usct(bureau_states=self.object).count(), total=total_employees))\n ),\n ('% Foreign-born', get_float_format(\n get_percent(part=Employee.objects.foreign_born(bureau_states=self.object).count(),\n total=birthplace_known_count))\n ),\n ('% Born there', get_float_format(\n get_percent(part=get_number_employees_born_in_bureau_state(employees, self.object),\n total=birthplace_known_count))\n ),\n ('% Female', get_float_format(\n get_percent(part=employees.filter(gender=Employee.Gender.FEMALE).count(), total=total_employees))\n ),\n ('% Identified as \"colored\"', get_float_format(\n get_percent(part=employees.filter(colored=True).count(), total=total_employees))\n ),\n ('% Died during assignment', get_float_format(\n get_percent(part=employees.filter(died_during_assignment=True).count(), total=total_employees))\n ),\n ('Former slaves', employees.filter(former_slave=True).count()),\n ('% Former slaveholder', get_float_format(\n get_percent(part=employees.filter(slaveholder=True).count(), total=total_employees))\n ),\n ('% Union veterans', get_float_format(\n get_percent(part=employees.filter(union_veteran=True).count(), total=total_employees))\n ),\n ('% Confederate veterans', get_float_format(\n get_percent(part=employees.filter(confederate_veteran=True).count(), total=total_employees))\n ),\n ('Left-hand penmanship contest entrants', employees.filter(penmanship_contest=True).count()),\n ]\n\n # Breakdown per AilmentType\n for ailment_type in AilmentType.objects.all():\n ailment_type_count = employees.filter(ailments__type=ailment_type).count()\n stats.append((f'% with {ailment_type}',\n get_float_format(get_percent(part=ailment_type_count, total=total_employees))))\n\n # Breakdown per Ailment, if more than one for the type\n if ailment_type.ailments.count() > 1:\n for ailment in ailment_type.ailments.all():\n ailment_count = employees.filter(ailments=ailment).count()\n stats.append((f'% with {ailment}',\n get_float_format(get_percent(part=ailment_count, total=total_employees))))\n\n return stats",
"def web_stats():\r\n\twith db.connect() as connection:\r\n\t\tdata = \\\r\n\t\t{\r\n\t\t\t\"count\": db.get_data_record_count(connection)[\"count\"],\r\n\t\t\t\"count_api_keys\": db.get_api_key_count(connection)[\"count\"],\r\n\t\t\t\"count_users\": db.get_user_count(connection)[\"count\"],\r\n\t\t\t\"recent\": db.get_recent_data_records(connection, 100),\r\n\t\t\t\"api_key_use_counts\": db.get_api_key_use_counts(connection)\r\n\t\t}\r\n\t\treturn render_template(\"web_stats.html\", page_title=misc.page_title(\"Count\"), data=data)",
"def AnalyticsQuery(table, full_col_name):\n total = 0\n count = 0.0\n for row in table.fetch_all_rows():\n total += len(Regex.WORD.findall(row[full_col_name]))\n count += 1.0\n print(\"(Analytics) AverageWordCount({0}) = {1}\".format(full_col_name, total / count))\n print(' ')\n sys.stdout.flush()",
"def _calculate_overall_performance(self):\n return sum(self._episodic_performances) / len(self._episodic_performances)",
"def get_stats(cls):\n return cls.word_count, cls.sentence_count, cls.document_count",
"def CalculateStats(self):\n self._summary = {}\n for domain, data in self._data.items():\n data_np = numpy.array(data)\n self._summary[domain] = {\n 'mean': numpy.nanmean(data_np),\n 'min': numpy.nanmin(data_np),\n 'max': numpy.nanmax(data_np),\n 'stddev': numpy.nanstd(data_np),\n 'count': data_np.size,\n }",
"def _get_offense_stats(self, team):\n pass",
"def stats(self, irc, msg, args):\n memos = 0\n appends = 0\n for memo in self.db:\n memos += 1\n appends += len(memo.appends)\n irc.reply(format('There are %n and %n in my memo database.',\n (memos, 'memo'), (appends, 'append')))",
"def _run_analysis(self):\n\n count = {}\n frequencies = {}\n relatives = {}\n\n for document in self.corpus:\n count[document] = Counter()\n frequencies[document] = {}\n relatives[document] = {}\n for gender in self.genders:\n count[document][gender] = document.get_count_of_words(gender.identifiers)\n frequencies[document][gender] = document.get_word_frequencies(gender.identifiers)\n relatives[document] = _get_gender_word_frequencies_relative(count[document])\n\n return count, frequencies, relatives",
"def db_update():\n if Config.only_pixel:\n if db_update.only_pixel_update:\n return\n measure_dict = dissimilarity_measure.measure_dict\n update_shape_dissimilarity(measure_dict)\n #print(measure_dict)\n db_update.only_pixel_update = True\n else:\n if Config.cli_args.online:\n # online\n measure_dict = dissimilarity_measure.measure_dict\n #measure_dict.clear()\n edges, db_update.cog_index = db_update.mongodb.edges_documents(), -1\n if edges:\n db_update.crowd_edge_count = len(edges)\n db_update.crowd_correct_edge = 0\n for e, edge in edges.items():\n first_piece_id = edge['x']\n if edge['tag'] == 'L-R':\n orient = 'LR'\n else:\n orient = 'TD'\n second_piece_id = edge['y']\n db_update.edges_confidence[e] = float(edge['confidence'])\n if Config.measure_weight:\n wp = edge['weight']\n confidence = edge['confidence']\n if confidence > 0:\n wn = wp / confidence - wp + 0.0\n else:\n wn = 0.0\n opposers = edge['opposers']\n for o in opposers:\n wn += opposers[o]\n measure = wn - wp\n else:\n measure = len(edge['opposers']) - len(edge['supporters'])\n key = str(first_piece_id)+orient+str(second_piece_id)\n measure_dict[key] = measure\n if orient == 'LR' and first_piece_id + 1 == second_piece_id and second_piece_id % Config.cli_args.rows != 0:\n db_update.crowd_correct_edge += 1\n if orient == 'TD' and first_piece_id + Config.cli_args.rows == second_piece_id:\n db_update.crowd_correct_edge += 1\n update_shape_dissimilarity(measure_dict)\n else:\n # offline\n measure_dict = dissimilarity_measure.measure_dict\n #measure_dict.clear()\n edges, db_update.cog_index = db_update.mongodb.cog_edges_documents(Config.timestamp, db_update.cog_index)\n if edges:\n db_update.crowd_edge_count = len(edges)\n db_update.crowd_correct_edge = 0\n # print(\"crowd_edge_count: %d\" % crowd_edge_count)\n for e, edge in edges.items():\n first_piece_id, second_piece_id = int(e.split('-')[0][:-1]), int(e.split('-')[1][1:])\n if e.split('-')[0][-1] == 'L':\n orient = 'LR'\n else:\n orient = 'TD'\n key = str(first_piece_id)+orient+str(second_piece_id)\n wp = float(edge['wp'])\n wn = float(edge['wn'])\n oLen = float(edge['oLen'])\n sLen = float(edge['sLen'])\n db_update.edges_confidence[e] = wp/(wn + wp) if (wn + wp) > 0 else 0\n if Config.measure_weight:\n measure = wn - wp\n else:\n measure = oLen - sLen\n measure_dict[key] = measure\n if orient == 'LR' and first_piece_id + 1 == second_piece_id and second_piece_id % Config.cli_args.rows != 0:\n db_update.crowd_correct_edge += 1\n if orient == 'TD' and first_piece_id + Config.cli_args.rows == second_piece_id:\n db_update.crowd_correct_edge += 1\n update_shape_dissimilarity(measure_dict)",
"def as_aggregated_rlstats(cursor):\r\n total = Resource.objects.by_project(project).aggregate(\r\n total=Sum('total_entities'))['total']\r\n \r\n # Create a kwargs var to be passed to AggregatedRLStats init method\r\n kwargs = {'total': total}\r\n \r\n for row in queryset:\r\n # Create a fake language object and associate it to the object key\r\n kwargs.update({\r\n 'object': Language(code=row['language__code'], \r\n name=row['language__name']),\r\n 'last_update': row['last_update'], \r\n 'translated': row['translated']\r\n })\r\n\r\n yield AggregatedRLStats(**kwargs)",
"def get_word_distribution():\n db = DataParser.get_connection()\n cursor = db.cursor()\n config = DataParser.get_config()\n cursor.execute(\"use %s\" % config[\"database\"][\"database_name\"])\n cursor.execute(\"select article_id, word_id from words_articles order by article_id, word_id\")\n article_words = cursor.fetchall()\n article_words = list(map(lambda t: (t[0] - 1, t[1] - 1), article_words))\n cursor.execute(\"select word_id, count(*) as word_count from words_articles group by word_id order by word_id\")\n word_count = cursor.fetchall()\n word_count = list(map(lambda t: (t[0] - 1, t[1]), word_count))\n cursor.execute(\"select article_id, word_id, count(*) as word_count \"\n \"from words_articles group by word_id, article_id order by article_id, word_id\")\n word_article_count = cursor.fetchall()\n word_article_count = list(map(lambda t: (t[0] - 1, t[1] - 1, t[2]), word_article_count))\n return word_count, article_words, word_article_count"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calculate aggregate word count statistics on each source's articles.
|
def calculate_word_count_stats(articles: pd.DataFrame):
by_source = articles.groupby(['base_url'])['word_count']
by_source = by_source.agg(['count', 'mean', 'std'])
by_source.sort_values('count', ascending=False, inplace=True)
print_full(by_source)
top_sources = by_source.head(10).index
    top_counts = by_source.loc[top_sources].reset_index()
    sns.barplot(x='base_url', y='count', data=top_counts)
    plt.show()  # seaborn does not re-export pyplot; needs `import matplotlib.pyplot as plt`
    sns.boxplot(x='base_url', y='word_count',
                data=articles[articles['base_url'].isin(top_sources)])
    plt.show()
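# Usage sketch (requires `import seaborn as sns` and
# `import matplotlib.pyplot as plt`; reuses print_full from above):
#   calculate_word_count_stats(articles)  # stats table, then a bar plot and a box plot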
|
[
"def calculate_stats(twitter_content, language):\r\n\tnlp = stanza.Pipeline(language)\r\n\t#initialize variables\r\n\ttoken_without_frequencies = Counter()\r\n\ttoken_frequencies = Counter()\r\n\tupos_frequencies = Counter()\r\n\tner_frequencies = Counter()\r\n\tnum_sentences = 0\r\n\tmax_sentence = 0\r\n\tmin_sentence = 1000\r\n\tmax_tweet = 0\r\n\tmin_tweet = 1000\r\n\t#get the right stopwords\r\n\tif language == 'en':\r\n\t\tstop_lan='english'\r\n\telif language == 'nl':\r\n\t\tstop_lan='dutch'\r\n\telse:\r\n\t\traise NotImplementedError\r\n\r\n\tstop_and_punct = stopwords.words(stop_lan)\r\n\tfor i in string.punctuation:\r\n\t\tstop_and_punct.append(i)\r\n\tfor i in range(len(twitter_content['Text'])):\r\n\t\tcurrent_article = twitter_content['Text'][i]\r\n\t\t# Skip empty articles\r\n\t\tif current_article != '':\r\n\t\t\t# Process the article with the stanza pipeline\r\n\t\t\tprocessed_article = nlp(current_article)\r\n\t\t\tsentences = processed_article.sentences\r\n\t\t\ttokens_per_tweet = 0\r\n\r\n\t\t\t# Iterate through all sentences of the article\r\n\t\t\tfor sentence in sentences:\r\n\t\t\t\tnum_sentences += 1\r\n\t\t\t\t#should remove stopwords and punctuation form the string\r\n\t\t\t\tall_tokens_without = [token.text.lower() for token in sentence.tokens if\r\n\t\t\t\t\t\t\t\t\t token.text.lower() not in stop_and_punct]\r\n\t\t\t\tall_tokens = [token.text.lower() for token in sentence.tokens]\r\n\t\t\t\ttokens_per_tweet += len(all_tokens)\r\n\t\t\t\tif len(all_tokens) > max_sentence:\r\n\t\t\t\t\tmax_sentence = len(all_tokens)\r\n\t\t\t\tif len(all_tokens) < min_sentence:\r\n\t\t\t\t\tmin_sentence = len(all_tokens)\r\n\t\t\t\tall_upos = [word.pos for word in sentence.words]\r\n\t\t\t\tall_ner = [token.ner for token in sentence.tokens]\r\n\t\t\t\ttoken_frequencies.update(all_tokens)\r\n\t\t\t\ttoken_without_frequencies.update(all_tokens_without)\r\n\t\t\t\tupos_frequencies.update(all_upos)\r\n\t\t\t\tner_frequencies.update(all_ner)\r\n\t\t\t# Add the tokens to a counter\r\n\t\t\tif tokens_per_tweet > max_tweet:\r\n\t\t\t\tmax_tweet = tokens_per_tweet\r\n\t\t\tif tokens_per_tweet < min_tweet:\r\n\t\t\t\tmin_tweet = tokens_per_tweet\r\n\treturn \ttoken_without_frequencies, token_frequencies, upos_frequencies, ner_frequencies, num_sentences, max_sentence, min_sentence, max_tweet, min_tweet",
"def word_count_by_source(word, method, sources, date1=None, date2=None):\n counts = {SOURCE_BASE[source][\"title\"]: 0 for source in sources}\n for source in sources:\n info = data(source, method, date1=date1, date2=date2, word=word)\n *_, count = chain.from_iterable(info)\n if count:\n counts[SOURCE_BASE[source][\"title\"]] = count\n return counts",
"def get_word_counts(docs):\n pass",
"def articles_total():",
"def number_of_articles():",
"def get_articles_total():\n articles = (p for p in pages if 'published' in p.meta)\n length = sum(1 for _ in articles)\n return length",
"def get_word_distribution():\n db = DataParser.get_connection()\n cursor = db.cursor()\n config = DataParser.get_config()\n cursor.execute(\"use %s\" % config[\"database\"][\"database_name\"])\n cursor.execute(\"select article_id, word_id from words_articles order by article_id, word_id\")\n article_words = cursor.fetchall()\n article_words = list(map(lambda t: (t[0] - 1, t[1] - 1), article_words))\n cursor.execute(\"select word_id, count(*) as word_count from words_articles group by word_id order by word_id\")\n word_count = cursor.fetchall()\n word_count = list(map(lambda t: (t[0] - 1, t[1]), word_count))\n cursor.execute(\"select article_id, word_id, count(*) as word_count \"\n \"from words_articles group by word_id, article_id order by article_id, word_id\")\n word_article_count = cursor.fetchall()\n word_article_count = list(map(lambda t: (t[0] - 1, t[1] - 1, t[2]), word_article_count))\n return word_count, article_words, word_article_count",
"def _calculate_translated_wordcount(self):\r\n wc = 0\r\n translated = SourceEntity.objects.filter(\r\n id__in=Translation.objects.filter(language=self.language,\r\n resource=self.resource, rule=5).values_list(\r\n 'source_entity_id', flat=True))\r\n wordcount = Translation.objects.filter(source_entity__in=translated,\r\n language=self.resource.source_language).aggregate(Sum('wordcount'))['wordcount__sum']\r\n self.translated_wordcount = wordcount or 0",
"def gatherAllWordsFromArticles(listOfArticles, pathToArticles):\n\n wordAmount = 0\n words = set()\n dictOfWords = dict()\n dictOfTermOccurrences = dict()\n\n workingListOfOccurrences = []\n mapOfWords = []\n\n for currentFileName in listOfArticles:\n with open(pathToArticles + currentFileName) as currentFile:\n indexesOfWordsInCurrentFile = []\n for word in currentFile.read().split():\n if word in words:\n indexesOfWordsInCurrentFile.append(dictOfWords[word])\n else:\n dictOfTermOccurrences[word] = 1\n words.add(word)\n dictOfWords[word] = wordAmount\n mapOfWords.append(word)\n indexesOfWordsInCurrentFile.append(wordAmount)\n wordAmount+=1\n\n workingListOfOccurrences.append(indexesOfWordsInCurrentFile)\n\n\n matrix = numpy.zeros((len(words), len(listOfArticles)), float)\n\n for x, obj in enumerate(workingListOfOccurrences):\n for index in obj:\n matrix[index,x]+=1\n for index in set(obj):\n dictOfTermOccurrences[mapOfWords[index]]+=1\n\n\n return words, dictOfWords, matrix, dictOfTermOccurrences, mapOfWords",
"def sentiment_for_source(source):\n articles = article.load_articles()\n articles = [art for art in articles if art.source == source]\n\n sentlist = []\n for art in articles:\n sentlist += [value for value, _ in art.citations_sentiment_list()]\n \n return sum(sentlist) / len(sentlist)",
"def process(self, articles):\n\n # Preprocess documents\n document_token_matrix = {\n article[\"url\"]: self.__generate_tokens(article[\"article_text\"])\n for article in articles\n }\n stemmed_document_token_matrix = {\n article[\"url\"]: self.__generate_stemmed_tokens(\n document_token_matrix[article[\"url\"]]\n )\n for article in articles\n }\n tokenised_documents = [\n \" \".join(tokenised_document)\n for tokenised_document in document_token_matrix.values()\n ]\n\n # Calculate tf-idf rankings for each unique token in the corpus.\n ranking = self.__generate_tfidf_ranking(\n self.__tfidf_vectorizer, tokenised_documents\n )\n\n # Calculate article sentiments\n urls = [article[\"url\"] for article in articles]\n sentiment_scores = [\n self.__generate_sentiment_score(stemmed_document_token_matrix[url])\n for url in urls\n ]\n sentiment_zscores = zscore(sentiment_scores)\n article_sentiment = {}\n\n for idx in range(len(urls)):\n article_sentiment[urls[idx]] = {\n \"sentiment_score\": sentiment_scores[idx],\n \"sentiment_z_score\": sentiment_zscores[idx],\n }\n\n output = [\n {\n \"url\": article[\"url\"],\n \"title\": article[\"title\"],\n \"date\": article[\"date\"],\n \"z_score\": article_sentiment[article[\"url\"]][\"sentiment_z_score\"],\n \"sentiment_score\": article_sentiment[article[\"url\"]][\"sentiment_score\"],\n \"topics\": self.__generate_topic_tags(\n document_token_matrix[article[\"url\"]], ranking\n ),\n }\n for article in articles\n ]\n\n return output",
"def _run_analysis(self):\n\n count = {}\n frequencies = {}\n relatives = {}\n\n for document in self.corpus:\n count[document] = Counter()\n frequencies[document] = {}\n relatives[document] = {}\n for gender in self.genders:\n count[document][gender] = document.get_count_of_words(gender.identifiers)\n frequencies[document][gender] = document.get_word_frequencies(gender.identifiers)\n relatives[document] = _get_gender_word_frequencies_relative(count[document])\n\n return count, frequencies, relatives",
"def summarizeTitlesByCount(titlesAlignments, limit=None):\n return _sortHTML(titlesAlignments, 'readCount', limit)",
"def raw_counts(self, query_term, doc):",
"def add_tokens_count(article):\n article['tokens_count'] = Counter(article['text'])\n return article",
"def analyze_article(articles: List[Article]):\n ents = []\n comments = []\n for article in articles:\n for comment in article.comments:\n comments.append(comment.upper())\n doc = nlp(article.content)\n for ent in doc.ents:\n ents.append({\"text\": ent.text,\n \"label\": ent.label})\n return {\"ents\": ents,\n \"comments\": comments}",
"def main(directory):\n docs = []\n for entry in entries:\n docs.append(Document(entry, path))\n\n processed = []\n\n print('Processing documents...')\n print()\n for document in docs:\n processed.append(document.pre_process())\n \n processed_counts = termCounts(processed)\n \n with open('wordCounts.txt', 'w') as file:\n file.write(json.dumps(processed_counts))\n \n return processed_counts",
"def summarize_text(text):\n return summarize(text, word_count=50)",
"def count_words_across_all_papers():\n\n # Count the number of papers to read and prepare the loading bar\n num_files = len([name for name in os.listdir(PAPER_SOURCE) if name.endswith(\".txt\")])\n loading_section_size = num_files / 30\n count = 0\n\n # The defaultdict to hold the global word count for the number of different papers each word occurs in\n global_word_count = defaultdict(int)\n\n # The defaultdict to hold the total count of all words\n global_total_word_count = defaultdict(int)\n\n for filename in os.listdir(PAPER_SOURCE):\n if filename.endswith(\".txt\"):\n\n # Display the loading bar\n loading_bar(loading_section_size, count, num_files)\n count += 1\n\n # Read the paper\n paper = read_in_paper(filename, sentences_as_lists=True)\n\n # Get the paper's vocab\n all_sections = [section for _, section in paper.iteritems()]\n paper_words = [word for section in all_sections for sentence in section for word in sentence]\n vocab = set(paper_words)\n\n # Add to the counter dict for words that occurred in this paper\n for word in vocab:\n global_word_count[word] += 1\n\n # Add to the total counts dict\n for word in paper_words:\n global_total_word_count[word] += 1\n\n # Write the wordcount\n with open(GLOBAL_WORDCOUNT_WRITE_LOC + \"global_wordcount.pkl\", \"wb\") as f:\n pickle.dump(global_word_count, f)\n\n with open(GLOBAL_WORDCOUNT_WRITE_LOC + \"global_total_wordcount.pkl\", \"wb\") as f:\n pickle.dump(global_total_word_count, f)",
"def _collect_words(self, data, init_words=None):\n logging.info('Building word list...')\n words = init_words if init_words is not None else {}\n for sample in tqdm(data['data']):\n for paragraph in sample['paragraphs']:\n # collect words in context\n for word in paragraph['context']:\n if word.text not in words:\n words[word.text] = 0\n else:\n words[word.text] += 1\n\n # collect words in question\n for qa in paragraph['qas']:\n for word in qa['question']:\n if word.text not in words:\n words[word.text] = 0\n else:\n words[word.text] += 1\n\n return words"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Display statistics on articles.
|
def show_stats():
articles = build_df()
global_stats(articles)
calculate_word_count_stats(articles)
calculate_missing_values(articles)
sns.kdeplot(articles['word_count'], bw=1)
plt.show()  # sns.plt was removed from seaborn; this assumes matplotlib.pyplot is imported as plt
|
[
"def articles_total():",
"def popular_articles():\n\n results = fetch_query(\n \"\"\"select articles.title, count(log.path)\n from articles, log\n where log.path = '/article/' || articles.slug\n and log.status = '200 OK'\n group by articles.title\n order by count(log.path) desc limit 3;\"\"\"\n )\n print('\\n\\n' + \"Most popular three articles of all time:\" + '\\n')\n for item in results:\n print(\"\\\"\" + item[0].title() + \"\\\": \" + str(\"{:,}\".format(item[1])) +\n \" views\")",
"def print_popular_articles():\n articles = 0\n views = 1\n articlesandviews = top3populararticles()\n print \"Top 3 Popular Articles:\"\n print \"Article Title ------ Views\"\n for result in articlesandviews:\n print result[articles] + \" ------ \" + str(result[views])",
"def number_of_articles():",
"async def summary(self):\n return await self.wiki.http.get_summary(self.title)",
"def summary(request, *args, **kwargs):\n return shortcuts.render_to_response(\n 'sye_admin/index.html',\n {'vendor_count': counters.get_count('vendors'),\n 'category_count': counters.get_count('categories'),\n 'orphan_vendors': _count_orphans()})",
"def print_statistics(self):\n pass",
"def count():\n click.echo('articles: {}'.format(Article.query.count()))\n click.echo('events: {}'.format(Event.query.count()))\n click.echo('stories: {}'.format(Story.query.count()))",
"def summary(request):\n\n return render(request, 'activities/summary.html', {})",
"def show_all(soup):\n articles = soup.select('.r-ent')\n for index, article in enumerate(articles):\n print(f'---------- Article-{index} ----------')\n\n if article.find(class_='title').find('a'):\n print('標題:', article.find(class_='title').find('a').string)\n print('鏈結:', article.find(class_='title').find('a').get('href'))\n else:\n # The article has been deleted, so there is no link.\n print('標題:', article.find(class_='title').string.strip())\n\n print('作者:', article.find(class_='author').string)\n\n if article.find(class_='hl'):\n print('推文數:', article.find(class_='hl').string)\n else:\n print('尚無推文')\n\n print('日期:', article.find(class_='date').string)",
"def _statistics_view():\n\n base = requests.get(constants.API_TRONSCAN + '/stats/overview?limit=1').json()\n nodes = requests.get(constants.SERVER_TRON_API + '/node/nodemap?total=1').json()\n detail = base['data'][-1]\n\n text = views.STATS_VIEW.format(\n nodes=str(nodes['total']),\n height=str(detail['totalBlockCount']),\n total_tx=str(detail['totalTransaction']),\n total_accounts=str(detail['totalAddress']),\n new_block_height=str(detail['newBlockSeen']),\n new_accounts=str(detail['newAddressSeen']),\n new_tx=str(detail['newTransactionSeen']),\n time=helpers.date_format(detail['date'])\n )\n return text",
"def display_stats():\n #Queries\n session = Session()\n\n nb_total_entries = session.query(Catalog).count()\n nb_total_already_confirmed = session.query(Catalog).filter(Catalog.already_confirmed == True).count()\n\n session.close()\n\n #Displaying\n logging.info(\" ~~~ \")\n logging.info(\"NUMBER OF ENTRIES : {}\".format(nb_total_entries))\n logging.info(\"NUMBER OF ALREADY CONFIRMED : {}\".format(nb_total_already_confirmed))\n logging.info(\" ~~~ \")",
"def show_popular_article_authors():\n rows = get_popular_article_authors()\n\n print \"Most Popular Article Authors \"\n print \"-----------------------------\"\n\n for name, num in rows:\n print '{0} - {1} views'.format(name, num)\n print \"\\n\"",
"def print_top_articles():\n print('1. What are the most popular three articles of all time?')\n query = \"\"\"Select * from Top_Viewed_Articles limit 3;\"\"\"\n results = execute_query(query)\n for article, count in results:\n print('\"{}\" article viewed count is {}.'.format(article, int(count)))\n print('=' * 10)",
"def news():\n news = newsapi.get_everything(q='Nvidia',language='en',sort_by='relevancy')\n if news['totalResults'] > 0 and news['status'] == 'ok':\n count = 0\n articleList = []\n while count != 3:\n article = news['articles'][count]\n date = article['publishedAt']\n date = datetime.strptime(date, '%Y-%m-%dT%H:%M:%SZ')\n dateAndTime = date.strftime(\"%d %B %Y, %H:%M\")\n article['publishedAt'] = dateAndTime\n articleList.append(article)\n count = count + 1\n return render_template(\"news.html\", articles=articleList)\n else:\n return \"<p>Couldn't find any article</p>\"",
"def popular_authors():\n\n results = fetch_query(\n \"\"\"select author_slug.name, count(log.path)\n from author_slug, log\n where log.path = '/article/' || author_slug.slug\n and log.status = '200 OK'\n group by author_slug.name\n order by count(log.path) desc;\"\"\"\n )\n print('\\n\\n' + \"Authors listed by popularity as defined by \"\n \"total article views:\" + '\\n')\n for item in results:\n print(item[0] + \": \" + str(\"{:,}\".format(item[1])) + \" views\")",
"def entrez_summary(request):\n\tif request.method == 'GET' and 'id' in request.GET:\n\t\tid = request.GET['id']\n\t\tdb = request.GET.get('database', 'nucleotide') #assume nucleotide by default\n\t\t\n\t\t#fetch summary information for the id\n\t\t\n\t\thandle = Entrez.esummary(db=db, id=id)\n\t\trecord = Entrez.read(handle)\n\t\tif record is None or len(record) < 1:\n\t\t\treturn JsonResponse(\"Error: Could not get summary information for id '%s'\" % id, ERROR)\n\t\treturn JsonResponse(record[0]);\t\n\traise Http404",
"def articles():\n the_titles = [[a[0], a[1]] for a in articles]\n return render_template('articles.html', titles = the_titles)",
"def show_top_three_articles():\n rows = get_top_three_articles()\n\n print \"Top Three Articles \"\n print \"---------------------\"\n\n for title, num in rows:\n print '\\'{0}\\' - {1} views'.format(title, num)\n print \"\\n\"",
"def view_statistics(self):\n pipeline = self._get_one_pipeline()\n uri = pipeline.get_artifacts_uri_by_component(\n GDPComponent.DataStatistics.name)[0]\n view_statistics(uri)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
An implementation of col2im based on fancy indexing and np.add.at
|
def col2im_indices(cols, x_shape, field_height=3, field_width=3, padding=1,stride=1):
N, C, H, W = x_shape
H_padded, W_padded = H + 2 * padding, W + 2 * padding
x_padded = np.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)
k, i, j = get_im2col_indices(x_shape, field_height, field_width, padding, stride)
cols_reshaped = cols.reshape(C * field_height * field_width, -1, N)
cols_reshaped = cols_reshaped.transpose(2, 0, 1)
np.add.at(x_padded, (slice(None), k, i, j), cols_reshaped)
if padding == 0:
return x_padded
return x_padded[:, :, padding:-padding, padding:-padding]
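
# A minimal sketch of the companion get_im2col_indices helper this function
# assumes, following the standard CS231n formulation; the defaults and the
# import below are assumptions, not part of the original snippet.
import numpy as np

def get_im2col_indices(x_shape, field_height=3, field_width=3, padding=1, stride=1):
    N, C, H, W = x_shape
    assert (H + 2 * padding - field_height) % stride == 0
    assert (W + 2 * padding - field_width) % stride == 0
    out_height = (H + 2 * padding - field_height) // stride + 1
    out_width = (W + 2 * padding - field_width) // stride + 1
    # Row (i) and column (j) offsets of every receptive-field element,
    # combined with the top-left corner of every output location.
    i0 = np.tile(np.repeat(np.arange(field_height), field_width), C)
    i1 = stride * np.repeat(np.arange(out_height), out_width)
    j0 = np.tile(np.arange(field_width), field_height * C)
    j1 = stride * np.tile(np.arange(out_width), out_height)
    i = i0.reshape(-1, 1) + i1.reshape(1, -1)
    j = j0.reshape(-1, 1) + j1.reshape(1, -1)
    # Channel index of every receptive-field element.
    k = np.repeat(np.arange(C), field_height * field_width).reshape(-1, 1)
    return (k, i, j)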
|
[
"def col2im_indices(self, cols, x_shape, field_height=3, field_width=3, padding=1,\n stride=1):\n N, C, H, W = x_shape\n\n H_padded, W_padded = H + 2 * padding, W + 2 * padding\n x_padded = np.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)\n k, i, j = self._get_im2col_indices(x_shape, field_height, field_width, padding,\n stride)\n cols_reshaped = cols.reshape(C * field_height * field_width, -1, N)\n cols_reshaped = cols_reshaped.transpose(2, 0, 1)\n # np.add.at(x_padded, (slice(None), k, i, j), cols_reshaped)\n cupyx.scatter_add(x_padded, (slice(None), k, i, j), cols_reshaped)\n if padding == 0:\n return x_padded\n return x_padded[:, :, padding:-padding, padding:-padding]",
"def __im2col_fractal_indices(indices, fmap):\n block_size = config['mac'][1]\n block_size_m = config['mac'][0]\n _, howo, _, kernel_h, kernel_w, _ = fmap.shape\n batch_size, index_i1, index_j1, index_i0, index_j0 = indices\n n_index = batch_size\n\n hw_index = index_i1*block_size_m + index_i0\n\n c1_index = (((index_j1*block_size + index_j0) // block_size) //\n kernel_w.value) // kernel_h.value\n\n kh_index = (((index_j1*block_size + index_j0) // block_size) //\n kernel_w.value) % kernel_h.value\n\n kw_index = ((index_j1*block_size + index_j0) \\\n // block_size) % kernel_w.value\n\n c0_index = (index_j1*block_size + index_j0) % block_size\n if optim_dict[\"c0_optim_flg\"]:\n c1_index = 0\n kh_index = (index_j1*4 + index_j0 // 4) // kernel_w.value\n kw_index = (index_j1*4 + index_j0 // 4) % kernel_w.value\n c0_index = index_j0 % 4\n dtype = compute_dtype\n\n return tvm.select( \\\n tvm.any(hw_index < 0, hw_index > howo.value - 1), \\\n tvm.const(0.0, dtype), \\\n fmap(n_index, hw_index, \\\n c1_index, kh_index, kw_index, c0_index))",
"def conv_backward_im2col(dout, cache):\n x, w, b, conv_param, x_cols = cache\n stride, pad = conv_param['stride'], conv_param['pad']\n\n db = np.sum(dout, axis=(0, 2, 3))\n\n num_filters, _, filter_height, filter_width = w.shape\n dout_reshaped = dout.transpose(1, 2, 3, 0).reshape(num_filters, -1)\n dw = dout_reshaped.dot(x_cols.T).reshape(w.shape)\n\n dx_cols = w.reshape(num_filters, -1).T.dot(dout_reshaped)\n # dx = col2im_indices(dx_cols, x.shape, filter_height, filter_width, pad, stride)\n dx = col2im_cython(dx_cols, x.shape[0], x.shape[1], x.shape[2], x.shape[3],\n filter_height, filter_width, pad, stride)\n\n return dx, dw, db",
"def col(m, n):\r\n return m[:,n]",
"def test_col_to_arr_plus_one_copy():\n sample = np.asarray(\n np.random.normal(size=(20, 1)),\n dtype=np.float,\n order='F'\n )\n mat = carma.col_to_arr_plus_one(sample, True)\n assert np.allclose(mat, sample + 1)",
"def test_col_to_arr_plus_one():\n sample = np.asarray(\n np.random.normal(size=(20, 1)),\n dtype=np.float,\n order='F'\n )\n mat = carma.col_to_arr_plus_one(sample, False)\n assert np.allclose(mat, sample + 1)",
"def vectorize(img):\n r = img[:,:,0].flatten()\n g = img[:,:,1].flatten()\n b = img[:,:,2].flatten()\n vect = np.concatenate((r,g,b))\n return vect",
"def conv_forward_im2col(x, w, b, conv_param):\n N, C, H, W = x.shape\n num_filters, _, filter_height, filter_width = w.shape\n stride, pad = conv_param['stride'], conv_param['pad']\n\n # Check dimensions\n assert (W + 2 * pad - filter_width) % stride == 0, 'width does not work'\n assert (H + 2 * pad - filter_height) % stride == 0, 'height does not work'\n\n # Create output\n out_height = (H + 2 * pad - filter_height) // stride + 1\n out_width = (W + 2 * pad - filter_width) // stride + 1\n out = np.zeros((N, num_filters, out_height, out_width), dtype=x.dtype)\n\n # x_cols = im2col_indices(x, w.shape[2], w.shape[3], pad, stride)\n x_cols = im2col_cython(x, w.shape[2], w.shape[3], pad, stride)\n res = w.reshape((w.shape[0], -1)).dot(x_cols) + b.reshape(-1, 1)\n\n out = res.reshape(w.shape[0], out.shape[2], out.shape[3], x.shape[0])\n out = out.transpose(3, 0, 1, 2)\n\n cache = (x, w, b, conv_param, x_cols)\n return out, cache",
"def convert_image_indexing(img, src, dst):\n src=_default_indexing.get(src,src)\n dst=_default_indexing.get(dst,dst)\n funcargparse.check_parameter_range(src,\"src\",[\"rcb\",\"rct\",\"xyb\",\"xyt\"])\n funcargparse.check_parameter_range(dst,\"dst\",[\"rcb\",\"rct\",\"xyb\",\"xyt\"])\n if src==dst:\n return img\n if src[:2]==dst[:2]: # same order, different row direction\n return img[::-1,:,...] if src[:2]==\"rc\" else img[:,::-1,...]\n if src[2]==dst[2]: # same row direction, different order\n if src[2]==\"t\":\n return img.swapaxes(0,1)\n if src==\"rcb\":\n return (img[::-1,:,...].swapaxes(0,1))[:,::-1,...]\n else:\n return (img[:,::-1,...].swapaxes(0,1))[::-1,:,...]\n # different row direction, different order\n if src==\"rcb\": # dst==\"xyt\"\n return img[::-1,:,...].swapaxes(0,1)\n if src==\"rct\": # dst==\"xyb\"\n return img.swapaxes(0,1)[:,::-1,...]\n if src==\"xyb\": # dst==\"rct\"\n return img[:,::-1,...].swapaxes(0,1)\n if src==\"xyt\": # dst==\"rcb\"\n return img.swapaxes(0,1)[::-1,:,...]",
"def getcol(self, i):\n M, N = self.shape\n i = int(i)\n if i < 0:\n i += N\n if i < 0 or i >= N:\n raise IndexError('index (%d) out of range' % i)\n idx = slice(*self.indptr[i:i+2])\n data = self.data[idx].copy()\n indices = self.indices[idx].copy()\n indptr = np.array([0, len(indices)], dtype=self.indptr.dtype)\n return csc_matrix((data, indices, indptr), shape=(M, 1),\n dtype=self.dtype, copy=False)",
"def im2index(im):\r\n assert len(im.shape) == 3\r\n height, width, ch = im.shape\r\n #height, width, ch = (1216, 1936, 3)\r\n assert ch == 3\r\n m_lable = np.zeros((height, width, 1), dtype=np.uint8)\r\n for w in range(width):\r\n for h in range(height):\r\n #print(im)\r\n b, g, r = im[h, w, :]\r\n m_lable[h, w, :] = color2index[(r, g, b)]\r\n return m_lable",
"def extract_col(pars) :\n data,err,bitmask,cols,models,rad,pix0,back,sigmodels = pars\n spec = np.zeros([len(models),len(cols)])\n sig2 = np.zeros([len(models),len(cols)])\n mask = np.zeros([len(models),len(cols)],dtype=np.uintc)\n ny=data.shape[0]\n ncol=data.shape[1]\n y,x = np.mgrid[0:data.shape[0],0:data.shape[1]]\n pix=np.zeros(data.shape)\n\n for i,model in enumerate(models) :\n\n # center of trace\n ymid=model(cols)+pix0\n\n # calculate distance of each pixel from trace center\n ylo = np.int(np.min(np.floor(ymid-rad)))\n yhi = np.int(np.max(np.ceil(ymid+rad)))\n dist=y[ylo:yhi+1,:]-ymid\n\n # determine contribution of each pixel to boxcar\n contrib = np.zeros(dist.shape,float)\n # full pixel contribution\n iy,ix = np.where( (np.abs(dist)<rad-0.5) )\n contrib[iy,ix] = 1.\n # fractional pixel contribution\n iy,ix = np.where( (np.abs(dist)>rad-0.5) & (np.abs(dist)<rad+0.5) )\n contrib[iy,ix] = 1-(np.abs(dist[iy,ix])-(rad-0.5))\n \n # add the contributions\n spec[i,:] = np.sum( data[ylo:yhi+1,:]*contrib, axis=0)\n sig2[i,:] = np.sum(err[ylo:yhi+1,:]**2*contrib**2, axis=0)\n # for bitmask take bitwise_or of pixels that have full contribution\n mask[i,:] = np.bitwise_or.reduce(\n bitmask[ylo:yhi+1,:]*contrib.astype(int),axis=0) \n\n # background\n background = np.empty_like(data)\n background[:] = np.nan\n background_err = copy.copy(background)\n if len(back) > 0 :\n dist = y - ymid\n\n nback=0\n for bk in back :\n iy,ix = np.where( (dist>bk[0]) & (dist<bk[1]) )\n background[iy,ix] = data[iy,ix]\n background_err[iy,ix] = err[iy,ix]**2\n nback+=np.abs(bk[1]-bk[0])\n\n spec[i,:] -= np.nanmedian(background,axis=0)*2*rad\n sig2[i,:] += np.nansum(background_err,axis=0)/nback*(2*rad)\n\n return spec, np.sqrt(sig2), mask",
"def collocation_points(self) -> np.ndarray:",
"def oneincol(ar, col_id):\n\n tar = ar[:]\n tar[:,col_id] = numpy.ones(len(ar[:,col_id]))\n return tar",
"def matrix_manip(A, B):\n\n output1 = dict()\n output1['A_transpose'] = np.transpose(A)\n output1['A_3rd_col'] = A[:, 2]\n # Select last two rows from last three columns of the matrix A\n (m, n) = np.shape(A)\n col_slice = A[:, [n - 3, n - 2, n - 1]]\n output1['A_slice'] = col_slice[[m - 2, m - 1], :]\n\n # Find all positions in A greater then 3 and increment them by 1. Afterwards add a new column of ones to the matrix (from right)\n\n A2 = np.array(A, copy=True)\n A2[A2 > 3] += 1\n mat_ones = np.ones((m, 1))\n output['A_gr_inc'] = np.append(A2, mat_ones, axis=1)\n\n\n # soucin matic?\n # dot(a, b)[i,j,k,m] = sum(a[i,j,:] * b[k,:,m])\n (k, l) = np.shape(output1['A_gr_inc'])\n output1['C'] = np.dot(output1['A_gr_inc'], np.transpose(output1['A_gr_inc']))\n\n # np.arange(start, stop, step - optional)\n (r, c) = np.shape(output1['A_gr_inc'])\n # [1...c]\n sum_c = (np.arange(1, c + 1))\n # soucin prvku v sloupcich [...]\n sum_A_gr = np.sum(output1['A_gr_inc'], axis=0)\n\n final_sum = np.dot(sum_c, np.transpose(sum_A_gr))\n output1['A_weighted_col_sum'] = float(final_sum)\n\n # Subtract a vector (4,6)t from all columns of matrix B\n vector = np.array([4, 6])\n D = B - np.vstack(vector)\n output1['D'] = D\n\n # Select all column vectors in the matrix D,\n # which have greater euclidean length than the\n # average length of column vectors in D\n\n final = np.linalg.norm(D, axis=0)\n average = np.sum(final) / len(D)\n\n # B = np.delete(B, 2, 0) delete third row of B\n # C = np.delete(C, 1, 1) delete second column of C\n # D(:, find(sqrt(sum(D^ 2)) > average)\n\n for i in range(len(final) - 1, -1, -1):\n if final[i] <= average:\n output1['D_select'] = np.delete(D, i, 1)\n\n print(output1)\n return output1",
"def combined_indices_1(pxarray: np.ndarray) -> float:\n\n return round(excess_greenness_index(pxarray) + cive(pxarray), 2)",
"def flatten_index(self, i, j):\n return i * self.col_dim + j",
"def fast_get_col(self,j):\r\n col = self.col_view[:,j].copy()\r\n col.data = self.X.data[col.data]\r\n return col",
"def c2ixy(date1, date2, x, y):\n rc2i = asmatrix(zeros(shape=(3,3), dtype=float))\n _sofa.iauC2ixy(date1, date2, float(x), float(y), rc2i)\n return rc2i"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Compute kernels for positions. pos = (y, x); gaussian = exp(-0.5 * (pos / sigma)^2).
|
def _compute_pos_kernels(size, sigma):
if size % 2 != 1:
raise ValueError('Kernel size must be odd')
hs = size // 2
row = -np.array(range(-hs, hs + 1), dtype=np.float32)
pos = np.zeros((size, size, 2), dtype=np.float32)
pos[:, :, 1] = np.broadcast_to(row, (size, size))
pos[:, :, 0] = pos[:, :, 1].T
n2 = ((pos / sigma) ** 2).sum(axis=2)
gaussian = np.exp(-0.5 * n2)
return pos, gaussian
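
# Illustrative usage (an assumption, not from the original source): for a
# 3x3 kernel with sigma=1.0 the gaussian peaks at 1.0 in the centre and
# falls to exp(-0.5) one cell away.
pos, gaussian = _compute_pos_kernels(3, 1.0)
assert gaussian[1, 1] == 1.0                        # centre of the kernel
assert np.isclose(gaussian[0, 1], np.exp(-0.5))     # one step from the centre
assert pos[1, 1, 0] == 0.0 and pos[1, 1, 1] == 0.0  # zero offset at the centre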
|
[
"def multivariate_gaussian(self, pos):\n\n n = self.mu.shape[0]\n Sigma_det = np.linalg.det(self.sigma)\n Sigma_inv = np.linalg.inv(self.sigma)\n N = np.sqrt((2*np.pi)**n * Sigma_det)\n # This einsum call calculates (x-mu)T.Sigma-1.(x-mu) in a vectorized\n # way across all the input variables.\n fac = np.einsum('...k,kl,...l->...', pos-self.mu, Sigma_inv, pos-self.mu)\n return np.exp(-fac / 2) / N",
"def gaussian_kernel(x, y, sigma=1):\n\n if np.ndim(x) == 1 and np.ndim(y) == 1:\n result = np.exp(- (np.linalg.norm(x - y, 2)) ** 2 / (2 * sigma ** 2))\n elif (np.ndim(x) > 1 and np.ndim(y) == 1) or (np.ndim(x) == 1 and np.ndim(y) > 1):\n result = np.exp(- (np.linalg.norm(x - y, 2, axis=1) ** 2) / (2 * sigma ** 2))\n elif np.ndim(x) > 1 and np.ndim(y) > 1:\n result = np.exp(- (np.linalg.norm(x[:, np.newaxis] - y[np.newaxis, :], 2, axis=2) ** 2) / (2 * sigma ** 2))\n return result",
"def gaussianKernel(x1, x2, sigma):\n\n\treturn np.exp( - np.sum( (x1 - x2) ** 2 )/(2 * (sigma ** 2)) );",
"def gauss_kernel(sigma):\n l = int(np.ceil(2 * sigma))\n x = np.linspace(-l, l, 2*l+1)\n\n # FORNOW\n gx = np.zeros_like(x)\n\n \"\"\"\n *******************************************\n *** TODO: compute gaussian kernel at each x\n *******************************************\n \"\"\"\n \n sigma_denom = (sigma * np.sqrt(2*math.pi))\n \n for i in range(0, len(x)):\n exponent = -x[i]**2/(2*sigma**2)\n gx[i] = 1 /sigma_denom * math.exp(exponent)\n\n \"\"\"\n *******************************************\n \"\"\"\n\n gx = np.expand_dims(gx,0)\n return gx\n\n\n \"\"\"\n *******************************************\n *** ADDED: function to compute colvolution vertically for separable gaussian functionality\n *******************************************\n \"\"\"",
"def gaussian_distribution(pos1, pos2, sd):\n diff_y = pos2[0] - pos1[0]\n diff_x = pos2[1] - pos1[1]\n return math.exp(-(diff_x**2 + diff_y**2) / (2 * sd**2))",
"def kernel_gaussian(size, sigma):\n size = max(size, 3)\n if size % 2 == 0:\n size += 1\n k = (size-1) / 2\n G = - np.arange(-k, k+1)**2\n G = (G + G[:, np.newaxis]) / (2. * sigma * sigma)\n np.exp(G, out = G)\n return G",
"def gaussian_kernel(std=1.3):\n def kernel(x, y=None):\n # Discriminate two cases\n # - when only x is given, the Gram matrix will be symmetric. We can use this fact to save computation\n # - when both x and y are given, then do normal calculation\n if y is None:\n d = squareform(pdist(x))\n else:\n d = cdist(x, y)\n K = np.exp(-1/2*np.square(d/std))\n # Possibly, you can zero-out the contributions from non-neighbors\n # D = K.shape[1]\n # neighbors = 5\n # idx = np.argpartition(K,D-neighbors, axis=1)[:,:D-neighbors]\n # for i,id in enumerate(idx):\n # K[i,id] = 0\n return K\n return kernel",
"def createGaborKernels (inclinations=[0,30,60,90,120,150],kernel_size=9,pos_var=16,pos_w=10, pos_psi=90):\n\n\tif kernel_size%2==0:\n\t\tkernel_size+=1\n\n\tresultKernels = []\n\n\tfor pos_phase in inclinations:\n\t\tvar = pos_var/10.0\n\t\tw = pos_w/10.0\n\t\tphase = pos_phase*CV_PI/180.0\n\t\tpsi = CV_PI*pos_psi/180.0\n \n\t\tkernel = cvCreateMat(kernel_size,kernel_size,CV_32FC1)\n\t\tcvZero(kernel)\n\n\t\tfor x in range(-kernel_size/2+1,kernel_size/2+1):\n \t\tfor y in range(-kernel_size/2+1,kernel_size/2+1):\n\t\t\t\tkernel_val = exp( -((x*x)+(y*y))/(2*var))*cos( w*x*cos(phase)+w*y*sin(phase)+psi)\n\t\t\t\tcvSet2D(kernel,y+kernel_size/2,x+kernel_size/2,cvScalar(kernel_val))\n\n\t\tresultKernels.append(kernel)\n\n\treturn resultKernels",
"def marginalize_gaussian_process(gp, variable, center=True):\n kernel_types = [RBF, Matern]\n kernel = extract_covariance_kernel(gp.kernel_, kernel_types)\n\n constant_kernel = extract_covariance_kernel(gp.kernel_, [ConstantKernel])\n if constant_kernel is not None:\n kernel_var = constant_kernel.constant_value\n else:\n kernel_var = 1\n\n # Warning extract_gaussian_process scales kernel_var by gp.y_train_std**2\n x_train, y_train, K_inv, kernel_length_scale, kernel_var, \\\n transform_quad_rules = \\\n extract_gaussian_process_attributes_for_integration(gp)\n\n # x_train = gp.X_train_.T\n # kernel_length_scale = kernel.length_scale\n # transform_quad_rules = (not hasattr(gp, 'var_trans'))\n L_factor = gp.L_.copy()\n\n tau_list, P_list, u_list, lamda_list, Pi_list, nu_list, __ = \\\n get_gaussian_process_squared_exponential_kernel_1d_integrals(\n x_train, kernel_length_scale, variable, transform_quad_rules,\n skip_xi_1=True)\n\n if center is True:\n A_inv = K_inv*kernel_var\n tau = np.prod(np.array(tau_list), axis=0)\n A_inv_y = A_inv.dot(y_train)\n shift = tau.dot(A_inv_y)\n shift += gp._y_train_mean\n else:\n shift = 0\n\n kernel_var /= float(gp._y_train_std**2)\n\n length_scale = np.atleast_1d(kernel_length_scale)\n nvars = variable.num_vars()\n marginalized_gps = []\n for ii in range(nvars):\n tau = np.prod(np.array(tau_list)[:ii], axis=0)*np.prod(\n np.array(tau_list)[ii+1:], axis=0)\n u = np.prod(u_list[:ii])*np.prod(u_list[ii+1:])\n assert np.isscalar(kernel_var)\n kernel = kernel_var*UnivariateMarginalizedSquaredExponentialKernel(\n tau, u, length_scale[ii], gp.X_train_[:, ii:ii+1])\n # undo kernel_var *= gp._y_train_std**2 in extact_gaussian_process_attr\n gp_ii = UnivariateMarginalizedGaussianProcess(\n kernel, gp.X_train_[:, ii:ii+1].T, L_factor, gp.y_train_,\n gp._y_train_mean, gp._y_train_std, mean=shift)\n if hasattr(gp, 'var_trans'):\n variable_ii = IndependentMarginalsVariable(\n [gp.var_trans.variable.marginals()[ii]])\n var_trans_ii = AffineTransform(variable_ii)\n gp_ii.set_variable_transformation(var_trans_ii)\n marginalized_gps.append(gp_ii)\n return marginalized_gps",
"def gaussian_kernel(l=5, sig=1.):\n ax = np.arange(-l // 2 + 1., l // 2 + 1.)\n xx, yy = np.meshgrid(ax, ax)\n kernel = np.exp(-(xx**2 + yy**2) / (2. * sig**2))\n return kernel / np.sum(kernel)",
"def gaussian_kernel(knots, sigma, basis_len):\n\n n_knots = len(knots)\n X = np.arange(basis_len) / basis_len\n\n X_base = - np.square(X[:, None] - knots) / (2 * sigma)\n X_base = np.exp(X_base)\n\n return X_base",
"def gaussian2(self, shape):\n sx = shape[0]\n sy = shape[1]\n\n x0 = -sx / 2\n x1 = x0 + sx\n y0 = -sy / 2\n y1 = y0 + sy\n\n X, Y = np.mgrid[x0:x1, y0:y1]\n G = np.exp( -(X**2/np.float(sx) + Y**2/np.float(sy)) )\n #G /= G.sum()\n G /= np.max(G)\n\n return G",
"def gaussian_pts(xpts, p):\n xpts = np.array(xpts)\n f = gaussian_func(p)\n return f(xpts)",
"def generatingKernel():\n\n # https://www.quora.com/What-is-the-difference-between-edge-detection-Sobel-detection-and-Canny-detection\n # https://en.wikipedia.org/wiki/Sobel_operator\n # Sobel operator x \n kernel_x = np.array([\n [-1,0,1],\n [-2,0,2],\n [-1,0,1]\n ])\n\n # Sobel operator y\n kernel_y = np.array([\n [1,2,1],\n [0,0,0],\n [-1,-2,-1]\n ])\n\n # return sobel x and sobel y\n return kernel_x, kernel_y",
"def test_get_gaussian_kernel_2D_pytorch_peak():\n ksize = 29\n sigma = 7\n kernel = get_gaussian_kernel_2D_pytorch(ksize, sigma)\n\n assert isinstance(kernel, torch.Tensor)\n kernel = kernel.numpy()[0,0,:,:]\n\n # generated Gaussian kernel should have odd dimensions\n assert kernel.shape[0] % 1 == 0\n assert kernel.shape[1] % 1 == 0\n assert kernel.ndim == 2\n\n center_row = kernel.shape[0] // 2\n center_col = kernel.shape[1] // 2\n\n coords = np.where(kernel == kernel.max())\n coords = np.array(coords).T\n\n # should be only 1 peak\n assert coords.shape == (1, 2), \"Peak is not unique\"\n assert coords[0, 0] == center_row, \"Peak is not at center row\"\n assert coords[0, 1] == center_col, \"Peak is not at center column\"",
"def gaussian(x, center, fwhm):\n # FWHM = 2*sqrt(2 ln2) sigma = 2.3548 sigma\n sigma = fwhm / 2.3548;\n return(np.exp(-0.5 * ((x - center) / sigma)**2) / sigma / np.sqrt(2.0 * np.pi))",
"def twoD_Gaussian(xy, amplitude, xo, yo, sigma_x, sigma_y, theta, f, g, h, k, l, m):\n \n x, y = xy\n xo = float(xo)\n yo = float(yo) \n a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2)\n b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2)\n c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2)\n tot = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) \n + c*((y-yo)**2)))\n tot = tot + background(xy, f, g, h, k, l, m)# f + g*x + h*y + k*x*y + l*x**2 + m*y**2\n return(tot.ravel())",
"def gaussian_kernel(\n kernel_size: int,\n sigma: float = 1.,\n n: int = 2,\n) -> torch.Tensor:\n\n shape = (kernel_size,) * n\n\n kernel = unravel_index(\n torch.arange(kernel_size ** n),\n shape,\n ).float()\n\n kernel -= (kernel_size - 1) / 2\n kernel = (kernel ** 2).sum(1) / (2. * sigma ** 2)\n kernel = torch.exp(-kernel)\n kernel /= kernel.sum()\n\n return kernel.reshape(shape)",
"def gaussian(mu, sigma, x):\n return np.exp(- ((mu - x) ** 2) / (sigma ** 2) / 2.0) / np.sqrt(2.0 * np.pi * (sigma ** 2))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Takes a word as a string; returns True if the word does not contain the letter 'e'.
|
def has_no_e(word):
for letter in word:
if letter == 'e':
return False
return True
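
# Illustrative usage (not from the source):
assert has_no_e('gym') is True
assert has_no_e('hello') is False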
|
[
"def has_no_e(word):\n for c in word:\n if c == 'e':\n return False\n return True",
"def not_letter(character: str) -> bool:\n return character not in LETTERS",
"def exclude(letter):\n return letter in string.punctuation or letter in string.whitespace",
"def avoids(word, forbidden):\n # Feels like there should be a more efficient way to do this using\n # set intersection, but I'll just check the word character by character\n for letter in forbidden:\n if word.find(letter)!=-1:\n return False\n return True",
"def check_for_vowel_word(word: str):\n return word[0] in \"aeiou\" or word[0] in \"AEIOU\"",
"def without_alpha(s):\n return contain_alpha_regexp.search(s) is None",
"def single_letter(word):\n\tif len(word)==1 and word!='a' and word!='I':\n\t\treturn True\n\treturn False",
"def _is_exception_word(self, word):\n return word in self._exception_words_list",
"def has_bad_word(s):\n bad = ('ab', 'cd', 'pq', 'xy')\n return any(b in s for b in bad)",
"def ukrainian(word):\n for letter in word:\n if re.search('[\\u0400-\\u04FF]', letter):\n continue\n return False\n return True",
"def valid_word( word ):\n return re.match( '[a-z]+', word.lower() ) and len( word ) > 2",
"def contains_only_vowels(input_str):\n return all(c in VOWELS for c in list(input_str.lower()))",
"def not_contain_strings(string):\n return ('ab' not in string) and ('cd' not in string) and ('pq' not in string) and ('xy' not in string)",
"def is_trash(word):\n trash = ['href=', '@<a', '<br', '@a', 'br', '͡°', ' ͜ʖ', '͡º', ' ͜ʖ͡º', ' ͜ʖ']\n for item in trash:\n if item in word:\n return True\n return False",
"def win(word):\n return '_' not in word",
"def letter_check(self, letter, word):\n\n for i in range(0,len(self.word)):\n letter = self.word[i]\n if self.guess == letter:\n self.reveal[i] = self.guess\n if '_' not in self.reveal:\n return True\n else:\n return False",
"def is_word(word):\n return (True in (wordtype(word)))",
"def is_negative_letter(self,letter):\n return letter in self._negative",
"def all_knowns_in_potential(word, potential_word) -> bool:\n for k in set(word):\n if k in ascii_letters:\n if k not in potential_word:\n return False\n return True",
"def armVowel(c):\n return bool(re.search(\"[ԱԵԸԻՈՒՕ]\", c.upper()))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Execute the program to create the graph.
|
def create_graph(self):
self.my_graph = eval_or_exec(self.program)
self.parse_graph()
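
# A generic sketch (an assumption, not the original helper) of what an
# eval_or_exec utility typically does: evaluate the program string as an
# expression, falling back to executing it as statements and returning a
# designated result variable ('result' is a hypothetical convention here).
def eval_or_exec(program):
    try:
        return eval(program)
    except SyntaxError:
        namespace = {}
        exec(program, namespace)
        return namespace.get('result')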
|
[
"def main():\n\n # Storing the name of the files\n network_file = sys.argv[1]\n tests_file = sys.argv[2]\n results_file = sys.argv[3]\n\n # Initiate\n digraph = Digraph()\n stations = StationCatalog()\n\n # Load stations\n stations.load(network_file)\n\n # Create Nodes\n for station in stations.catalog:\n digraph.addNode(Node(station))\n\n # Create Edges\n for src in digraph.nodes:\n for id in src.station.conns:\n dest = digraph.getNodeByName(stations.getStation(id).name)\n digraph.addEdge(Edge(src, dest))\n # Guarantees simetrie between connections\n digraph.addEdge(Edge(dest, src))\n\n # Tests\n testSet = open(tests_file, 'r')\n results = open(results_file, 'w')\n for line in testSet:\n cityA = line.strip().split(' ')[0]\n cityB = line.strip().split(' ')[1]\n graph = deepcopy(digraph)\n results.write(f'{search(graph, cityA, cityB)}\\n')\n\n testSet.close()\n results.close()",
"def generate_graphs(project):\n common.run_dljc(project,\n ['graphtool'],\n ['--graph-jar', common.get_jar('prog2dfg.jar'),\n '--cache'])",
"def run(self):\n self._workspace_ref().run_graph(self._def.name)",
"def _main(argv):\n flag = flags.FLAGS\n flag.showprefixforinfo = False\n if len(argv) < 3:\n raise app.UsageError(\n 'Invalid number of arguments; expected 2 or more, got %d' %\n (len(argv) - 1))\n\n build_graph(argv[1:-1], argv[-1], flag.similarity_threshold,\n flag.id_feature_name, flag.embedding_feature_name)",
"def main():\n edgelist = sys.argv[1]\n prot_nodes = sys.argv[2]\n\n # Two dicts with appropriate relabeling and recoloring schema\n relabel, recolor = labels_colors(prot_nodes)\n\n # Create plot\n plt.figure(figsize=(8, 8), dpi=150)\n plt.title(\"Proteins with Significant Tanimoto Summaries\")\n\n # Pass edge list into networkx object\n edge_list = open(edgelist, \"rb\")\n G = nx.read_edgelist(edge_list)\n edge_list.close()\n # Relabel nodes to uniprot ID using dict created above.\n H = nx.relabel_nodes(G, relabel)\n # Use color dict to create list of color which mirrors the order of nodes.\n color_list = []\n for node in H.nodes():\n color_list.append(recolor[node])\n # Draw network with appropriate colors\n nx.draw(H, node_color=color_list, with_labels=1)\n plt.savefig(\"output/network.png\", format=\"PNG\")\n plt.show()",
"def main() -> None:\n operating_system = util.get_os()\n parser = _create_parser(operating_system)\n args = parser.parse_args(sys.argv[1:])\n\n with _convert_error_to_log(traceback=args.traceback):\n _validate_args(args)\n\n pdf_name = \"graph.pdf\"\n dot_name = \"graph.dot\"\n git_root = args.git_directory\n with tempfile.TemporaryDirectory() as tmpdir:\n dot_file = Path(str(tmpdir)) / dot_name\n pdf_file = Path(str(tmpdir)) / pdf_name\n\n if args.snapshot:\n _render(dot_file, args.snapshot, git_root, args.hide_content)\n print(f\"Output saved to '{args.snapshot}'\")\n else:\n _mainloop(\n git_root,\n dot_file,\n pdf_file,\n args.pdf_viewer,\n operating_system,\n args.hide_content,\n )",
"def make_program(pdef):\n\tNw=pdef.get('Nw',1000)\n\tmethod=pdef.get('workspace_graph','staggered_grid')\n\tfilename = pdef['filename']\n\tfolder = pdef['output']\n\n\tworld = WorldModel()\n\tworld.readFile(filename)\n\trobot = world.robot(0)\n\tprogram = GLRedundancyProgram(world,robot)\n\tprogram.resolution.readJsonSetup(pdef)\n\tprogram.folder = folder\n\n\tif not pdef.get('clean',False):\n\t\ttry:\n\t\t\tprint \"Loading from\",folder,\"...\"\n\t\t\tprogram.resolution.load(folder)\n\t\t\t#TEMP: DO PADDING FOR BAXTER\n\t\t\tfor iq,d in program.resolution.Gw.nodes_iter(data=True):\n\t\t\t\tif 'config' in d:\n\t\t\t\t\tif len(d['config']) < robot.numLinks():\n\t\t\t\t\t\td['config'] = d['config'] + [0.0]*(robot.numLinks()-len(d['config']))\n\t\texcept IOError as e:\n\t\t\tprint \"Did not successfully load from folder %s, generating from scratch\"%(folder,)\n\t\t\tprogram.resolution.sampleWorkspace(Nw,method=method)\n\telse:\n\t\tprogram.resolution.sampleWorkspace(Nw,method=method)\n\n\t#TEMP: make workspace grid following exact\n\t#if problem.startswith('planar'):\n\t#\tfor n,data in program.rr.Gw.nodes_iter(data=True):\n\t#\t\tdata['params'][1] = 0\n\n\tprogram.settings = pdef\n\treturn program",
"def main():\n # set up the program to take in arguments from the command line",
"def create_graph(self, graph_name):",
"def draw_from_program_file(\n self, model_filename, is_text, output_dir, output_filename\n ):\n program = self.load_program(model_filename, is_text)\n utils.graphviz(program.global_block(), output_dir, output_filename)",
"def graph_extension_cli(self):\n self.show_step(1)\n self.env.revert_snapshot(\"ready_with_3_slaves\")\n self.cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__)\n self.show_step(2)\n self.fuel_web.update_nodes(\n self.cluster_id,\n {\n 'slave-01': ['controller'],\n 'slave-02': ['compute', 'cinder']\n })\n self.show_step(3)\n self.fuel_web.provisioning_cluster_wait(self.cluster_id)\n self.show_step(4)\n with self.ssh_manager.open_on_remote(\n self.admin_ip, '/root/graph.yaml', \"w\") as f:\n f.write(TEST_GRAPH)\n cmd = \\\n 'fuel2 graph upload -e {} -t my_graph -f /root/graph.yaml'.format(\n self.cluster_id)\n self.ssh_manager.check_call(self.admin_ip, cmd)\n self.show_step(5)\n self.env.make_snapshot(\"extension_graph_prepare_env\", is_make=True)\n self.env.resume_environment()\n self.env.sync_time()\n self.show_step(6)\n self.deploy_custom_graph_wait_cli('my_graph')\n self.show_step(7)\n self.fuel_web.assert_all_tasks_completed(self.cluster_id)\n self.show_step(8)\n self.cluster_id = self.fuel_web.get_last_created_cluster()\n self.check_created_by_tasks_file()",
"def launch(self):\n out_log, err_log = fu.get_logs(path=self.path, mutation=self.mutation, step=self.step)\n self.output_plotscript_path = fu.add_step_mutation_path_to_name(self.output_plotscript_path, self.step, self.mutation)\n # Create the input script for gnuplot\n xvg_file_list = []\n with open(self.output_plotscript_path, 'w') as ps:\n ps.write('set term '+self.term+'\\n')\n ps.write('set output \"' + self.output_png_path + '\"'+'\\n')\n ps.write('plot')\n for k, v in self.input_xvg_path_dict.iteritems():\n if isinstance(v, basestring) and os.path.isfile(v):\n ps.write(' \"' + v + '\" u 1:3 w lp t \"' + k + '\",')\n else:\n xvg_file = fu.add_step_mutation_path_to_name(k + '.xvg', self.step, self.mutation)\n np.savetxt(xvg_file, v, fmt='%4.7f')\n out_log.info('Creating file: '+os.path.abspath(xvg_file))\n xvg_file_list.append(os.path.abspath(xvg_file))\n ps.write(' \"' + xvg_file + '\" u 0:2 w lp t \"' + k + '\", ')\n\n\n gplot = 'gnuplot' if self.gnuplot_path is None else self.gnuplot_path\n cmd = [gplot, self.output_plotscript_path]\n\n command = cmd_wrapper.CmdWrapper(cmd, out_log, err_log)\n returncode = command.launch()\n return returncode",
"def main():\r\n # Parse arguments and options\r\n parser = argparse.ArgumentParser(description='Test AOA graph generation algorithms with given files')\r\n parser.add_argument('infiles', nargs='*',\r\n help='Project files to test')\r\n parser.add_argument('--table-file', '-t', default='resultados.csv',\r\n help='Name of file to append test results in CSV format (default: resultados.csv)')\r\n parser.add_argument('-r', '--repeat', default=1, type=int,\r\n help='Number of repetitions (default: 1)')\r\n parser.add_argument('--SVG', action='store_true',\r\n help='Draw the graph in a SVG file')\r\n parser.add_argument('--no-stop', action='store_true',\r\n help='Do not stop when an algorithm fails')\r\n\r\n parser.add_argument('-c', '--CohenSadeh', action='store_true',\r\n help='Test Cohen Sadeh algorithm')\r\n parser.add_argument('-s', '--Sharma', action='store_true',\r\n help='Test Sharma algorithm')\r\n parser.add_argument('-l', '--Salas', action='store_true',\r\n help='Test Lorenzo Salas algorithm')\r\n parser.add_argument('-g', '--GentoMunicio', action='store_true',\r\n help='Test Gento Municio algorithm')\r\n parser.add_argument('-o', '--Optimal', action='store_true',\r\n help='Test set based optimal algorithm')\r\n parser.add_argument('-m', '--Mouhoub', action='store_true',\r\n help='Test Mouhoub algorithm')\r\n parser.add_argument('-p', '--Syslo_Polynomial', action='store_true',\r\n help='Test Syslo Polynomial algorithm')\r\n parser.add_argument('-y', '--Syslo_Optimal', action='store_true',\r\n help='Test Syslo Optimal algorithm')\r\n args = parser.parse_args()\r\n\r\n if args.repeat < 1:\r\n print 'Number of repetitions must be > 0'\r\n return 1\r\n\r\n try:\r\n f_csv = open(args.table_file, \"a\")\r\n except IOError:\r\n print 'Can not open table file (%s) to append results in CSV format' % (args.table_file, )\r\n return 1 \r\n\r\n # List of name and function of each algorithm to test\r\n algorithms = [] \r\n if args.CohenSadeh:\r\n algorithms.append( ('CohenSadeh', algoritmoCohenSadeh.cohen_sadeh) )\r\n if args.Sharma: \r\n algorithms.append( ('Sharma', algoritmoSharma.sharma1998ext) )\r\n if args.Optimal:\r\n algorithms.append( ('Conjuntos', algoritmoConjuntos.algoritmoN) )\r\n if args.GentoMunicio:\r\n algorithms.append( ('GentoMunicio', algoritmoGentoMunicio.gento_municio) )\r\n if args.Salas:\r\n algorithms.append( ('Salas', algoritmoSalas.salas) )\r\n if args.Mouhoub:\r\n algorithms.append( ('Mouhoub', algoritmoMouhoub.mouhoub) )\r\n if args.Syslo_Polynomial:\r\n algorithms.append( ('Syslo Polinomico', algoritmoSysloPolynomial.sysloPolynomial) )\r\n if args.Syslo_Optimal:\r\n algorithms.append( ('Syslo Optimo', algoritmoSysloOptimal.sysloOptimal) )\r\n # Perform tests on each file \r\n for filename in args.infiles:\r\n print \"\\nFilename: \", filename \r\n data = openProject(filename)\r\n if not data:\r\n print 'Can not read or understand file'\r\n else:\r\n # XXX Aqui habria que cortar si falla el checkeo del fichero\r\n check_activities(data)\r\n\r\n # Test each algorithm\r\n for name, alg in algorithms:\r\n print name\r\n\r\n # Get successors from activities table\r\n successors = {}\r\n for i in data:\r\n successors[i[1]] = i[2]\r\n\r\n \r\n # Count prelations\r\n list_of_predecessors = successors.values()\r\n num_of_predecessors = 0\r\n for predecessors in list_of_predecessors:\r\n num_of_predecessors += len(predecessors)\r\n \r\n # Get predecessors from successors\r\n prelaciones = graph.reversed_prelation_table(successors)\r\n\r\n # Run algorithm\r\n pert_graph = 
None\r\n itime = os.times()\r\n for i in range(args.repeat):\r\n try:\r\n pert_graph = alg(prelaciones)\r\n except Exception:\r\n print traceback.format_exc()\r\n print \" --- Algorithm failed! --- \"\r\n if not args.no_stop:\r\n return 1\r\n break\r\n\r\n if pert_graph:\r\n ftime = os.times()\r\n utime = ftime[0] - itime[0]\r\n \r\n # Print test results\r\n print \"utime %.4f\"% (utime)\r\n print \"utime: \", utime\r\n print \"numero de nodos: \", pert_graph.number_of_nodes()\r\n print \"numero de arcos: \", pert_graph.number_of_arcs()\r\n print \"numero de arcos reales: \", pert_graph.numArcsReales()\r\n print \"numero de arcos ficticios: \", pert_graph.numArcsFicticios()\r\n print \"numero de predecesors/sucesores: \", num_of_predecessors\r\n print \"Validation: \"\r\n if not validation.check_validation(successors, pert_graph) and not args.no_stop:\r\n return 1\r\n print \"\"\r\n\r\n # XXX ??Falta incluir aqui el numero de actividades??\r\n result_line = '\"' + filename + '\",' + '\"' + name + '\",' + str(len(data)) + ',' + str(num_of_predecessors) + ',' + \\\r\n str(pert_graph.number_of_nodes()) + ',' + str(pert_graph.number_of_arcs()) + ',' + \\\r\n str(pert_graph.numArcsReales()) + ',' + str(pert_graph.numArcsFicticios()) + ',' + \"%.4f\"%(utime)\r\n f_csv.write(result_line + \"\\n\")\r\n \r\n if pert_graph == 1:\r\n print \"No hay resultados que mostrar\"\r\n\r\n # Draw graph and save in a file (*.svg)\r\n if args.SVG:\r\n image_text = graph.pert2image(pert_graph) \r\n fsalida = open(os.path.split(filename)[1] + '_' + name + '.svg', 'w')\r\n fsalida.write(image_text)\r\n fsalida.close()\r\n\r\n f_csv.close()\r\n return 0",
"def process_graphs(args):\n os.makedirs(args.output_folder, exist_ok=True)\n\n for graph_type in args.graph_type:\n for graph_idx in range(args.num_graphs):\n seed = args.seed+graph_idx\n graph = create_graph(num_vars=args.num_vars,\n num_categs=args.num_categs,\n edge_prob=args.edge_prob,\n graph_type=graph_type,\n num_latents=args.num_latents,\n deterministic=args.deterministic,\n seed=seed)\n name = 'graph_%s_%i_%i' % (graph_type, args.num_vars, seed)\n if args.num_latents > 0:\n name += '_l%i' % (args.num_latents)\n export_graph(filename=os.path.join(args.output_folder, name),\n graph=graph,\n num_obs=args.num_obs,\n num_int=args.num_int)",
"def main(args):\n mnist = load_mnist(val_seed=123)\n\n if not args.no_training:\n train_network(mnist, 1, args.outdir)\n train_network(mnist, 2, args.outdir)\n\n model1 = load_network(1, args.outdir)\n model2 = load_network(2, args.outdir)\n encoder1, decoder1 = split_network(model1)\n encoder2, decoder2 = split_network(model2)\n\n create_montage(mnist, model1, model2, args.outdir)\n create_scatter(mnist, encoder1, decoder1, args.outdir)\n do_experiment_on_model1_rules(decoder1, args.outdir)\n do_experiment_on_model2_rules(mnist, encoder2, decoder2, args.outdir)",
"def main():\r\n patterns = []\r\n lineCount = 0\r\n\r\n # Read the input file\r\n for line in sys.stdin:\r\n if lineCount > 0:\r\n patterns.append(line.strip())\r\n lineCount += 1\r\n\r\n myGraph = Graph(patterns)\r\n myGraph.printString()",
"def main():\n args = parse_args()\n\n # load model\n model_name = args.model\n model = load_model(model_name)\n model.load_state_dict(torch.load(args.checkpoint_path)['model'])\n model.eval()\n\n # load dataset\n test_dataset = load_dataset(dataset_name=args.dataset, dataset_part='test')\n all_first_soft_scores, all_second_soft_scores, gt_labels = \\\n get_soft_scores_and_true_labels(test_dataset, model)\n\n # plot the roc curves\n roc_curve_figure = plt.figure()\n roc_curve_figure = plot_roc_curve(roc_curve_figure,\n all_first_soft_scores,\n all_second_soft_scores,\n gt_labels)\n roc_curve_figure.savefig(\n os.path.join(FIGURES_DIR,\n f'{args.dataset}_{args.model}_roc_curve.png'))\n\n # plot the det curve for the scores of the first output of the network\n det_curve_figure = plt.figure()\n det_curve_figure = plot_det_curve(det_curve_figure,\n all_first_soft_scores,\n all_second_soft_scores,\n gt_labels)\n det_curve_figure.savefig(\n os.path.join(FIGURES_DIR,\n f'{args.dataset}_{args.model}_det_curve.png'))",
"def main():\n myReader = FastAreader()\n myDAG = None\n for start,end,adjList in myReader.readDag():\n myDAG = DAG(start, end, adjList)\n myDAG.makeGraph2()\n for key in sorted(myDAG.DAG.keys()):\n prevData, nextData = (list(), list())\n for i in range(len(myDAG.DAG[key].prev)):\n prevData.append(myDAG.DAG[key].prev[i].name)\n for i in range(len(myDAG.DAG[key].next)):\n nextData.append(myDAG.DAG[key].next[i].name)\n myDAG.findLongestPath4()\n print(myDAG.DAG[myDAG.end].total)\n print('->'.join(myDAG.backTrack()))",
"def build_graph(self):\r\n self._create_placeholders()\r\n self._create_network()\r\n self._create_loss()\r\n self._create_optimizer()\r\n self._create_summaries()\r\n self._show_current_model()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Computes the necessary wind loads for the structure and then assigns them to the appropriate nodes
|
def assign_wind_loads(self):
    Vr = self.mean_hourly_wind_speed
    roa = 1.22  # kg/m3 - density of air in Great Britain
    if self.terrain_category == 2:
        # This calculates the variation of wind speed with height
        Kr = 1.10  # Terrain roughness factor
        Z0 = 0.01  # Terrain aerodynamic roughness parameter (meters)
        alpha = 0.14  # Power law index of variation of wind speed with height
        He = 0  # Effective height
        Cn = 1.2  # Overall drag/pressure coefficient, dependent on solidity ratio; educated guess
        Kcom = 1
        K0 = 1
        H = 55  # (meters) Presumably the height of the tower; the source is very vague on this
        K1 = (1 + (alpha / 2)) * (10 / H) ** alpha
        S1 = (H / 100.8) * ((10 / H) ** alpha)
        K6 = max((H / 10), 10)
        S2 = (K6 / 100.8) * ((10 / H) ** alpha)
        # 'e' is Euler's number, assumed imported at module level (e.g. from math import e)
        K2 = ((2 / S1) + ((2 / (S1 ** 2)) * ((e ** (-S1)) - 1))) ** 0.5
        Gx = K1 * K2 * ((3.976 / Kr) - 2.485)
        K3 = (1 + (alpha / 2)) * ((10 / K6) ** alpha)
        K4 = ((2 / S2) + ((2 / (S2 ** 2)) * ((e ** (-S2)) - 1))) ** 0.5
        K5 = ((K6 / H) ** alpha) * (1 - (1 - (K6 / H)) ** 2) / (1 - (1 - (K6 / H)) ** (alpha + 2))
        Gy = K3 * K4 * K5 * ((3.976 / Kr) - 2.845)
        Gb = 0.9  # max(Gx, Gy) would be the rigorous choice; 0.9 is used as a shortcut
        for node in self.node_list:
            if node['y'] > 0:
                z_m = float(node['z']) / 1000  # node height in meters
                if z_m >= 10 + He:
                    Vz = Vr * (((z_m - He) / 10) ** alpha)
                else:
                    Vz = Vr * (((0.25 / (10 + He)) * z_m) + 0.75)
                Qz = (roa / 2) * (Vz ** 2)
                # Structural components of projected area on the windward side - the area over
                # which the wind pressure acts, to be broken into panels across the height of
                # the structure.
                As = 0
                # PTW is the maximum wind load acting on a particular panel; it can be split
                # into 50% acting on the top and 50% on the bottom (or, presumably, spread
                # evenly across all nodes within As).
                PTW = Qz * As * Cn * (1 + (Kcom * Gb)) * K0
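
# A minimal standalone sketch (an assumption, not part of the original class)
# of the velocity-profile and dynamic-pressure steps above, with z in metres
# and Vr in m/s. E.g. wind_speed_at_height(20.0, 30.0) ~= 23.3 m/s, and
# dynamic_pressure(23.3) ~= 331 Pa.
def wind_speed_at_height(Vr, z, He=0.0, alpha=0.14):
    # Power-law profile above the 10 m reference height; linear blend below it.
    if z >= 10 + He:
        return Vr * (((z - He) / 10) ** alpha)
    return Vr * (((0.25 / (10 + He)) * z) + 0.75)

def dynamic_pressure(Vz, roa=1.22):
    # q = (rho / 2) * V**2, in Pa for V in m/s and rho in kg/m^3.
    return (roa / 2) * Vz ** 2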
|
[
"def distribute_unit_load(self, aEID, piercedElements, nPiercings):\r\n aModel = self.aeroModel\r\n sModel = self.structuralModel\r\n #print \"piercedElements = \",piercedElements\r\n nIDs = []\r\n if nPiercings == 0:\r\n #assert len(nPiercings)==1,'fix me...'\r\n #print \"nPiercings=0 distributing load to closest nodes...u=%g v=%g\" %(-1,-1)\r\n log.info(\"nPiercings=0 distributing load to closest nodes...\")\r\n for sEID in piercedElements:\r\n nIDs += sModel.get_element_node_ids(sEID)\r\n #print \"nIDs1 = \",nIDs\r\n nIDs = list(set(nIDs))\r\n log.info(\"nIDs2 = %s\" % nIDs)\r\n aCentroid = aModel.Centroid(aEID)\r\n nodes = sModel.getNodeIDLocations(nIDs)\r\n\r\n #print \"nodes = \", nodes\r\n weights = self.get_weights(aCentroid, nodes)\r\n distribution = self.create_distribution(nIDs, weights)\r\n\r\n log.info(\"element aEID=%s sEID=%s weights=%s\" % (aEID, sEID, ListPrint(weights)))\r\n #print \"distribution = \", distribution\r\n #print \"nIDs = \", nIDs\r\n #print \"weights = \", weights\r\n #print \"nodes = \", nodes\r\n #print \"nPiercings = \", nPiercings\r\n else:\r\n log.info(\"mapping load to actual element...\")\r\n nClose = 3 # number of elements to map to\r\n closeElements = piercedElements[:nClose]\r\n\r\n setCloseNodes = set([])\r\n for closeElement in reversed(closeElements):\r\n log.info(\"closeElement = %s\" % closeElement)\r\n #sEID, pIntersect, u1, v1, sDist\r\n (sEID, P, u, v, sDist) = closeElement # TODO: bug here...???\r\n\r\n #closePoint = closeElement[1]\r\n #closeElement = sEID\r\n closePoint = P\r\n\r\n # get list of nearby structural nodes\r\n setElementNodes = set(sModel.get_element_node_ids(sEID))\r\n setCloseNodes = setCloseNodes.union(setElementNodes)\r\n\r\n # setup for weighted average\r\n nIDs = list(setCloseNodes)\r\n sNodes = sModel.getNodeIDLocations(nIDs)\r\n weights = self.get_weights(closePoint, sNodes)\r\n distribution = self.create_distribution(nIDs, weights)\r\n\r\n log.info(\"element aEID=%s sEID=%s weights=%s\" %(aEID, sEID, ListPrint(weights)))\r\n log.info(\"-------------------------\\n\")\r\n sys.stdout.flush()\r\n return (distribution)",
"def _load_nodes(self) -> NoReturn:\n total = self.project_size[1]\n self._nodes = {\n self.object_name(shared_enum.ElementType.NODE, index): index\n for index in range(total)\n }",
"def __init__(self, timesteps, wind_speed_path=\"arpae_wind.p\",\r\n wind_direction_path=\"fort_worth_wind.p\", **kwargs):\r\n self.wind_speed = []\r\n self.wind_direction = []\r\n self.stab_class = []\r\n self.r_y = []\r\n self.r_z = []\r\n speed_data = pickle.load(open(\"InputData/DataObjectInstances/\" + wind_speed_path, \"rb\"))\r\n dir_data = pickle.load(open(\"InputData/DataObjectInstances/\" + wind_direction_path, \"rb\"))\r\n a = np.array([927, 370, 283, 707, 1070, 1179])\r\n l = np.array([0.102, 0.0962, 0.0722, 0.0475, 0.0335, 0.022])\r\n q = np.array([-1.918, -0.101, 0.102, 0.465, 0.624, 0.700])\r\n k = np.array([0.250, 0.202, 0.134, 0.0787, .0566, 0.0370])\r\n p = np.array([0.189, 0.162, 0.134, 0.135, 0.137, 0.134])\r\n self.wind_speed = np.zeros(timesteps)\r\n self.wind_direction = np.zeros(timesteps)\r\n self.stab_class = np.zeros(timesteps)\r\n self.ground_temp = np.ones(timesteps) * GROUND_TEMP\r\n self.a_temp = self.ground_temp - 20\r\n self.pressure = np.ones(timesteps) * PRESSURE\r\n # emissivities of the ground and air\r\n self.e_a = np.ones(timesteps) * 0.1\r\n self.e_g = np.ones(timesteps) * 0.5\r\n # Stability classes are chosen randomly with equal probability, subject to constraints\r\n # based on wind speed. Stability classes 5 and 6 are never chosen because they rarely\r\n # occur during the day.\r\n sc = None\r\n if \"stability_class\" in kwargs:\r\n sc = kwargs.pop(\"stability_class\")\r\n print(\"Using stability class {}\".format(sc))\r\n\r\n for ind in range(0, timesteps):\r\n # Set up a deterministic set of wind speeds for testing purposes\r\n # Note: Wind direction only matters for Distributed Detector (DD) type\r\n if sc is not None:\r\n self.stab_class[ind] = sc\r\n self.wind_direction[ind] = 0\r\n if sc < 2:\r\n self.wind_speed[ind] = 1.4\r\n elif sc < 3:\r\n self.wind_speed[ind] = 2.4\r\n elif sc < 5:\r\n self.wind_speed[ind] = 3.7\r\n else:\r\n self.wind_speed[ind] = 7.2\r\n\r\n else: # use the bootstrap wind speeds\r\n self.wind_speed[ind] = np.random.choice(speed_data.wind_speed)\r\n self.wind_direction[ind] = np.random.choice(dir_data.wind_direction)\r\n if self.wind_speed[ind] < 2:\r\n self.stab_class[ind] = np.random.randint(2)\r\n elif self.wind_speed[ind] < 3:\r\n self.stab_class[ind] = np.random.randint(3)\r\n elif self.wind_speed[ind] < 5:\r\n self.stab_class[ind] = np.random.randint(1, 4)\r\n self.stab_class[ind] = np.random.randint(1, 4)\r\n else:\r\n self.stab_class[ind] = np.random.randint(2, 4)\r\n sf.set_kwargs_attrs(self, kwargs)\r\n self.a, self.l, self.q = np.zeros(timesteps), np.zeros(timesteps), np.zeros(timesteps)\r\n self.k, self.p = np.zeros(timesteps), np.zeros(timesteps)\r\n for ind in range(0, timesteps):\r\n self.a[ind] = a[int(self.stab_class[ind])]\r\n self.l[ind] = l[int(self.stab_class[ind])]\r\n self.q[ind] = q[int(self.stab_class[ind])]\r\n self.k[ind] = k[int(self.stab_class[ind])]\r\n self.p[ind] = p[int(self.stab_class[ind])]",
"def panda_four_load_branch():\r\n net = pp.create_empty_network()\r\n\r\n busnr1 = pp.create_bus(net, name=\"bus1\", vn_kv=10., geodata=[0, 0])\r\n busnr2 = pp.create_bus(net, name=\"bus2\", vn_kv=.4, geodata=[0, -1])\r\n busnr3 = pp.create_bus(net, name=\"bus3\", vn_kv=.4, geodata=[0, -2])\r\n busnr4 = pp.create_bus(net, name=\"bus4\", vn_kv=.4, geodata=[0, -3])\r\n busnr5 = pp.create_bus(net, name=\"bus5\", vn_kv=.4, geodata=[0, -4])\r\n busnr6 = pp.create_bus(net, name=\"bus6\", vn_kv=.4, geodata=[0, -5])\r\n\r\n pp.create_ext_grid(net, busnr1)\r\n\r\n pp.create_transformer(net, busnr1, busnr2, std_type=\"0.25 MVA 10/0.4 kV\")\r\n\r\n pp.create_line(net, busnr2, busnr3, name=\"line1\", length_km=0.05,\r\n std_type=\"NAYY 4x120 SE\")\r\n pp.create_line(net, busnr3, busnr4, name=\"line2\", length_km=0.05,\r\n std_type=\"NAYY 4x120 SE\")\r\n pp.create_line(net, busnr4, busnr5, name=\"line3\", length_km=0.05,\r\n std_type=\"NAYY 4x120 SE\")\r\n pp.create_line(net, busnr5, busnr6, name=\"line4\", length_km=0.05,\r\n std_type=\"NAYY 4x120 SE\")\r\n\r\n pp.create_load(net, busnr3, 0.030, 0.010)\r\n pp.create_load(net, busnr4, 0.030, 0.010)\r\n pp.create_load(net, busnr5, 0.030, 0.010)\r\n pp.create_load(net, busnr6, 0.030, 0.010)\r\n return net",
"def load_fluctuations_2D_all(self):\n if(self.HaveElectron):\n self.nane = np.zeros( (self.n_cross_section,len(self.time_steps),len(self.mesh['R'])) )\n self.nane_bar = np.zeros((len(self.time_steps)))\n if(self.load_ions):\n self.dni = np.zeros( (self.n_cross_section,len(self.time_steps),len(self.mesh['R'])) )\n self.dni_bar = np.zeros((len(self.time_steps)))\n\n self.phi = np.zeros((self.n_cross_section,len(self.time_steps),len(self.mesh['R'])))\n self.phi_bar = np.zeros((len(self.time_steps)))\n for i in range(len(self.time_steps)):\n flucf = self.xgc_path + 'xgc.3d.'+str(self.time_steps[i]).zfill(5)+'.h5'\n fluc_mesh = h5.File(flucf,'r')\n if (i == 0):\n self.n_plane = fluc_mesh['dpot'].shape[1]\n dn = int(self.n_plane/self.n_cross_section)\n self.planes = np.arange(self.n_cross_section) * dn\n\n self.phi_bar[i] = np.mean(fluc_mesh['dpot'][...])\n if(self.HaveElectron):\n self.nane_bar[i] = np.mean(fluc_mesh['eden'][...])\n if(self.load_ions):\n self.dni_bar[i] = np.mean(fluc_mesh['iden'][...])\n for j in range(self.n_cross_section):\n self.phi[j,i] += np.swapaxes(fluc_mesh['dpot'][...][:,self.planes[j]],0,1)\n self.phi[j,i] -= self.phi_bar[i]\n\n if(self.HaveElectron):\n self.nane[j,i] += np.swapaxes(fluc_mesh['eden'][...][:,self.planes[j]],0,1)\n self.nane[j,i] -= self.nane_bar[i]\n if(self.load_ions):\n self.dni[j,i] += np.swapaxes(fluc_mesh['iden'][...][:,self.planes[j]],0,1)\n self.dni[j,i] -= self.dni_bar[i]\n fluc_mesh.close()\n\n\n\n\n return 0",
"def compute_loads(self):\n print('*** Kustaa - computing annual loads ***')\n \n for g in self.groups: # groups ['Natural', 'Forestry', 'Agriculture',...]\n s = list(self.C[g].keys()) # loading sources within group\n s = [x for x in s if x in list(self.data.columns)] # loop only those in data\n\n for k in s: \n print(k)\n A = self.data[k] # area or unit\n dA = self.area_err[k] / 100.0 * A # area error\n cn = self.C[g][k]['N'] # export coeff\n cp = self.C[g][k]['P']\n cs = self.C[g][k]['SS']\n\n if (g == 'Forestry' and self.ForestryModel == 'Kalle'):\n\n # print('compute forestry')\n # account for temporal decay of loading\n cn = np.array([np.multiply(m, self.rC[k]['N']) for m in cn])\n cp = np.array([np.multiply(m, self.rC[k]['P']) for m in cp])\n cs = np.array([np.multiply(m, self.rC[k]['SS']) for m in cs])\n \n self.N_load[k], self.N_var[k] = annual_forestry_load(cn, A, dA)\n self.P_load[k], self.P_var[k] = annual_forestry_load(cp, A, dA)\n self.SS_load[k], self.SS_var[k] = annual_forestry_load(cs, A, dA)\n\n else:\n\n self.N_load[k], self.N_var[k] = annual_load(cn, A, dA, scale=self.scale)\n self.P_load[k], self.P_var[k] = annual_load(cp, A, dA, scale=self.scale)\n self.SS_load[k], self.SS_var[k] = annual_load(cs, A, dA, scale=self.scale)\n \n # sum means and variances to group level\n self.N_load[g] = self.N_load[s].sum(axis=1, skipna=True)\n self.P_load[g] = self.P_load[s].sum(axis=1, skipna=True)\n self.SS_load[g] = self.SS_load[s].sum(axis=1, skipna=True)\n\n self.N_var[g] = self.N_var[s].sum(axis=1, skipna=True)\n self.P_var[g] = self.P_var[s].sum(axis=1, skipna=True)\n self.SS_var[g] = self.SS_var[s].sum(axis=1, skipna=True)",
"def generate_wind():\n# Taken by converting UTM Zone 11 coordinates on\n# https://www.engineeringtoolbox.com/utm-latitude-longitude-d_1370.html\n# These values specific to files called yosemite_landscape_12-03-2019_0900_120m\n west_lon = -120.006255\n east_lon = -119.4736\n south_lat = 37.464649\n north_lat = 37.822073\n\n# Open .shp and .dbf files with rb\n myshp = open(\"SHAPEFILES/HOUR1/yosemite_landscape_12-03-2019_0900_120m.shp\", \"rb\")\n mydbf = open(\"SHAPEFILES/HOUR1/yosemite_landscape_12-03-2019_0900_120m.dbf\", \"rb\")\n wind = Wind(myshp, mydbf, west_lon, east_lon, south_lat, north_lat)\n\n# Regrid the base data onto a 30mx30m grid and bounded at the coordinates described\n# Our model focuses on the area between -120W to -119.5W, and 37.5N to 37.8N\n new_wind = wind.regrid(30, -120, -119.5, 37.5, 37.8)\n return new_wind",
"def add_roads(nodetree, grid_node, road_bl_objects):\n # take object names from SceneCity high-poly assets collection\n road_collector_node = nodetree.nodes.new(\"RoadPortionsCollectionNode\")\n road_collector_node.location = (600, -500)\n\n for i, (name, kind) in enumerate(road_bl_objects):\n # ToDo-me: creating sockets manually instead of using defined blender/SC operator?\n new_socket = road_collector_node.inputs.new(\"WeightedRoadPortionSocket\", \"WeightedRoadPortion\" + name)\n\n # add and link static road portions\n static_road_portion_node = nodetree.nodes.new(\"StaticRoadPortionNode\")\n static_road_portion_node.location = (100, -i * 250 - 500)\n static_road_portion_node.type = kind\n\n nodetree.links.new(static_road_portion_node.outputs[\"Road portion\"], new_socket)\n\n # add and link blender object getter nodes\n object_getter_node = nodetree.nodes.new(\"ObjectsGetterNode\")\n object_getter_node.blender_object_name = name\n object_getter_node.location = (-200, -i * 250 - 500)\n nodetree.links.new(object_getter_node.outputs[\"Objects\"], static_road_portion_node.inputs[\"Objects\"])\n\n road_portions_instancer_node = nodetree.nodes.new(\"RoadPortionsInstancerNode\")\n road_portions_instancer_node.location = (1000, -500)\n road_portions_instancer_node.grid_values_to_consider = 'road = all'\n nodetree.links.new(road_collector_node.outputs[\"Road portions\"], road_portions_instancer_node.inputs[\"Road portions\"])\n nodetree.links.new(grid_node.outputs[\"Grid\"], road_portions_instancer_node.inputs[\"Grid\"])\n\n object_instancer_node = nodetree.nodes.new(\"ObjectsInstancerNode\")\n object_instancer_node.location = (1500, -500)\n object_instancer_node.blender_objects_name_prefix = \"Roads\"\n nodetree.links.new(road_portions_instancer_node.outputs[\"Objects\"], object_instancer_node.inputs[\"Objects\"])\n\n # use create operator\n source_node_path = 'bpy.data.node_groups[\"' + object_instancer_node.id_data.name + '\"].' + object_instancer_node.path_from_id()\n bpy.ops.node.objects_instancer_node_create(source_node_path=source_node_path)",
"def mapLoads(self):\r\n log.info(\"---starting mapLoads---\")\r\n self.bdf = open(self.bdffile, 'wb')\r\n #self.buildMappingMatrix()\r\n log.info(\"self.loadCase = %s\" % self.loadCase)\r\n self.loadCases = {self.loadCase:{}}\r\n\r\n #self.loadCases = {self.loadCase={}, }\r\n momentCenter = array([self.xref, 0., 0.])\r\n sumMoments = array([0., 0., 0.])\r\n sumForces = array([0., 0., 0.])\r\n sys.stdout.flush()\r\n for aEID, distribution in self.mappingMatrix.iteritems():\r\n #print \"aEID = \",aEID\r\n #print \"***distribution = \",distribution\r\n sumLoad = 0.\r\n area = self.aeroModel.Area(aEID)\r\n normal = self.aeroModel.Normal(aEID)\r\n Cp = self.aeroModel.Cp(aEID)\r\n #print \"Cp = \",Cp\r\n #print \"area[%s]=%s\" % (aEID, area)\r\n\r\n p = self.getPressure(Cp)\r\n centroid = self.aeroModel.Centroid(aEID)\r\n r = momentCenter - centroid\r\n F = area * p\r\n Fn = F * normal\r\n sumMoments += cross(r, Fn)\r\n sumForces += Fn\r\n for sNID, percentLoad in sorted(distribution.iteritems()):\r\n sumLoad += percentLoad\r\n\r\n Fxyz = Fn * percentLoad # negative sign is to be consistent with nastran\r\n self.addForce(sNID, Fxyz)\r\n\r\n #print \"Fxyz = \",Fxyz\r\n #print \"type(structuralModel) = \", type(self.structuralModel)\r\n\r\n #comment = 'percentLoad=%.2f' % percentLoad\r\n #self.structuralModel.writeLoad(self.bdf, self.loadCase, sNID,\r\n # Fxyz[0], Fxyz[1], Fxyz[2], comment)\r\n\r\n #msg = '$ End of aEID=%s sumLoad=%s p=%s area=%s F=%s normal=%s\\n' % (aEID, sumLoad, p, area, F, normal)\r\n #self.bdf.write(msg)\r\n\r\n self.writeLoads() # short version of writing loads...\r\n self.bdf.close()\r\n\r\n log.info(\"pInf=%g [psi]; qInf= %g [psi]\" % (self.pInf, self.qInf))\r\n log.info(\"sumForcesFEM [lb] = %s\" % ListPrint(sumForces))\r\n log.info(\"sumMomentsFEM [lb-ft] = %s\" % ListPrint(sumMoments/12.)) # divided by 12 to have moments in lb-ft\r\n\r\n Cf = sumForces /(self.Sref * self.qInf)\r\n log.info(\"Cf = %s\" % ListPrint(Cf))\r\n\r\n Cm = sumMoments / (self.Sref * self.qInf * self.Lref)\r\n log.info(\"Cm = %s\" % ListPrint(Cm*12.)) # multiply by 12 to nondimensionalize ??? maybe 144...\r\n\r\n #self.bdf.write('$***********\\n')\r\n log.info(\"wrote loads to %r\" % self.bdffile)\r\n log.info(\"---finished mapLoads---\")",
"def load_fluctuations_3D_all(self):\n #similar to the 2D case, we first read one file to determine the total toroidal plane number in the simulation\n flucf = self.xgc_path + 'xgc.3d.'+str(self.time_steps[0]).zfill(5)+'.h5'\n fluc_mesh = h5.File(flucf,'r')\n\n self.planes = np.unique(np.array([np.unique(self.prevplane),np.unique(self.nextplane)]))\n self.planeID = {self.planes[i]:i for i in range(len(self.planes))} #the dictionary contains the positions of each chosen plane, useful when we want to get the data on a given plane known only its plane number in xgc file.\n if(self.HaveElectron):\n self.nane = np.zeros( (self.n_cross_section,len(self.time_steps),len(self.planes),len(self.mesh['R'])) )\n self.nane_bar = np.zeros((len(self.time_steps)))\n\n if(self.load_ions):\n self.dni = np.zeros( (self.n_cross_section,len(self.time_steps),len(self.planes),len(self.mesh['R'])) )\n self.dni_bar = np.zeros((len(self.time_steps)))\n\n self.phi = np.zeros( (self.n_cross_section,len(self.time_steps),len(self.planes),len(self.mesh['R'])) )\n self.phi_bar = np.zeros((len(self.time_steps)))\n for i in range(len(self.time_steps)):\n flucf = self.xgc_path + 'xgc.3d.'+str(self.time_steps[i]).zfill(5)+'.h5'\n fluc_mesh = h5.File(flucf,'r')\n\n if(i==0):\n #self.n_plane = fluc_mesh['dpot'].shape[1]\n dn = int(self.n_plane/self.n_cross_section)\n self.center_planes = np.arange(self.n_cross_section)*dn\n\n self.phi_bar[i] = np.mean(fluc_mesh['dpot'][...])\n if (self.HaveElectron):\n self.nane_bar[i] = np.mean(fluc_mesh['eden'][...])\n if (self.load_ions):\n self.dni_bar[i] = np.mean(fluc_mesh['iden'][...])\n\n for j in range(self.n_cross_section):\n self.phi[j,i] += np.swapaxes(fluc_mesh['dpot'][...][:,(self.center_planes[j] + self.planes)%self.n_plane],0,1)\n self.phi[j,i] -= self.phi_bar[i]\n if(self.HaveElectron):\n self.nane[j,i] += np.swapaxes(fluc_mesh['eden'][...][:,(self.center_planes[j] + self.planes)%self.n_plane],0,1)\n self.nane[j,i] -= self.nane_bar[i]\n if(self.load_ions):\n self.dni[j,i] += np.swapaxes(fluc_mesh['iden'][...][:,(self.center_planes[j] + self.planes)%self.n_plane],0,1)\n self.dni[j,i] -= self.dni_bar[i]\n fluc_mesh.close()\n\n return 0",
"def connect_assets_to_elec_nodes():\n path = os.path.join(DATA_INTERMEDIATE, 'elec_distribution.shp')\n elec_sites = gpd.read_file(path)\n\n path = os.path.join(DATA_INTERMEDIATE, 'gas_sites.shp')\n gas_sites = gpd.read_file(path)\n\n path = os.path.join(DATA_INTERMEDIATE, 'airports.shp')\n airports = gpd.read_file(path)\n\n path = os.path.join(DATA_INTERMEDIATE, 'railway_stations.shp')\n railway_stations = gpd.read_file(path)\n\n sites = gas_sites.append(airports)\n sites = sites.append(railway_stations)\n\n output_edges = []\n output_nodes = []\n\n for idx, site in sites.iterrows():\n\n nearest = nearest_points(site['geometry'], elec_sites.unary_union)[1]\n\n geom = LineString([\n (\n site['geometry'].coords[0][0],\n site['geometry'].coords[0][1]\n ),\n (\n nearest.coords[0][0],\n nearest.coords[0][1]\n ),\n ])\n\n nearest = gpd.GeoDataFrame({'geometry': [nearest]}, index=[idx], crs='epsg:27700')\n nearest['geometry'] = nearest['geometry'].buffer(10)\n\n elec_site = gpd.overlay(elec_sites, nearest, how='intersection')\n\n output_edges.append({\n 'geometry': geom,\n 'properties': {\n 'origin_id': elec_site['id'][0],\n 'dest_funth': site['functionth'],\n 'dest_func': site['function'],\n 'dest_dist': site['distinctiv'],\n },\n })\n\n output_nodes.append({\n 'geometry': site['geometry'],\n 'properties': {\n 'origin_id': elec_site['id'][0],\n 'dest_funth': site['functionth'],\n 'dest_func': site['function'],\n 'dest_dist': site['distinctiv'],\n },\n })\n\n output_edges = gpd.GeoDataFrame.from_features(output_edges, crs='epsg:27700')\n path = os.path.join(DATA_INTERMEDIATE, 'network_edges.shp')\n output_edges.to_file(path, crs='epsg:27700')\n\n output_nodes = gpd.GeoDataFrame.from_features(output_nodes, crs='epsg:27700')\n path = os.path.join(DATA_INTERMEDIATE, 'network_nodes.shp')\n output_nodes.to_file(path, crs='epsg:27700')",
"def run_net( self ):\n layer_index = 0\n cur_vault = 0\n self.vault_time = [ [ 0, 0 ] for i in range( self.total_vault ) ] # weight time , feature time\n # compute MAC, buffer access, reg_access, bus access, cross vaule access\n self.vault_energy = [ [ 0, 0, 0, 0, 0 ] for _ in range( self.vault_shape[ 0 ] * self.vault_shape[ 1 ] ) ]\n self.mem_topo = dict() # layer: vault_idx: [ [ pe_start, pe_end, cur_time, op_time ] ]\n\n\n \"\"\"\n use different strategy for cases when all weights could be loaded into chips or not\n \"\"\"\n cur_vault = 0\n while layer_index != len( self.layer_stack ):\n cur_layer = self.layer_stack[ layer_index ]\n if cur_layer[ \"type\" ] == \"CONV\":\n cur_vault = self.load_weight_conv( cur_vault, layer_index )\n elif cur_layer[ \"type\" ] == \"FC\":\n cur_vault = self.load_weight_fc( cur_vault, layer_index )\n \n if cur_vault >= self.total_vault:\n all_load = False \n else:\n # if all load, reset weight loading time\n all_load = True \n self.vault_time = [ [ 0, 0 ] for i in range( self.total_vault ) ]\n \n while layer_index != len( self.layer_stack ):\n cur_layer = self.layer_stack[ layer_index ]\n cur_layer_name = cur_layer[ \"name\" ]\n if cur_layer[ \"type\" ] == \"INPUT\":\n self.place_input_in_ram( cur_vault )\n cur_vault = 0\n\n elif cur_layer[ \"type\" ] == \"CONV\":\n cur_vault = self.process_conv( cur_vault, layer_index, all_load )\n\n elif cur_layer[ \"type\" ] == \"FC\":\n cur_vault = self.process_fc( cur_vault, layer_index, all_load )",
"def load_world(self):\n self.land, self.land_recs = self.load_geo(\n #shp_fname = #'data/ne_10m_admin_0_map_units/ne_10m_admin_0_map_units.shp'\n shp_fname = os.path.join(self.data_dir, 'ne_10m_admin_0_countries.shp')\n )\n self.ocean, self.ocean_recs = self.load_geo(\n shp_fname = os.path.join(self.data_dir, 'ne_10m_geography_marine_polys.shp')\n )\n\n self.lakes, self.lake_recs = self.load_geo(\n shp_fname = os.path.join(self.data_dir, 'ne_10m_lakes.shp')\n )\n\n self.area2geo = {**self.land_recs, **self.ocean_recs, **self.lake_recs}",
"def translateSWCs():\n num_nodes = 0\n outputname = 'combined-offset-connected.swc'\n out_f = open(outputname, 'w')\n # out_test = open('combined-offset-connected.swc', 'w')\n prev_nodes = {}\n filenames = []\n z_offsets = []\n for filename in os.listdir('.'):\n if '.swc' in filename and not '-offset' in filename:\n filenames.append(filename)\n z_offsets.append(float(filename.split('z=')[1].split(' ')[0])/10.0)\n indexes = range(len(z_offsets))\n indexes.sort(key=z_offsets.__getitem__)\n for i in indexes:\n f = open(filenames[i])\n lines = f.readlines()\n f.close()\n num_nodes += len(prev_nodes)\n nodes = {}\n leaves = []\n for line in [line.split(' ') for line in lines if not line.split(' ')[0] in ['#', '\\r\\n']]:\n index = int(float(line[0])) + num_nodes # node index\n nodes[index] = {}\n nodes[index]['type'] = int(float(line[1])) # sec_type\n nodes[index]['y'] = float(line[2]) # note the inversion of x, y.\n nodes[index]['x'] = float(line[3])\n nodes[index]['z'] = float(line[4]) + z_offsets[i]\n nodes[index]['r'] = float(line[5]) # radius of the sphere.\n nodes[index]['parent'] = int(float(line[6])) # index of parent node\n if not nodes[index]['parent'] == -1:\n nodes[index]['parent'] += num_nodes\n leaves.append(index)\n for index in nodes: # keep nodes with no children\n parent = nodes[index]['parent']\n if parent in leaves:\n leaves.remove(parent)\n for index in leaves:\n nodes[index]['type'] = 7\n print 'Saving '+filenames[i]+' to '+outputname\n if prev_nodes:\n leaves = [index for index in nodes if (nodes[index]['type'] == 7 or nodes[index]['parent'] == -1)]\n for prev_index in [index for index in prev_nodes if (prev_nodes[index]['type'] == 7 or\n prev_nodes[index]['parent'] == -1)]:\n for index in leaves:\n distance = math.sqrt((prev_nodes[prev_index]['x']-nodes[index]['x'])**2 +\n (prev_nodes[prev_index]['y']-nodes[index]['y'])**2 +\n (prev_nodes[prev_index]['z']-nodes[index]['z'])**2)\n # print prev_index, index, distance\n if distance < 2.:\n prev_nodes[prev_index]['type'] = 8\n nodes[index]['type'] = 8\n nodes[index]['parent'] = prev_index\n leaves.remove(index)\n break\n for index in prev_nodes:\n line = str(index)+' '+str(prev_nodes[index]['type'])+' '+str(prev_nodes[index]['y'])+' '+\\\n str(prev_nodes[index]['x'])+' '+str(prev_nodes[index]['z'])+' '+str(prev_nodes[index]['r'])+' '+\\\n str(prev_nodes[index]['parent'])+'\\n'\n out_f.write(line)\n prev_nodes = copy.deepcopy(nodes)\n for index in prev_nodes:\n line = str(index)+' '+str(prev_nodes[index]['type'])+' '+str(prev_nodes[index]['y'])+' '+\\\n str(prev_nodes[index]['x'])+' '+str(prev_nodes[index]['z'])+' '+str(prev_nodes[index]['r'])+' '+\\\n str(prev_nodes[index]['parent'])+'\\n'\n out_f.write(line)\n out_f.close()",
"def extract_data(ROOT_PATH, gtfs_foldername):\r\n # Generate graph with stops as nodes\r\n gtfs_path = os.path.join(ROOT_PATH, gtfs_foldername)\r\n G = nx.Graph()\r\n df_stops = pd.read_csv(gtfs_path+'\\stops.txt')\r\n stops = zip(df_stops.stop_id, df_stops.stop_name, df_stops.stop_lat,\r\n df_stops.stop_lon)\r\n\r\n for stop_id, stop_name, stop_lat, stop_lon in stops:\r\n node_name = stop_name+',' + stop_id[-1]\r\n\r\n# exceptions = []\r\n# if stop_name in exceptions:\r\n# print(stop_name)\r\n# raise Exception('Current stop is an exception')\r\n\r\n if node_name not in G.nodes:\r\n G.add_node(node_name, stop_lat = stop_lat,\r\n stop_lon = stop_lon)\r\n else:\r\n if G.nodes[node_name]['stop_lat'] == stop_lat and G.nodes[node_name]['stop_lon'] == stop_lon:\r\n pass\r\n else:\r\n print(node_name)\r\n warnings.warn('Stops repeating in stops.txt with different \\\r\n data, check stops.txt file again.')\r\n\r\n\r\n # Extract travel time data from time-table adherence files\r\n count_files = 0\r\n count_trips = 0\r\n list_tripshapes = set() # list of shape names of all trips analyzed\r\n csv_path = os.path.join(ROOT_PATH, 'cumtd_datasheets')\r\n\r\n # Iterate over files in the directory\r\n for filename in tqdm(os.listdir(csv_path)):\r\n\r\n file_path = os.path.join(csv_path, filename)\r\n df = pd.read_csv(file_path, header=1)\r\n count_files += 1\r\n\r\n # Delete columns with two values in Type and reindex\r\n index_toremove = df.loc[df.Type.isin(['Stop without doors','Stop with doors','Dead run out', ])].index\r\n df = df.drop(index_toremove)\r\n index_trips = df.loc[df.Type.isin(['Additional trip','Trip'])].index\r\n\r\n # Iterate over trips in the file\r\n for item in zip(index_trips[0:-1], index_trips[1:]):\r\n\r\n index_start, index_end = item\r\n trip_shape = df.Graphic[index_start][9:]\r\n list_tripshapes.add(trip_shape)\r\n count_trips += 1\r\n\r\n # list of stop pairs between index_start and index_end\r\n stop_pairs = zip(df.index[index_start+1:index_end-1],\r\n df.index[index_start+2:index_end])\r\n\r\n for pair in stop_pairs:\r\n\r\n index_1, index_2 = pair\r\n\r\n types_ofnodes = ['Stop', 'Drive through']\r\n if str(df['Type'][index_1]) not in types_ofnodes or str(df['Type'][index_2]) not in types_ofnodes:\r\n continue\r\n\r\n node1 = str(df['Stop'][index_1])\r\n node2 = str(df['Stop'][index_2])\r\n\r\n if G.has_node(node1) and G.has_node(node2):\r\n\r\n time_node1 = _get_sec(df['Actual dep'][index_1])\r\n time_node2 = _get_sec(df['Actual dep'][index_2])\r\n dist_node1 = df['Sched. total distance'][index_1]*1609.34\r\n dist_node2 = df['Sched. 
total distance'][index_2]*1609.34\r\n time = time_node2 - time_node1\r\n distance = dist_node2 - dist_node1\r\n\r\n if time < 0 or distance < 0 or distance >= 2000:\r\n print(node1); print(node2)\r\n print(\"Time: {} and Distance: {}\".format(time, distance))\r\n warnings.warn('Time and distance should be positive and bounded')\r\n else:\r\n _add_timedata(G, node1, node2, count_trips, time, distance, trip_shape)\r\n else:\r\n print(node1); print(node2)\r\n warnings.warn('Stops from data do not exist in graph')\r\n\r\n remove_list = []\r\n for u,v,data in G.edges(data=True):\r\n if len(data['time_data']) <= 10:\r\n remove_list.append((u,v))\r\n G.remove_edges_from(remove_list)\r\n\r\n # Checks and printing stats\r\n print(\"\\nNo.of csv data files parsed: {}\\n\".format(count_files))\r\n print(\"No.of trips parsed: {}\\n\".format(count_trips))\r\n\r\n total_nodes = G.number_of_nodes()\r\n total_edges = G.number_of_edges()\r\n inactive_nodes = list(nx.isolates(G))\r\n num_activenodes = total_nodes - len(inactive_nodes)\r\n num_samples = 0\r\n for edge in list(G.edges.data()):\r\n num_samples += len(edge[2]['time_data'])\r\n avg_samplecount = num_samples/(total_edges)\r\n\r\n print(\"Total no.of stops (nodes): {}\\n\".format(total_nodes))\r\n print(\"Total no.of edges (road segements with data): {}\\n\".format(total_edges))\r\n print(\"No.of stops (nodes) with data: {}\\n\".format(num_activenodes))\r\n print(\"Average no.of samples on each road segment: {}\\n\".format(avg_samplecount))\r\n\r\n return G, list_tripshapes",
"def loaddata():\n data = json.load(open('./overpass/street_graph.json'))\n total = len(data['elements'])\n\n for i, element in enumerate(data['elements']):\n etype = element['type']\n eid = element['id']\n\n if etype == 'node':\n # load to GEOHASH with ID\n red.geoadd('base:nodehash', element['lon'], element['lat'], eid)\n # add to node count\n red.pfadd('base:node:count', eid)\n\n elif etype == 'way':\n # add nodes to way\n red.rpush('base:way:{}:nodes'.format(eid), *element['nodes'])\n # add to way count\n red.pfadd('base:way:count', eid)\n\n # add this way to node relations\n for node in element['nodes']:\n red.rpush('base:node:{}:ways'.format(node), eid)\n\n # add this way's tags\n for tag, value in element['tags'].items():\n red.set('base:way:{}:{}'.format(eid, tag), value)\n\n print('loaded {}/{}'.format(i+1, total), end='\\r', flush=True)\n\n return 'done'",
"def loadDataMJD():\n from array import array\n from ROOT import TChain, TFile, TTree\n\n # load the data\n tt = TChain(\"skimTree\")\n for ds in dsList:\n tt.Add(\"%s/final95t/final95t_DS%s.root\" % (dsi.cutDir, ds))\n\n # declare output\n fName = \"%s/data/latDS%s.root\" % (dsi.latSWDir, ''.join([str(d) for d in dsList]))\n fOut = TFile(fName,\"RECREATE\")\n tOut = TTree(\"skimTree\", \"skimTree\")\n run = array('i',[0])\n iEvt = array('i',[0])\n iHit = array('i',[0])\n chan = array('i',[0])\n hitE = array('d',[0.])\n isEnr = array('i',[0])\n weight = array('d',[0.])\n tOut.Branch(\"run\", run, \"run/I\")\n tOut.Branch(\"iEvent\", iEvt, \"iEvent/I\")\n tOut.Branch(\"iHit\", iHit, \"iHit/I\")\n tOut.Branch(\"channel\", chan, \"channel/I\")\n tOut.Branch(\"trapENFCal\", hitE, \"trapENFCal/D\")\n tOut.Branch(\"isEnr\", isEnr, \"isEnr/I\")\n tOut.Branch(\"weight\", weight, \"weight/D\")\n\n for iE in range(tt.GetEntries()):\n tt.GetEntry(iE)\n run[0] = tt.run\n iEvt[0] = tt.iEvent\n for iH in range(tt.channel.size()):\n iHit[0] = tt.iHit.at(iH)\n chan[0] = tt.channel.at(iH)\n hitE[0] = tt.trapENFCal.at(iH)\n\n # calculate weight based on 1/efficiency\n if hitE[0] > effLim:\n weight[0] = 1/effMax\n else:\n idx = (np.abs(xEff-hitE[0])).argmin()\n weight[0] = 1/np.interp(hitE[0], xEff[idx:idx+1], detEff[idx:idx+1])\n # if hitE[0] < effLim:\n # print(\"%.2f %.2f \" % (hitE[0], weight[0]))\n\n if \"P\" in tt.detName.at(iH): isEnr[0] = 1\n elif \"B\" in tt.detName.at(iH): isEnr[0] = 0\n else:\n print(\"WTF, error\")\n exit(0)\n tOut.Fill()\n\n tOut.Write()\n fOut.Close()\n\n # verify\n # f2 = TFile(fName)\n # t2 = f2.Get(\"skimTree\")\n # t2.Scan(\"run:channel:isEnr:trapENFCal:weight\")",
"def read_everything():\n\n ### Paths to the fullsed, source and temperature files:\n fullsed_path = '../OldBeAtlas/fullsed_v2/'\n #fullsed_path = '../OldBeAtlas/fullsed/'\n source_path = '../OldBeAtlas/source/'\n temps_path = '../OldBeAtlas/temperatures/'\n\n ### assumed distance [parsecs] for the calculations\n dist_std = 10.\n\n\n ###########################\n \n ### The domain of the power-law grid:\n npar, sigpar, Mpar, obpar, cosipar = domain_PLgrid()\n filepars=[npar,sigpar,Mpar,obpar]\n\n print(\"Reading the OldBeAtlas files...\")\n print(\"\")\n\n files_fullsed=sorted(glob.glob(fullsed_path+'*'))\t\n files_source=sorted(glob.glob(source_path+'*'))\n files_temps=sorted(glob.glob(temps_path+'*'))\n\n files_fullsed_new=[] ### will receive the names of the fullsed\n ### files to be opened.\n\n ### It is assumed that the names of the fullsed files are of the form:\n ### fullsed_mod191_PLn4.0_sig0.05_h072_Rd050.0_Be_M04.80_ob1.10_H0.30_Z0.014_bE_Ell.sed2\n ### or\n ### fullsed_mod01_PLn3.5_sig0.00_h060_Rd050.0_Be_M03.80_ob1.20_H0.77_Z0.014_bE_Ell.sed2\n for i in range(0,len(npar)):\n for j in range(0,len(sigpar)):\n for k in range(0,len(Mpar)):\n for l in range(0,len(obpar)):\n ### Check if there is a fullsed file with some specific\n ### values of n, Sig, M and ob:\n for ifile in xrange(0,len(files_fullsed)):\n if ('PLn{0}_sig{1}_h072_Rd050.0_Be_'\\\n .format(filepars[0][i],filepars[1][j])+\\\n 'M{0}_ob{1}_H0.30_Z0.014_bE_Ell'\\\n .format(filepars[2][k],filepars[3][l]) in \\\n files_fullsed[ifile]) \\\n or ('PLn{0}_sig{1}_h060_Rd050.0_Be_'\\\n .format(filepars[0][i],filepars[1][j])+\\\n 'M{0}_ob{1}_H0.30_Z0.014_bE_Ell'\\\n .format(filepars[2][k],filepars[3][l]) in \\\n files_fullsed[ifile]):\n \n ### elements of 'files_fullsed_new' are = \n ### [ [n,sig,M,ob], \"fullsed file\" ]\n files_fullsed_new.append([[ filepars[0][i],\\\n filepars[1][j],\\\n filepars[2][k],\\\n filepars[3][l]],\\\n files_fullsed[ifile]]) \n\n ### Now that we have a 'files_fullsed_new' list complete, the idea is\n ### to create lists of source and temperature files in such a way that, \n ### for each fullsed file stored in a 'files_fullsed_new' line, \n ### there is a line with the correspondent source file in \n ### 'files_source_new' and a line with the correspondent temp file in \n ### 'files_temps_new'. \n\n ### It is assumed that the names of the source files are of the form:\n ### Be_M03.40_ob1.45_H0.54_Z0.014_bE_Ell.txt\n ### (Notice that the it is contained in the name of the fullsed file.)\n files_source_new=[] ### will receive the names of the source\n ### files to be opened.\n for iffn in xrange(0,len(files_fullsed_new)):\n ### Check if there is a source file whose name is contained in \n ### the name of the specific fullsed file:\n for ifs in xrange(0,len(files_source)):\n if files_source[ifs].replace(source_path,'').replace('.txt','')\\\n in files_fullsed_new[iffn][1]:\n files_source_new.append(files_source[ifs])\n ### (Notice that I have assumed that there is always a source file \n ### associated with a fullsed file. 
That is not the case with the \n ### temperature files below.)\n\n\n ### It is assumed that the names of the temperature files are of the form:\n ### mod126_PLn3.5_sig0.28_h072_Rd050.0_Be_M09.60_ob1.20_H0.30_Z0.014_bE_Ell30_avg.temp\n ### (Notice that the it is contained in the name of the fullsed file.)\n files_temps_new=[] ### will receive the names of the temperature\n ### files to be opened.\n for iffn in xrange(0,len(files_fullsed_new)):\n achei=0 ### Some fullsed files may not have correspondent temp files,\n ### like the ones of purely photospherical models.\n ### Check if there is a temperature file whose name is contained in\n ### the name of the specific fullsed file.\n ### If not, add \"EMPTY\" to the 'files_temps_new' list.\n for ifs in xrange(0,len(files_temps)):\n if files_temps[ifs].replace(temps_path,'').replace(\\\n '30_avg.temp','')\\\n in files_fullsed_new[iffn][1]:\n files_temps_new.append(files_temps[ifs])\n achei=1\n if achei == 0:\n files_temps_new.append('EMPTY')\n\n\n ### Now, building the 'fullsed_contents' list. It will contain the \n ### relevant contents of all available fullsed, source and temperature \n ### files of the grid.\n\n fullsed_contents=[] ### This list will receive the important contents\n ### of all the files\n for ifile in xrange(0,len(files_fullsed_new)):\n\n ### Reading the fullsed, source and temperature files:\n \n fullsedtest=files_fullsed_new[ifile][1]\n f0=open(fullsedtest,'r')\n f0linhas=f0.readlines()\n f0.close()\n\n sourcetest=files_source_new[ifile]\n f1=open(sourcetest,'r')\n f1linhas=f1.readlines()\n f1.close() \n\n tempstest=files_temps_new[ifile]\n if tempstest != 'EMPTY':\n ### OBS: This pyhdust procedure will print \n ### \"'FILE' completely read!\"\n ncr, ncmu, ncphi, nLTE, nNLTE, Rstarz, Raz, betaz, dataz, \\\n pcr, pcmu, pcphi = hdt.readtemp(tempstest)\n abttemp=[\n [dataz[0,i,ncmu/2,0]/Rstarz for i in \\\n xrange(0,len(dataz[0,:,ncmu/2,0]))],\n [dataz[3,i,ncmu/2,0] for i in \\\n xrange(0,len(dataz[3,:,ncmu/2,0]))]\n ]\n else:\n abttemp=[\n [np.nan,np.nan],\n [np.nan,np.nan]\n ]\n\n\n ### Obtaining each element of the 'fullsed_contents' list\n\n nobs=int(f0linhas[3].split()[1]) ### number of different cosi\n nlbd=int(f0linhas[3].split()[0]) ### number of lambdas for each cosi\n contents=[ \n fullsedtest, ### 0: Name of fullsed file\n np.zeros(nobs), ### 1: will receive the cosi's\n np.zeros((nobs,nlbd,3)), ### 2: will receive the SED\n sourcetest, ### 3: Name of source file\n np.zeros(5), ### 4: will receive the \n ### parameters of the star \n ### (source)\n tempstest, ### 5: Name of temperature file\n np.zeros((2,len(abttemp[0]))), ### 6: will receive the temp \n ### profile\n [[],[]]\n ]\n contents[1][:] = np.nan\n contents[2][:] = np.nan\n contents[4][:] = np.nan\n contents[6][:] = np.nan\n\n\n ### Receiving cosi and SED (\"1\" and \"2\")\n for iobs in xrange(0,nobs):\n mu = float(f0linhas[5+iobs*nlbd].split()[0])\n contents[1][iobs] = mu\n for ilbd in xrange(0, nlbd):\n auxi = f0linhas[5+iobs*nlbd+ilbd].split()\n contents[2][iobs, ilbd, 0] = float(auxi[2])\n contents[2][iobs, ilbd, 1] = float(auxi[3])\n contents[2][iobs, ilbd, 2] = float(auxi[7])\n\n\n ### Receiving parameters of the star (source) (\"4\")\n contents[4][0] = float(f1linhas[3].split()[2]) ### M\n contents[4][1] = float(f1linhas[4].split()[2]) ### R_pole\n contents[4][2] = float(f1linhas[5].split()[2]) ### W\n contents[4][3] = float(f1linhas[6].split()[2]) ### L\n contents[4][4] = float(f1linhas[7].split()[2]) ### Beta_GD\n \n ### Receiving the temperature 
profile (\"6\")\n for i in xrange(0,len(contents[6][0,:])):\n contents[6][0,i] = abttemp[0][i]\n contents[6][1,i] = abttemp[1][i]\n \n ### elements of 'fullsed_contents':\n fullsed_contents.append([files_fullsed_new[ifile][0],contents])\n\n print(\"\")\n\n return files_fullsed_new, files_source_new, files_temps_new, fullsed_contents, \\\n fullsed_path, source_path, temps_path, dist_std",
"def FIG5_RegionalTransport():\n\n def create_transport_matrix(so_lat=-60, na_lat=65):\n n_matrix={} #Matrix with entries n_{ij} in paper as a list for each layer\n \n ddeg = 2. #Choice of binning, required for the region labels. ddeg=2 has no influence here on the result as we are perfectly on the region boundaries\n \n for j in range(len(folders_all)): \n \n if subfolders_all[j]=='Sim3D':\n pdata=ParticleData.from_nc(folders_all[j] + '/', filenames_all[j], tload=[0,121], Ngrids=12)\n else:\n pdata=ParticleData.from_nc(folders_all[j] + '/', filenames_all[j], tload=[0,-1], Ngrids=40)\n\n pdata.remove_nans() #Remove some Nans\n \n #Give the particles labels according to the basin they are in at different times\n initial_region = pdata.set_region_labels(ddeg, 0, so_lat=so_lat, na_lat=na_lat)\n final_region = pdata.set_region_labels(ddeg, -1, so_lat=so_lat, na_lat=na_lat)\n \n n=np.zeros((8,8)) #n-matrix for the specific layer\n \n for i in range(len(initial_region)):\n n[int(initial_region[i]),int(final_region[i])]+=1\n \n n_matrix[models_all[j]]=n\n \n np.save(outdir_paper + 'n_matrix_solat_'+str(so_lat) + '_na_lat_'+str(na_lat), n_matrix)\n \n \n# for so_lat in [-56, -60, -62]: \n# create_transport_matrix(so_lat=so_lat)\n\n# for na_lat in [60, 70]: \n# create_transport_matrix(na_lat=na_lat)\n\n \n \n def polward_transport():\n n_matrix = np.load(outdir_paper + 'n_matrix_solat_-60_na_lat_65.npy').tolist()\n\n n_matrix_so56 = np.load(outdir_paper + 'n_matrix_solat_-56_na_lat_65.npy').tolist()\n n_matrix_so62 = np.load(outdir_paper + 'n_matrix_solat_-62_na_lat_65.npy').tolist()\n\n n_matrix_na60 = np.load(outdir_paper + 'n_matrix_solat_-60_na_lat_60.npy').tolist()\n n_matrix_na70 = np.load(outdir_paper + 'n_matrix_solat_-60_na_lat_70.npy').tolist()\n\n layers = [0, 2, 4, 7, 10, 13, 16, 19, 22, 23, 25]\n depths = [nemo_depth[d] for d in layers]\n depths.append(140)\n depths.append(160)\n depths.append(180)\n \n f_to_southern_ocean=[]\n f_to_southern_ocean_56=[]\n f_to_southern_ocean_62=[]\n \n f_to_arctic=[]\n f_to_arctic_60=[]\n f_to_arctic_70=[]\n \n _, region_names = regions(2.)\n \n for i in range(len(folders_all)):\n print models_all[i]\n n=n_matrix[models_all[i]]\n n56=n_matrix_so56[models_all[i]]\n n62=n_matrix_so62[models_all[i]]\n \n n60=n_matrix_na60[models_all[i]]\n n70=n_matrix_na70[models_all[i]]\n \n #row-normalize the matrices x to get F\n \n #(so_lat,sa_lat) = (-60, 65)\n s=np.sum(n,axis=1)\n f=np.zeros(n.shape) \n for j in range(len(n)):\n if s[j]!=0:\n f[j:,]=n[j,:]/s[j]\n f_to_southern_ocean.append(f[:,7])\n f_to_arctic.append(f[:,6])\n \n #(so_lat,sa_lat) = (-56, 65)\n s=np.sum(n56,axis=1)\n f=np.zeros(n56.shape) \n for j in range(len(n56)):\n if s[j]!=0:\n f[j:,]=n56[j,:]/s[j]\n f_to_southern_ocean_56.append(f[:,7])\n\n #(so_lat,sa_lat) = (-62, 65)\n s=np.sum(n62,axis=1)\n f=np.zeros(n62.shape) \n for j in range(len(n62)):\n if s[j]!=0:\n f[j:,]=n62[j,:]/s[j]\n\n f_to_southern_ocean_62.append(f[:,7])\n\n #(so_lat,sa_lat) = (-60, 60)\n s=np.sum(n60,axis=1)\n f=np.zeros(n60.shape) \n for j in range(len(n60)):\n if s[j]!=0:\n f[j:,]=n60[j,:]/s[j]\n\n f_to_arctic_60.append(f[:,6])\n\n #(so_lat,sa_lat) = (-60, 70)\n s=np.sum(n70,axis=1)\n f=np.zeros(n70.shape) \n for j in range(len(n70)):\n if s[j]!=0:\n f[j:,]=n70[j,:]/s[j]\n\n f_to_arctic_70.append(f[:,6])\n \n #To percent\n f_to_southern_ocean=100*np.array(f_to_southern_ocean)\n f_to_southern_ocean_56=100*np.array(f_to_southern_ocean_56)\n f_to_southern_ocean_62=100*np.array(f_to_southern_ocean_62)\n 
f_to_arctic=100*np.array(f_to_arctic)\n f_to_arctic_60=100*np.array(f_to_arctic_60)\n f_to_arctic_70=100*np.array(f_to_arctic_70)\n \n \n #Create figures\n plt.figure(figsize = (12,10)) \n gs1 = gridspec.GridSpec(2, 2)\n gs1.update(wspace=.25, hspace=.35)\n \n #SO F\n plt.subplot(gs1[0])\n plt.grid(linestyle='--', linewidth=1)\n plt.title('a) Transport to Southern Ocean', size=12, y=1.01)\n plt.ylabel('$F_{basin,southern}$ [%]', size=12)\n \n plt.plot(depths[0:-3], f_to_southern_ocean[0:-3,5], label = region_names[4], marker='o', linestyle=':', c='b', markersize=5)\n plt.fill_between(depths[0:-3], f_to_southern_ocean_62[:,5][0:-3], f_to_southern_ocean_56[:,5][0:-3] , color='b', alpha=0.2)\n yerr1 = f_to_southern_ocean[:,5][-3:]-f_to_southern_ocean_62[:,5][-3:]\n yerr2 = f_to_southern_ocean_56[:,5][-3:]-f_to_southern_ocean[:,5][-3:] \n plt.errorbar(depths[-3:], f_to_southern_ocean[:,5][-3:], yerr=[yerr1,yerr2], c='b', fmt='o', capsize=5, markersize=5)\n \n plt.plot(depths[0:-3], f_to_southern_ocean[:,4][0:-3], label = region_names[3], marker='o', linestyle=':', c='g', markersize=5)\n plt.fill_between(depths[0:-3], f_to_southern_ocean_62[:,4][0:-3], f_to_southern_ocean_56[:,4][0:-3] , color='g', alpha=0.2) \n yerr1 = f_to_southern_ocean[:,4][-3:]-f_to_southern_ocean_62[:,4][-3:]\n yerr2 = f_to_southern_ocean_56[:,4][-3:]-f_to_southern_ocean[:,4][-3:] \n plt.errorbar(depths[-3:], f_to_southern_ocean[:,4][-3:], yerr=[yerr1,yerr2], c='g', fmt='o', capsize=5, markersize=5)\n\n plt.plot(depths[0:-3], f_to_southern_ocean[:,3][0:-3], label = region_names[2], marker='o', linestyle=':', c='r', markersize=5)\n plt.fill_between(depths[0:-3], f_to_southern_ocean_62[:,3][0:-3], f_to_southern_ocean_56[:,3][0:-3] , color='r', alpha=0.2) \n yerr1 = f_to_southern_ocean[:,3][-3:]-f_to_southern_ocean_62[:,3][-3:]\n yerr2 = f_to_southern_ocean_56[:,3][-3:]-f_to_southern_ocean[:,3][-3:] \n plt.errorbar(depths[-3:], f_to_southern_ocean[:,3][-3:], yerr=[yerr1,yerr2], c='r', fmt='o', capsize=5, markersize=5)\n\n plt.grid(linestyle='--', linewidth=1)\n leg = plt.legend(prop={'size': 10})\n leg.set_title('Basin of origin', prop = {'size':12})\n plt.tick_params(axis='both', which='major', labelsize=10)\n plt.yticks(np.arange(0,10,2)) \n xlabels = np.arange(0,140,20)\n xlabels = np.append(xlabels, ['(ii)','(iii)', '(iv)'])\n plt.xticks(np.arange(0,200,20), xlabels)\n \n #F_SOSO\n plt.subplot(gs1[1])\n plt.title('b) Transport within Southern Ocean', size=12, y=1.01)\n plt.ylabel(r'$F_{southern,southern}$ [%]', size=12)\n plt.plot(depths[0:-3], f_to_southern_ocean[:,7][0:-3], marker='o', linestyle=':', c='k', markersize=5)\n plt.fill_between(depths[0:-3], f_to_southern_ocean_62[:,7][0:-3], f_to_southern_ocean_56[:,7][0:-3] , color='k', alpha=0.2)\n yerr1 = f_to_southern_ocean[:,7][-3:]-f_to_southern_ocean_62[:,7][-3:]\n yerr2 = f_to_southern_ocean_56[:,7][-3:]-f_to_southern_ocean[:,7][-3:] \n plt.errorbar(depths[-3:], f_to_southern_ocean[:,7][-3:], yerr=[yerr1,yerr2], c='k', fmt='o', capsize=5, markersize=5)\n plt.grid(linestyle='--', linewidth=1)\n plt.tick_params(axis='both', which='major', labelsize=10)\n plt.yticks(np.arange(0,90,10))\n plt.xticks(np.arange(0,200,20), xlabels)\n\n #Arctic F\n plt.subplot(gs1[2])\n plt.title(r'c) Transport to Arctic', size=12, y=1.01) \n plt.ylabel(r'$F_{basin,arctic}$ [%]', size=12)\n plt.plot(depths[0:-3], f_to_arctic[:,2][0:-3], label = region_names[1], marker='o', linestyle=':', c='g', markersize=5)\n plt.fill_between(depths[0:-3], f_to_arctic_60[:,2][0:-3], 
f_to_arctic_70[:,2][0:-3] , color='g', alpha=0.2) \n yerr1 = f_to_arctic_60[:,2][-3:]-f_to_arctic[:,2][-3:]\n yerr2 = f_to_arctic[:,2][-3:]-f_to_arctic_70[:,2][-3:] \n plt.errorbar(depths[-3:], f_to_arctic[:,2][-3:], yerr=[yerr1,yerr2], c='g', fmt='o', capsize=5, markersize=5)\n plt.plot(depths[0:-3], f_to_arctic[:,1][0:-3], label = region_names[0], marker='o', linestyle=':', c='r', markersize=5)\n plt.fill_between(depths[0:-3], f_to_arctic_60[:,1][0:-3], f_to_arctic_70[:,1][0:-3] , color='r', alpha=0.2)\n yerr1 = f_to_arctic_60[:,1][-3:]-f_to_arctic[:,1][-3:]\n yerr2 = f_to_arctic[:,1][-3:]-f_to_arctic_70[:,1][-3:]\n plt.errorbar(depths[-3:], f_to_arctic[:,1][-3:], yerr=[yerr1,yerr2], c='r', fmt='o', capsize=5, markersize=5)\n plt.grid(linestyle='--', linewidth=1)\n leg = plt.legend(prop={'size': 10})\n leg.set_title('Basin of origin', prop = {'size':12})\n plt.tick_params(axis='both', which='major', labelsize=10)\n plt.yticks(np.arange(0,16,2))\n plt.xticks(np.arange(0,200,20), xlabels)\n plt.xlabel('(i) fixed depth [m]', size=10)\n ax = plt.gca()\n ax.xaxis.set_label_coords(0.4, -0.1) \n\n\n #F_AA\n plt.subplot(gs1[3])\n plt.title('d) Transport within Arctic', size=12, y=1.01)\n plt.ylabel(r' $F_{arctic,arctic}$ [%]', size=12)\n plt.plot(depths[0:-3], f_to_arctic[:,6][0:-3], label = region_names[5], marker='o', linestyle=':', c='k', markersize=5)\n plt.fill_between(depths[0:-3], f_to_arctic_60[:,6][0:-3], f_to_arctic_70[:,6][0:-3] , color='k', alpha=0.2)\n yerr1 = f_to_arctic_60[:,6][-3:]-f_to_arctic[:,6][-3:]\n yerr2 = f_to_arctic[:,6][-3:]-f_to_arctic_70[:,6][-3:]\n plt.errorbar(depths[-3:], f_to_arctic[:,6][-3:], yerr=[yerr1,yerr2], c='k', fmt='o', capsize=5, markersize=5) \n plt.grid(linestyle='--', linewidth=1)\n plt.tick_params(axis='both', which='major', labelsize=10)\n plt.yticks(np.arange(40,90,10))\n plt.xticks(np.arange(0,200,20), xlabels)\n plt.xlabel('(i) fixed depth [m]', size=10)\n ax = plt.gca()\n ax.xaxis.set_label_coords(0.4, -0.1) \n\n\n plt.savefig(outdir_paper + 'F5_regional_transport_to_poles.pdf', dpi=900, bbox_inches='tight')\n \n polward_transport()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
outputs an slf file in beam format
|
def create_slf_file(self):
    """Write the structure to an .slf file in beam format."""
    mesh = open(self.name, 'w')
    # Header: element, node, material and mode counts.
    mesh.write('numel numnp nmat nmode (This is for a beam bridge)\n')
    mesh.write(str(len(self.edge_list))+'\t'+str(len(self.node_list))
               + '\t'+str(len(self.beams)) + '\t0\n')
    mesh.write('matl no., E mod, Poiss. Ratio, density, Area, Iy, Iz\n')
    # Material table: one row per beam cross-section. Note that the Iz column
    # is filled from the 'ix' key of each beam dict.
    for i, beam in enumerate(self.beams):
        mesh.write(str(i)+' '+str(beam['emod'])+'\t0.3000\t'
                   + str(beam['density'])+'\t'+str(beam['area'])
                   + '\t'+str(beam['iy'])+'\t'+str(beam['ix']) + '\n')
    mesh.write('el no.,connectivity, matl no, element type\n')
    # Element connectivity: end nodes, material index, element type flag
    # (a constant 2 in this beam format).
    for i, edge in enumerate(self.edge_list):
        mesh.write(str(i)+'\t'+str(edge['pt_a'])+'\t'+str(edge['pt_b'])
                   + '\t'+str(edge['material'])+'\t2 \n')
mesh.write('node no., coordinates\n')
for node in self.node_list:
mesh.write(node['id']+'\t'+str(node['x'])+'\t'+str(node['y'])+'\t'+str(node['z'])+"\n")
mesh.write("element with specified local z axis: x, y, z component\n -10\n")
    # Boundary conditions: every fixed node is fully restrained (0.0) in all
    # three translations and all three rotations; -10 terminates each list.
    mesh.write('prescribed displacement x: node disp value\n')
for node in self.fixed_list:
# if node[1] == True: # un-comment when dealing with fixed-roller structures
mesh.write(node[0]['id']+"\t0.0\n")
mesh.write('-10\nprescribed displacement y: node disp value\n')
for node in self.fixed_list:
mesh.write(node[0]['id']+"\t0.0\n")
mesh.write('-10\nprescribed displacement z: node disp value\n')
for node in self.fixed_list:
mesh.write(node[0]['id']+"\t0.0\n")
mesh.write('-10\nprescribed angle phi x: node angle value\n')
for node in self.fixed_list:
# if node[1] == True: # un-comment when dealing with fixed-roller structures
mesh.write(node[0]['id']+"\t0.0\n")
mesh.write('-10\nprescribed angle phi y: node angle value\n')
for node in self.fixed_list:
mesh.write(node[0]['id']+"\t0.0\n")
mesh.write('-10\nprescribed angle phi z: node angle value\n')
for node in self.fixed_list:
mesh.write(node[0]['id']+"\t0.0\n")
mesh.write('-10\nnode with point load x, y, z and 3 moments phi x, phi y, phi z\n')
    # Point loads: accumulate the vertical self-weight plus any cable/ground
    # reactions at tagged nodes, then write one load line per node
    # (Fx, Fy, Fz and three zero moments).
    if self.BROKEN:
        for node in self.nodeselfloads:
            trans = 0
            broken_long = 0
            for node_id in self.load_nodes:
                if node_id == node[0]:
                    node[1] = node[1] + self.vertical_cable_load
                    trans = self.transverse_cable_load
            if self.GROUND_BROKEN:
                for node_id in self.ground_node:
                    if node_id == node[0]:
                        node[1] = node[1] + self.vertical_ground_load_broken
                        trans = self.transverse_ground_load
                        broken_long = self.longitudinal_ground_load
                for node_id in self.break_node:
                    if node_id == node[0]:
                        node[1] = node[1] + self.vertical_cable_load_broken
                        broken_long = self.longitudinal_cable_load
                        trans = self.transverse_cable_load
            else:
                for node_id in self.ground_node:
                    if node_id == node[0]:
                        node[1] = node[1] + self.vertical_ground_load
                        trans = self.transverse_ground_load
                for node_id in self.break_node:
                    if node_id == node[0]:
                        node[1] = node[1] + self.vertical_cable_load_broken
                        broken_long = self.longitudinal_cable_load
                        trans = self.transverse_cable_load
            mesh.write(str(node[0])+'\t'+str(broken_long)+'\t'+str(trans)+'\t-'+str(round(node[1],5))+'\t0\t0\t0\n')
    else:
        for node in self.nodeselfloads:
            trans = 0
            for node_id in self.load_nodes:
                if node_id == node[0]:
                    node[1] = node[1] + self.vertical_cable_load
                    trans = self.transverse_cable_load
            for node_id in self.ground_node:
                if node_id == node[0]:
                    node[1] = node[1] + self.vertical_ground_load
                    trans = self.transverse_ground_load
            mesh.write(str(node[0])+'\t0\t'+str(trans)+'\t-'+str(round(node[1],5))+'\t0\t0\t0\n')
mesh.write('-10\nelement with distributed load in global beam y and z coordinates\n')
mesh.write('-10\nelement no. and gauss pt. no. with local stress vector xx and moment xx,yy,zz\n-10')
mesh.close()
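
A minimal sketch of how this writer could be driven, assuming a hypothetical bridge object: the attribute names mirror those the method reads (name, beams, edge_list, node_list, fixed_list, nodeselfloads, load_nodes, ground_node and the cable/ground load scalars), and every value below is illustrative only.

# Hypothetical driver for create_slf_file; all attribute values are illustrative.
from types import SimpleNamespace

bridge = SimpleNamespace(
    name='bridge.slf',
    BROKEN=False,
    beams=[{'emod': 2.1e11, 'density': 7850.0, 'area': 0.01,
            'iy': 2.0e-5, 'ix': 4.0e-5}],
    edge_list=[{'pt_a': 0, 'pt_b': 1, 'material': 0}],
    node_list=[{'id': '0', 'x': 0.0, 'y': 0.0, 'z': 0.0},
               {'id': '1', 'x': 5.0, 'y': 0.0, 'z': 0.0}],
    fixed_list=[[{'id': '0'}, True]],   # [node dict, is_fixed]
    nodeselfloads=[[1, 1.25]],          # [node id, vertical self-weight]
    load_nodes=[],
    ground_node=[],
    vertical_cable_load=0.0, transverse_cable_load=0.0,
    vertical_ground_load=0.0, transverse_ground_load=0.0,
)

# Unbound call works because the function only reads attributes of self.
create_slf_file(bridge)   # writes bridge.slf in beam format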
|
[
"def writeInputFile(beam,lattice,fname='test.in'):\n if sum(beam.multi_charge.n_particles) != beam.n_particles:\n #print('input error <- sum(beam.multi_charge.n_particles) not qual to beam.n_particles')\n if beam.multi_charge.n_states == 1:\n #print(' ... enforcing beam.multi_charge.n_particles[0] to beam.n_particles')\n beam.multi_charge.n_particles[0]=beam.n_particles\n else:\n raise ValueError('program terminating...')\n \n if beam.multi_charge.n_states == 1 and beam.multi_charge.current[0] != beam.current :\n #print('input error <- beam.multi_charge.current[0] not qual to beam.current')\n #print(' ... enforcing beam.multi_charge.current[0] to beam.current')\n beam.multi_charge.current[0] = beam.current\n \n beamStr = _beam2str(beam)\n for i in range(len(beamStr)):\n beamStr[i].append('\\n')\n beamStr[i] = \" \".join(beamStr[i])\n \n latticeStr = []\n for i in range(len(lattice)):\n latticeStr.append(_elem2str(lattice[i]))\n latticeStr[i].append('/')\n latticeStr[i].append('\\n')\n latticeStr[i] = \" \".join(latticeStr[i])\n \n f=open(fname,'w') \n f.writelines(['!================= Beam & Control Parameters ================= \\n'])\n f.writelines(beamStr)\n f.writelines(['!========================== Lattice ========================== \\n'])\n f.writelines(latticeStr)\n f.close()",
"def output_fuse_txt(f_nb, FLOORS_NB, ed, out, mw, adui, awg, afg, NAME):\n\n out_name = 'ToolOutput/' + NAME + '/' + NAME\\\n + '_Weight_unc_module.out'\n OutputTextFile = open(out_name, 'w')\n OutputTextFile.write('###############################################')\n OutputTextFile.write('\\n###### UNCONVENTIONAL AIRCRAFT ######')\n OutputTextFile.write('\\n##### WEIGHT ESTIMATION MODULE OUTPUTS #####')\n OutputTextFile.write('\\n###############################################')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nAircraft: ' + NAME )\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\n')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nAircraft Geometry Values used------------------')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nNumber of fuselages [-]: ' + str(f_nb))\n OutputTextFile.write('\\nFuselage Length [m]: '\\\n + str(afg.fuse_length))\n OutputTextFile.write('\\nFuselage mean Width [m]: '\\\n + str(afg.fuse_mean_width))\n OutputTextFile.write('\\nWing span [m]: '\\\n + str(round(max(awg.wing_span),3)))\n OutputTextFile.write('\\nTotal main wings plantform area [m^2]: '\\\n + str(awg.wing_plt_area_main))\n if FLOORS_NB > 1:\n OutputTextFile.write('\\nThe aircraft has: ' + str(FLOORS_NB)\\\n + 'floors')\n else:\n OutputTextFile.write('\\nThe aircraft has 1 floor')\n OutputTextFile.write('\\n')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nResults ---------------------------------------')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nSeating estimation ----------------------------')\n OutputTextFile.write('\\nNumber of passengers: ' + str(out.pass_nb))\n OutputTextFile.write('\\nNumber of toilet: ' + str(int(out.toilet_nb)))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nSuggested crew members ------------------------')\n OutputTextFile.write('\\nTotal crew members: ' + str(out.crew_nb))\n OutputTextFile.write('\\nNumber of cabin crew members: '\n + str(out.cabin_crew_nb))\n OutputTextFile.write('\\nNumber of pilots: ' + str(adui.PILOT_NB))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nEngine estimation -----------------------------')\n OutputTextFile.write('\\nNumber of engines: ' + str(ed.NE))\n OutputTextFile.write('\\nSingle engine mass [kg]: ' + str(int(ed.en_mass)))\n OutputTextFile.write('\\nSingle engine maximum take off thrust [kN]: '\n + str(int(round(ed.max_thrust,0))))\n OutputTextFile.write('\\nThrust specific fuel consumption in cruise'\\\n + ' [1/hr]: ' + str(ed.TSFC_CRUISE))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nMasses estimation -----------------------------')\n OutputTextFile.write('\\nSystems mass [kg]: '\\\n + str(int(round(mw.mass_systems))))\n OutputTextFile.write('\\nStructure mass [kg]: '\\\n + str(int(round(mw.mass_structure))))\n OutputTextFile.write('\\nEngines mass [kg]: '\\\n + str(int(round(mw.mass_engines))))\n OutputTextFile.write('\\nMaximum payload mass [kg]: '\\\n + str(int(round(mw.mass_payload))))\n OutputTextFile.write('\\nMaximum passengers mass [kg]: '\\\n + str(int(round(mw.mass_pass))))\n 
OutputTextFile.write('\\nMaximum fuel mass with max passengers [kg]: '\\\n + str(int(round(mw.mass_fuel_maxpass))))\n OutputTextFile.write('\\nMaximum fuel mass with no passengers [kg]: '\\\n + str(int(round(mw.mass_fuel_max))))\n OutputTextFile.write('\\nMaximum fuel volume with no passengers [l]: '\\\n + str(int(round(mw.mass_fuel_max/0.8,3))))\n OutputTextFile.write('\\nMaximum take off mass [kg]: '\\\n + str(int(round(mw.maximum_take_off_mass))))\n OutputTextFile.write('\\nOperating empty mass [kg]: '\\\n + str(int(round(mw.operating_empty_mass))))\n OutputTextFile.write('\\nZero fuel mass [kg]: '\\\n + str(int(round(mw.zero_fuel_mass))))\n OutputTextFile.write('\\nWing loading [kg/m^2]: '\\\n + str(int(round(out.wing_loading))))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\n-----------------------------------------------')\n ### Closing Text File\n OutputTextFile.close()\n\n return()",
"def writeIMPACT(filename,beam,lattice=[]):\n beamStrList=beam2str(beam) \n latticeStrList=lattice2str(lattice)\n \n \n f=open(filename,'w') \n f.writelines(beamStrList)\n f.writelines(latticeStrList)\n f.close()",
"def writeToFile(self, outf):\n\t\t#We want to end up with lines of no more than 8 words, where each word\n\t\t#is in the form 0x1234, separated by commas. Each line is separated by\n\t\t#a new line and a tab, and started by a dat code.\n\t\tinf = open(self.real_path, 'rb')\n\t\toutf.write(self.labels.start + \":\\n\\tdat \")\n\t\tword_count = 0 #How many words are on the current line\n\t\tword = inf.read(2) #Read 16 bits at a time\n\t\twhile word:\n\t\t\tword = byte_to_hex(word) #Convert each word to hex\n\t\t\tl = len(word) \n\t\t\tif l < 4: #Is each word 4 characters long?\n\t\t\t\tword += \"0\" * (4-l) #If not, pad it out with 0s\n\t\t\toutf.write(\"0x\"+word)\n\t\t\tword_count += 1 #There's one more word on the line\n\t\t\t\n\t\t\tword = inf.read(2) #Read 16 more bits\n\t\t\tif word: #If we read anything from the file\n\t\t\t\tif word_count >= 8: #If it's the end of the line, write a new line\n\t\t\t\t\toutf.write(\"\\n\\tdat \")\n\t\t\t\t\tword_count = 0\n\t\t\t\telse: #Else it's the middle of a line\n\t\t\t\t\toutf.write(\", \")\n\t\tinf.close()\n\t\toutf.write(\"\\n\"+self.labels.end + \":\\n\\n\")",
"def save_file(self, out_file=\"out_file.bias\"):\n fid = open(out_file, \"w\")\n fid.writelines(\"# BIAS PARAMETER FORMAT\\n\")\n fid.writelines(\"# fragment leng: 5 (mean, std, sum_fl, sum_fl^2, reads), line 5\\n\")\n fid.writelines(\"# position bias: 5*20*4 (name, b5, b3, u5, u3), line 6-105\\n\")\n fid.writelines(\"# sequence bias: 744*4 (name, b5, b3, u5, u3), line 106-849\\n\")\n fid.writelines(\"%.2f\\t%.2f\\t%.2e\\t%.2e\\t%.0f\\n\" %(self.flen_mean, self.flen_std,\n self.flen_sum1, self.flen_sum2, self.read_num))\n for i in range(self.pos5_bias.shape[0]):\n for j in range(self.pos5_bias.shape[1]):\n aLine = (\"%.0f-%.0f|%d\\t%.2e\\t%.2e\\t%.2e\\t%.2e\\n\"\n %(self.percentile[i,0], self.percentile[i,1], j, self.pos5_bias[i,j], \n self.pos3_bias[i,j], self.pos5_unif[i,j], self.pos3_unif[i,j]))\n fid.writelines(aLine)\n for i in sorted(self.base_chain.keys(), key=float):\n for j in range(len(self.base_chain[i])):\n aLine = (\"%s|%s\\t%.2e\\t%.2e\\t%.2e\\t%.2e\\n\"\n %(i, self.base_chain[i][j], self.seq5_bias[i][j], \n self.seq3_bias[i][j], self.seq5_unif[i][j], self.seq3_unif[i][j]))\n fid.writelines(aLine)\n fid.close()",
"def writeOutputFull( filename, perf ):\n\twith open(filename, 'w') as fout:\n\t\tfout.write(\"MATB\\n\")\n\t\twriteOutputSingle( fout, perf['matb'] )\n\t\t\n\t\tfout.write('\\n')\n\t\tfout.write('\\n')\n\t\tfout.write('\\n')\n\t\t\n\t\tfout.write(\"RanTask\\n\")\n\t\twriteOutputSingle( fout, perf['rantask'] )",
"def to_struct_file(self, f):\n if isinstance(f, str):\n f = open(f,'w')\n f.write(\"STRUCTURE {0}\\n\".format(self.name))\n f.write(\" NUGGET {0}\\n\".format(self.nugget))\n f.write(\" NUMVARIOGRAM {0}\\n\".format(len(self.variograms)))\n for v in self.variograms:\n f.write(\" VARIOGRAM {0} {1}\\n\".format(v.name,v.contribution))\n f.write(\" TRANSFORM {0}\\n\".format(self.transform))\n f.write(\"END STRUCTURE\\n\\n\")\n for v in self.variograms:\n v.to_struct_file(f)",
"def write_flatsurf_file(input_file_name, output_input_file_name, phase1, phase2, T, IFT, IFT_write_length, phase_types, max_depth):\n max_depth_str = \"\"\n if phase_types[0] == \"S\" or phase_types[1] == \"S\":# or phase_types[0] == \"G\" or phase_types[1] == \"G\":\n max_depth_str = \"maxdepth={} \".format(max_depth)\n \n with open(input_file_name+\".inp\", \"r\") as file: # Read the inital input file\n lines = file.readlines()\n # Create output_input_file_name.inp file and write all lines except the last from initial file and write new last line\n with open(output_input_file_name+\".inp\", \"w\") as output: \n output.writelines(lines[0])\n output.writelines(max_depth_str+\" \"+lines[1])\n output.writelines(lines[2:-1]) # All lines except the last\n # Last line \n (output.write(\"tk={0} FLATSURF xf1={{{1}}} xf2={{{2}}} IGNORE_CHARGE IFT={3:.{4}f} \\n\".\n format(T, \" \".join(map(str,phase1)), \" \".join(map(str,phase2)), IFT, IFT_write_length)))\n return",
"def __shxRecords(self):\r\n f = self.__getFileObj(self.shx)\r\n f.seek(100)\r\n for i in range(len(self._shapes)):\r\n f.write(pack(\">i\", self._offsets[i]/2))\r\n f.write(pack(\">i\", self._lengths[i]))",
"def saveAsLM(self, path):\n if not path.endswith(\".lm\"):\n path += \".lm\"\n f = open(path, 'w', encoding=self.enc)\n f_lab = open(path+\".lab\", 'w', encoding=self.enc)\n f.write(\"#SpeechMark Landmark File\\n\")\n f.write(\"#SMPRODUCT: TGProcess.py\\n\")\n f.write(\"#SMVERSION: 1\\n\")\n f.write(\"#LMVERSION: 2013-03-26\\n\")\n f.write(\"#WAVEFORM NAME: \"+self.waveformName+\"\\n\")\n f.write(\"#WAVEFORM CHECKSUM: \"+self.waveformChecksum+\"\\n\")\n f.write(\"#FILE CREATED:\"+strftime(\"%m/%d/%Y %H:%M:%S\")+\"\\n\")\n f.write(\"#--------------------------------------------------------------\\n\")\n f.write(\"#\\n\")\n #condense tiers into single list\n items = [(item.mark.replace(\" \",\"_\"), \"%.3f\" % float(item.time)) for tier in self.tiers for item in tier if type(item)==Point]\n items.sort(key=lambda item: item[1])\n last_time = \"0\"\n #write items to both files\n for item in items:\n f.write(item[1]+\" \"+item[0]+\"\\n\")\n f_lab.write(last_time + \" \" + item[1] + \" \" + item[0]+\"\\n\")\n last_time = item[1]",
"def generateSymsFiles(self):\n\n self.isymsFileHandle = open(self.isymsFile, 'w')\n self.isymsFileHandle.write(\"- 0\")\n\n # FSAs have only one field, hence 2 by default is added\n self.fieldFileDict = {2: self.isymsFileHandle}\n\n # FSTs have two fields\n if self.fstype == \"fst\":\n self.osymsFileHandle = open(self.osymsFile, 'w')\n self.osymsFileHandle.write(\"- 0\")\n self.fieldFileDict[3] = self.osymsFileHandle\n\n # Read the raw text file\n with open(self.fpath, 'r') as fsfiletxt:\n lines = fsfiletxt.readlines()\n lines = [line.strip().split(' ') for line in lines]\n\n for index in self.fieldFileDict:\n fh = self.fieldFileDict[index]\n field_count = 1\n for line in lines:\n try:\n field = line[index]\n except:\n pass\n else:\n fh.write(\"{} {}\".format(field_count + 1, field))\n fh.close()",
"def write(s,filename,header=\"Opacity file written by optool.particle.write\"):\n\n if (s.np>1):\n raise TypeError('Writing is not supported for multi-particle objects')\n try:\n wfile = open(filename, 'w')\n except:\n raise RuntimeError('Cannot write to file: '+filename)\n\n headerlines = header.splitlines()\n for i in range(len(headerlines)):\n wfile.write(\"# %s\\n\" % headerlines[i])\n if s.scat:\n wfile.write(' 0\\n')\n wfile.write(' %d\\n' % s.nlam)\n wfile.write(' %d\\n' % s.nang)\n wfile.write('\\n')\n else:\n wfile.write(' 3\\n')\n wfile.write(' %d\\n' % s.nlam)\n \n for i in range(s.nlam):\n # write the lambda grid and the opacities\n wfile.write(' %15.5e %15.5e %15.5e %15.5e\\n' % (s.lam[i],s.kabs[0,i],s.ksca[0,i],s.gsca[0,i]))\n \n if s.scat:\n # we have a scattering matrix\n wfile.write('\\n')\n # Write the angular grid\n for i in range(s.nang):\n wfile.write(\"%9.2f\\n\" % s.scatang[i])\n wfile.write('\\n')\n # Write the scattering matrix\n for il in range(s.nlam):\n for ia in range(s.nang):\n wfile.write(' %15.5e %15.5e %15.5e %15.5e %15.5e %15.5e\\n' %\n (s.f11[0,il,ia],s.f12[0,il,ia],s.f22[0,il,ia],\n s.f33[0,il,ia],s.f34[0,il,ia],s.f44[0,il,ia]))\n wfile.close()",
"def saveToStream( self, out, html = False ):\n\n headings = ( 'bin', 'binStart', 'binEnd', 'binGap', 'count', 'countNormed', 'countCumul', 'countNormedCumul',\n 'hist', 'histCumul' )\n \n binRecords = []\n if not sum( self.bin2count.values() ):\n # if there are no data points, at least record\n # the bin size.\n binRecords = [ [ 0, self.binShift, self.binShift + self.binSize, 0, 0, 0.0, 0, 1.0, '*', '*' ] ]\n assert len( binRecords[0] ) == len( headings )\n else: # if we have at least one non-empty bin\n numVals =self.getNumVals()\n countCumulative = 0\n prevBinStart = None\n maxBin = max( self.bin2count.values() )\n for bin, count in sorted( self.bin2count.items() ):\n if not count: continue\n binStart = self.binShift + bin * self.binSize\n countCumulative += count\n numStars = int( 100.0 * ( count / maxBin ) )\n binRecord = [ bin, binStart, binStart + self.binSize,\n ( binStart - prevBinStart ) if prevBinStart != None else 0,\n count, '%.2f' % ( count / numVals ), countCumulative,\n '%.2f' % ( countCumulative / numVals ), '*' * numStars,\n '*' * int( 100.0 * ( countCumulative / numVals ) ) ]\n assert len( binRecord ) == len( headings )\n binRecords.append( binRecord )\n prevBinStart = binStart\n \n\n if html:\n with htag('table', border = 1):\n with htag('thead'):\n with htag('tr'):\n for heading in headings:\n with htag('th'):\n print(heading)\n with htag('tbody'):\n for r in binRecords:\n with htag('tr'):\n for val in map( str, r ):\n with htag('td'):\n print(val)\n else: \n\n tabwrite( out, *headings )\n isFirst = True\n for r in binRecords:\n if not isFirst: out.write( '\\n' )\n tabwriten( out, *r )\n isFirst = False",
"def writeSurfaces(self, fname):\n file = open(fname, 'a')\n file.write('// --- Wing line loops and surfaces ---\\n')\n for i in range(0, self.n-1):\n file.write('// -- Planform {0:d}\\n'.format(i))\n for j in range(0, self.surN[i].shape[0]):\n file.write('Line Loop({0:d}) = {{{1:d},{2:d},{3:d},{4:d}}};\\n'.format(self.surN[i][j], self.linaN[i][j], self.linpN[i][np.mod(j+1,self.linpN[i].shape[0])], -self.linaN[i+1][j], -self.linpN[i][j]))\n for j in range(0, self.surN[i].shape[0]):\n file.write('Surface({0:d}) = {{-{0:d}}};\\n'.format(self.surN[i][j]))\n file.write('\\n')\n file.close()",
"def output_txt(IS_DOUBLE_FLOOR, out, mw, ind, ui, NAME):\n out_name = 'ToolOutput/' + NAME + '/' + NAME\\\n + '_Weight_module.out'\n OutputTextFile = open(out_name, 'w')\n OutputTextFile.write('\\n###############################################')\n OutputTextFile.write('\\n###### AIRCRAFT WEIGHT ESTIMATION MODULE ######')\n OutputTextFile.write('\\n##### OUTPUTS #####')\n OutputTextFile.write('\\n###############################################')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nAircraft: ' + NAME )\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\n')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nAircraft Geometry Evaluated -------------------')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nNose length [m]: '\\\n + str(round(ind.nose_length,3)))\n OutputTextFile.write('\\nTail length [m]: '\\\n + str(round(ind.tail_length,3)))\n OutputTextFile.write('\\nCabin length [m]: '\\\n + str(round(ind.cabin_length,3)))\n OutputTextFile.write('\\nCabin width [m]: '\\\n + str(round(ind.cabin_width,3)))\n OutputTextFile.write('\\nCabin Area [m^2]: '\\\n + str(round(ind.cabin_area,3)))\n if IS_DOUBLE_FLOOR == 1:\n OutputTextFile.write('\\nThe aircraft has a full 2nd floor')\n elif IS_DOUBLE_FLOOR == 2:\n OutputTextFile.write('\\nThe aircraft has a small 2nd floor')\n else:\n OutputTextFile.write('\\nThe aircraft has 1 floor')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nUser Input and Default Values -----------------')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nseat length [m]: ' + str(ind.seat_length))\n OutputTextFile.write('\\nseat width [m]: ' + str(ind.seat_width))\n OutputTextFile.write('\\naisle width [m]: ' + str(ind.aisle_width))\n if ui.MAX_PAYLOAD > 0:\n OutputTextFile.write('\\nMaximum payload allowed [kg]: '\\\n + str(ui.MAX_PAYLOAD))\n if ui.MAX_FUEL_VOL > 0:\n OutputTextFile.write('\\nMaximum amount of fuel [kg]: '\\\n + str(ui.MAX_FUEL_VOL*ui.FUEL_DENSITY))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nResults ---------------------------------------')\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nSeating estimation ----------------------------')\n OutputTextFile.write('\\nNumber of abreasts: ' + str(out.abreast_nb))\n OutputTextFile.write('\\nNumber of row: ' + str(out.row_nb))\n OutputTextFile.write('\\nNumber of passengers: ' + str(out.pass_nb))\n OutputTextFile.write('\\nNumber of lavatory: ' + str(int(out.toilet_nb)))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nCrew estimation -------------------------------')\n OutputTextFile.write('\\nTotal crew members: ' + str(out.crew_nb))\n OutputTextFile.write('\\nNumber of cabin crew members: '\n + str(out.cabin_crew_nb))\n OutputTextFile.write('\\nNumber of pilots: ' + str(out.PILOT_NB))\n OutputTextFile.write('\\n-----------------------------------------------')\n OutputTextFile.write('\\nMasses estimation -----------------------------')\n OutputTextFile.write('\\nMaximum payload mass [kg]: '\\\n + str(int(round(mw.mass_payload,0))))\n OutputTextFile.write('\\nMaximum passengers mass [kg]: '\\\n + 
str(int(round(out.pass_nb * ui.MASS_PASS,0))))\n if mw.mass_cargo:\n OutputTextFile.write('\\nMaximum extra payload mass [kg]: '\\\n + str(int(round(mw.cargo,0))))\n OutputTextFile.write('\\nMaximum fuel mass with max passengers [kg]: '\\\n + str(int(round(mw.mass_fuel_maxpass,0))))\n OutputTextFile.write('\\nMaximum fuel mass with no passengers [kg]: '\\\n + str(int(round(mw.mass_fuel_max,))))\n OutputTextFile.write('\\nMaximum fuel volume with no passengers [l]: '\\\n + str(int(round(\\\n mw.mass_fuel_max/ui.FUEL_DENSITY*1000,0))))\n OutputTextFile.write('\\nMaximum take off mass [kg]: '\\\n + str(int(round(mw.maximum_take_off_mass,0))))\n OutputTextFile.write('\\nOperating empty mass [kg]: '\\\n + str(int(round(mw.operating_empty_mass,0))))\n OutputTextFile.write('\\nZero fuel mass [kg]: '\\\n + str(int(round(mw.zero_fuel_mass,0))))\n OutputTextFile.write('\\nWing loading [kg/m^2]: '\\\n + str(int(round(out.wing_loading))))\n\n ### Closing Text File\n OutputTextFile.close()\n\n return()",
"def write_spc(self):\n\n # Do some checks on the sener and wener arrays that should be set.\n if len(self.sener) == 0:\n print(\"Error: sener array not initialized yet.\")\n sys.exit(1)\n\n if len(self.sener) != self.neg:\n print(\"Error: sener array has an incorrect size.\")\n sys.exit(1)\n\n if len(self.wener) == 0:\n print(\"Error: wener array not initialized yet.\")\n sys.exit(1)\n\n if len(self.wener) != self.neg:\n print(\"Error: wener array has an incorrect size.\")\n sys.exit(1)\n\n # Open the output file\n try:\n f = open(self.fspc, 'w')\n except IOError:\n print(\"Error: unable to open output file.\")\n sys.exit(1)\n\n # Write the number of model bins to the output file\n f.write(str(self.neg) + '\\n')\n\n # Write the sener and wener columns to the output file\n for i in numpy.arange(self.neg):\n f.write(str(self.sener[i]) + ' ' + str(self.wener[i]) + '\\n')\n\n # Close the file\n f.close()\n return",
"def write_spice(self, cell, f=None):\n spice.Writer(cell).write()",
"def init_axsf(filename, niter, xsf) :\n axsf_file = open(filename, 'w')\n axsf_file.write('ANIMSTEPS ' + str(niter) + '\\n')\n axsf_file.write('CRYSTAL\\n')\n axsf_file.write('PRIMVEC\\n')\n for row in range(xsf.lat_vec.shape[0]) :\n axsf_file.write(str(xsf.lat_vec[row, 0]) + ' ' +\n str(xsf.lat_vec[row, 1]) + ' ' +\n str(xsf.lat_vec[row, 2]) + '\\n')\n return axsf_file",
"def dump_model_to_file(model):\n anchor_num = 100\n fields_num = 7\n table_name = ['dump_model/deep_fm_' + str(i) for i in range(anchor_num)]\n\n for i in range(anchor_num):\n print('dumping model {}'.format(i))\n with open(table_name[i], 'w') as f:\n # anchor point\n value = model.get('anchor_points')[i]\n for v in value.tolist():\n f.write(str(v) + ' ')\n f.write('\\n')\n\n # fm_bias\n value = model.get('bias').tolist()[i][0]\n f.write(str(value) + '\\n')\n\n # fm_embedding\n for j in range(fields_num):\n value = model.get('fm_second_order_embeddings.{}.{}.weight'.format(i, j))\n row, col = value.shape\n value = value.tolist()\n for m in range(row):\n for n in range(col):\n f.write(str(value[m][n]) + ' ')\n f.write('\\n')\n\n # input layer weight\n value = model.get('linear_1.{}.weight'.format(i))\n row, col = value.shape\n value = value.tolist()\n for m in range(row):\n for n in range(col):\n f.write(str(value[m][n]) + ' ')\n\n f.write('\\n')\n\n # input layer bias\n value = model.get('linear_1.{}.bias'.format(i))\n value = value.tolist()\n for j in range(len(value)):\n f.write(str(value[j]) + ' ')\n\n f.write('\\n')\n\n # hidden layer weight\n value = model.get('linear_2.{}.weight'.format(i))\n row, col = value.shape\n value = value.tolist()\n for m in range(row):\n for n in range(col):\n f.write(str(value[m][n]) + ' ')\n\n f.write('\\n')\n\n # hidden layer bias\n value = model.get('linear_2.{}.bias'.format(i))\n value = value.tolist()\n for j in range(len(value)):\n f.write(str(value[j]) + ' ')"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
generate and show mesh stresses using bmpost
|
def show_mesh(self):
self.create_graph()
self.assign_load_case()
# self.assign_wind_loads()
self.apply_stresses()
self.create_slf_file()
self.test_slf_file()
self.parse_results()
self.show_analysis()
|
[
"def exchange_bmesh_data(msh_active,msh_paste):\r\n import bmesh\r\n bm = bmesh.new()\r\n bm.from_mesh(msh_paste)\r\n bm.to_mesh(msh_active)\r\n msh_active.update()",
"def _generate_mesh(self):\n self._mesh_points = self._make_pos()",
"def create_mesh_data(self):\n\n # if len(self.physical_surfaces) > 1:\n # self.geom.boolean_union(self.physical_surfaces)\n\n self.__physical_surfaces__()\n\n directory = os.getcwd() + '/debug/gmsh/'\n\n mesh_file = '{}{}.msh'.format(directory, self.filename)\n geo_file = '{}{}.geo'.format(directory, self.filename)\n vtk_file = '{}{}.vtu'.format(directory, self.filename)\n\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n mesh_data = pygmsh.generate_mesh(\n self.geom, verbose=False, dim=2,\n prune_vertices=False,\n remove_faces=False,\n geo_filename=geo_file\n )\n\n # meshio.write(mesh_file, mesh_data)\n # meshio.write(vtk_file, mesh_data)\n\n return mesh_data",
"def edit_singularity_mesh(pattern):",
"def generate_stl(idx=None): \n run_cmd(\"vsp -script scripts/exportstl.vscript\")\n offset_zaxis(15.0)\n\n if idx == None:\n planename = \"planes/plane.png\"\n else:\n planename = \"planes/plane\"+str(idx)+\".png\"\n\n run_cmd(\"openscad scripts/genpng.scad --imgsize=500,500 -o \"+planename)",
"def trimesh_to_binary(mesh, shape):\n # Where to sample\n z_locs = np.arange(shape[2])+.5 \n # Take sections\n sections = [_take_2d_slice(mesh, z) for z in z_locs]\n # Convert to a binary z-stack \n z_stack = [_draw_Path2D(path, shape[:2]) for path in sections]\n z_stack = np.swapaxes(np.dstack(z_stack), 0, 1)\n return z_stack",
"def createBMesh(self):\n self.bm = bmesh.new()\n self.bm.from_mesh(self.mesh)\n bmesh.ops.triangulate(self.bm, faces=self.bm.faces)",
"def reconstruct_mesh(self):\n\n # NOTE: Before drawing the skeleton, create the materials once and for all to improve the\n # performance since this is way better than creating a new material per section or segment\n nmv.builders.create_skeleton_materials(builder=self)\n\n # Verify and repair the morphology, if required\n result, stats = nmv.utilities.profile_function(self.verify_morphology_skeleton)\n self.profiling_statistics += stats\n\n # Apply skeleton - based operation, if required, to slightly modify the skeleton\n result, stats = nmv.utilities.profile_function(\n nmv.builders.modify_morphology_skeleton, self)\n self.profiling_statistics += stats\n\n # Build the soma, with the default parameters\n result, stats = nmv.utilities.profile_function(nmv.builders.reconstruct_soma_mesh, self)\n self.profiling_statistics += stats\n\n # Build the arbors and connect them to the soma\n if self.options.mesh.soma_connection == nmv.enums.Meshing.SomaConnection.CONNECTED:\n\n # Build the arbors\n result, stats = nmv.utilities.profile_function(self.build_arbors, True)\n self.profiling_statistics += stats\n\n # Connect to the soma\n result, stats = nmv.utilities.profile_function(\n nmv.builders.connect_arbors_to_soma, self)\n self.profiling_statistics += stats\n\n # Build the arbors only without any connection to the soma\n else:\n # Build the arbors\n result, stats = nmv.utilities.profile_function(self.build_arbors, False)\n self.profiling_statistics += stats\n\n # Tessellation\n result, stats = nmv.utilities.profile_function(nmv.builders.decimate_neuron_mesh, self)\n self.profiling_statistics += stats\n\n # Surface roughness\n result, stats = nmv.utilities.profile_function(\n nmv.builders.add_surface_noise_to_arbor, self)\n self.profiling_statistics += stats\n\n # Add the spines\n result, stats = nmv.utilities.profile_function(nmv.builders.add_spines_to_surface, self)\n self.profiling_statistics += stats\n\n # Join all the objects into a single object\n result, stats = nmv.utilities.profile_function(\n nmv.builders.join_mesh_object_into_single_object, self)\n self.profiling_statistics += stats\n\n # Transform to the global coordinates, if required\n result, stats = nmv.utilities.profile_function(\n nmv.builders.transform_to_global_coordinates, self)\n self.profiling_statistics += stats\n\n # Collect the stats. of the mesh\n result, stats = nmv.utilities.profile_function(nmv.builders.collect_mesh_stats, self)\n self.profiling_statistics += stats\n\n # Done\n nmv.logger.header('Mesh Reconstruction Done!')\n nmv.logger.log(self.profiling_statistics)\n\n # Write the stats to file\n nmv.builders.write_statistics_to_file(builder=self, tag='skinning')",
"def __meshing__(self, unit_list):\n self.mesh_grid = []\n for key, zone in unit_list.items():\n counter = 0\n while counter < zone.L:\n self.mesh_grid.append(key)\n counter += 1",
"def copyBmesh(src, sca, rot, loc): \r\n options = bpy.types.Scene.dmh\r\n source = src.copy()\r\n start_index = len(options.LIST_VERT)\r\n \r\n # transform\r\n source.transform(matrix=sca)\r\n m = loc * rot\r\n source.transform(matrix=m)\r\n\r\n # copying data\r\n for v in source.verts:\r\n options.LIST_VERT.append((v.co.x,v.co.y,v.co.z))\r\n for f in source.faces:\r\n if len(f.verts) == 3:\r\n options.LIST_FACE.append([f.verts[0].index+start_index,f.verts[1].index+start_index,f.verts[2].index+start_index])\r\n elif len(f.verts) == 4:\r\n options.LIST_FACE.append([f.verts[0].index+start_index,f.verts[1].index+start_index,f.verts[2].index+start_index,f.verts[3].index+start_index])",
"def display_mesh_info(self):\n\n print(\"Mesh Statistics:\")\n print(\"--{0} nodes\".format(self.num_nodes))\n print(\"--{0} elements\".format(len(self.elements)))\n\n regions = max(self.mesh_attributes) + 1\n text = \"--{0} region\".format(regions)\n\n if regions == 1:\n text += \"\\n\"\n else:\n text += \"s\\n\"\n\n print(text)",
"def create_slf_file(self):\n mesh = open(self.name, 'w') \n mesh.write('numel numnp nmat nmode (This is for a beam bridge)\\n')\n mesh.write(str(len(self.edge_list))+'\\t'+str(len(self.node_list))\n + '\\t'+str(len(self.beams)) + '\\t0\\n')\n mesh.write('matl no., E mod, Poiss. Ratio,density, Area, Iy, Iz\\n')\n tables = open('./tables/CHSTables.txt', 'r')\n for i,beam in enumerate(self.beams):\n mesh.write(str(i)+' '+str(self.beams[i]['emod'])+'\\t0.3000\\t'\n + str(self.beams[i]['density'])+'\\t'+str(self.beams[i]['area'])\n + '\\t'+str(self.beams[i]['iy'])+'\\t'+str(self.beams[i]['ix']) + '\\n') \n mesh.write('el no.,connectivity, matl no, element type\\n')\n for i, edge in enumerate(self.edge_list): \n mesh.write(str(i)+'\\t'+str(edge['pt_a'])+'\\t'+str(edge['pt_b'])\n + '\\t'+str(edge['material'])+'\\t2 \\n')\n mesh.write('node no., coordinates\\n')\n for node in self.node_list:\n mesh.write(node['id']+'\\t'+str(node['x'])+'\\t'+str(node['y'])+'\\t'+str(node['z'])+\"\\n\")\n mesh.write(\"element with specified local z axis: x, y, z component\\n -10\\n\")\n mesh.write('prescribed displacement x: node disp value\\n')\n for node in self.fixed_list:\n# if node[1] == True: # un-comment when dealing with fixed-roller structures\n mesh.write(node[0]['id']+\"\\t0.0\\n\")\n mesh.write('-10\\nprescribed displacement y: node disp value\\n')\n for node in self.fixed_list:\n mesh.write(node[0]['id']+\"\\t0.0\\n\")\n mesh.write('-10\\nprescribed displacement z: node disp value\\n')\n for node in self.fixed_list:\n mesh.write(node[0]['id']+\"\\t0.0\\n\")\n mesh.write('-10\\nprescribed angle phi x: node angle value\\n')\n for node in self.fixed_list:\n# if node[1] == True: # un-comment when dealing with fixed-roller structures\n mesh.write(node[0]['id']+\"\\t0.0\\n\")\n mesh.write('-10\\nprescribed angle phi y: node angle value\\n')\n for node in self.fixed_list:\n mesh.write(node[0]['id']+\"\\t0.0\\n\")\n mesh.write('-10\\nprescribed angle phi z: node angle value\\n')\n for node in self.fixed_list:\n mesh.write(node[0]['id']+\"\\t0.0\\n\")\n mesh.write('-10\\nnode with point load x, y, z and 3 moments phi x, phi y, phi z\\n') \n if self.BROKEN:\n for node in self.nodeselfloads: \n trans = 0\n broken_long = 0\n for thing in self.load_nodes:\n if thing == node[0]:\n node[1] = node[1] + self.vertical_cable_load\n trans = self.transverse_cable_load \n if self.GROUND_BROKEN:\n for thing in self.ground_node:\n if thing == node[0]:\n node[1] = node[1] + self.vertical_ground_load_broken\n trans = self.transverse_ground_load\n broken_long = self.longitudinal_ground_load\n for thing in self.break_node:\n if thing == node[0]:\n node[1] = node[1] + self.vertical_cable_load_broken\n broken_long = self.longitudinal_cable_load\n trans = self.transverse_cable_load\n else:\n for thing in self.ground_node:\n if thing == node[0]:\n node[1] = node[1] + self.vertical_ground_load\n trans = self.transverse_ground_load\n for thing in self.break_node:\n if thing == node[0]:\n node[1] = node[1] + self.vertical_cable_load_broken\n broken_long = self.longitudinal_cable_load \n trans = self.transverse_cable_load\n mesh.write(str(node[0])+'\\t'+str(broken_long)+'\\t'+str(trans)+'\\t-'+str(round(node[1],5))+'\\t0\\t0\\t0\\n')\n else:\n for node in self.nodeselfloads: \n trans = 0\n for yolk in self.load_nodes:\n if yolk == node[0]:\n node[1] = node[1] + self.vertical_cable_load\n trans = self.transverse_cable_load\n for thong in self.ground_node:\n if thong == node[0]:\n node[1] = node[1] + self.vertical_ground_load\n trans = 
self.transverse_ground_load\n mesh.write(str(node[0])+'\\t0\\t'+str(trans)+'\\t-'+str(round(node[1],5))+'\\t0\\t0\\t0\\n')\n mesh.write('-10\\nelement with distributed load in global beam y and z coordinates\\n') \n mesh.write('-10\\nelement no. and gauss pt. no. with local stress vector xx and moment xx,yy,zz\\n-10')\n mesh.close()",
"def create_coord_map(obj):\n mesh = obj.data\n vert_list = mesh.vertices\n \n # vcos = [obj.matrix_world @ v.co for v in vert_list]\n \n # x, y, z = [[v[i] for v in vcos] for i in range(3)]\n # min_x, min_y, min_z = min(x), min(y), min(z)\n # max_x, max_y, max_z = max(x), max(y), max(z)\n # size_x, size_y, size_z = max(x) - min(x), max(y) - min(y), max(z) - min(z)\n \n # get the color map to create as coordinate map\n if mesh.vertex_colors:\n color_map = mesh.vertex_colors.active\n else:\n color_map = mesh.vertex_colors.new()\n\n\n # print(\"MINIMUMS\", min_x, min_y, min_z)\n # print(\"MAXIMUMS\", max_x, max_y, max_z)\n # print(\"SIZES\", size_x, size_y, size_z)\n \n max_r, max_g, max_b = 0, 0, 0\n\n allrgbs = []\n\n # apply the corresponding color to each vertex\n i = 0\n for poly in mesh.polygons:\n for idx in poly.loop_indices: #vertices\n loop = mesh.loops[idx]\n v = vert_list[loop.vertex_index]\n \n r = -v.co.y\n g = v.co.z # NOCS uses y up world\n b = -v.co.x\n \n # r = v.co.x\n # g = v.co.z # NOCS uses y up world\n # b = v.co.y\n color_map.data[i].color = (r,g,b,0) # rgba\n i += 1\n \n #print(\"Scales:\", 2*np.abs(max_r), 2*np.abs(max_g), 2*np.abs(max_b))\n #print(\"Scales:\", max_r - (1-max_r), max_g - (1-max_g), max_b - (1-max_b))\n mat = bpy.data.materials.new('nocs_material')\n \n # deactivate shadows\n mat.shadow_method = 'NONE'\n \n # set to vertex paint mode to see the result\n #bpy.ops.object.mode_set(mode='VERTEX_PAINT')\n \n obj.data.materials.clear()\n\n if mesh.materials:\n print(\"first material will be nocs: bad i think\")\n mesh.materials[0] = mat\n else:\n print(\"add material: good i think\")\n mesh.materials.append(mat)",
"def _unfiltered_background_mesh(self):\n self._bkg_stats = self.bkg_estimator(self._box_data, axis=1)\n return self._make_mesh_image(self._bkg_stats)",
"def execute(self, context):\n \n settings = data.settings('terrain')\n self.detail = settings.heightmap_detail\n \n \n print('\\nMirage: Making Terrain from Heightmap' + ('-'*70)) \n print('Adding Grid')\n \n bpy.ops.mesh.primitive_grid_add(\n x_subdivisions = self.detail,\n y_subdivisions = self.detail,\n radius = math.sqrt(settings.size) / 2\n )\n \n\n texture = bpy.data.textures['Heightmap Texture'] \n image_name = bpy.path.basename(settings.heightmap_filepath)\n image = bpy.data.images[image_name]\n texture.image = image\n\n obj = context.object\n\n print('UV Unwraping')\n # This is the kind of thing that makes me hate context\n bpy.ops.object.mode_set(mode='EDIT')\n bpy.ops.uv.unwrap()\n bpy.ops.object.mode_set(mode='OBJECT') \n\n # Set up terrain and modifier\n obj.name = 'Terrain'\n displace = obj.modifiers.new(name='Heightmap',type='DISPLACE')\n displace.texture = texture\n displace.direction = 'Z'\n displace.texture_coords = 'UV'\n displace.strength = settings.heightmap_strength \n\n # Shade smooth\n obj.select = True\n context.scene.objects.active = obj\n bpy.ops.object.shade_smooth()\n\n # Making Vgroups here:\n # 1. Duplicate\n # 2. Apply the modifier\n # 3. Build some vgroups from that\n # 4. Delete temp duplicate\n \n \n print('Generating Vertex group')\n bm = vgroups.get_bmesh_from_heightmap(obj, context)\n \n vgroups.generate_height(obj, bm)\n vgroups.generate_slope(obj, bm)\n vgroups.convert_to_color_group(obj, 'Height')\n vgroups.convert_to_color_group(obj, 'Slope')\n \n bpy.ops.object.delete()\n obj.select = False\n\n return {\"FINISHED\"}",
"def testrun_random_surface(filename=\"IGESFile_random_surface.igs\"):\n system = IGEStorage()\n system.StartSection.Prolog = \" \"\n system.GlobalSection.IntegerBits = int(32)\n system.GlobalSection.SPMagnitude = int(38)\n system.GlobalSection.SPSignificance = int(6)\n system.GlobalSection.DPMagnitude = int(38)\n system.GlobalSection.DPSignificance = int(15)\n system.GlobalSection.MaxNumberLineWeightGrads = int(8)\n system.GlobalSection.WidthMaxLineWeightUnits = float(0.016)\n system.GlobalSection.MaxCoordValue = float(71)\n\n scale = 2.5\n # data = numpy.zeros(shape=(3,5), dtype=int)\n data = numpy.random.rand(100, 100)\n # print(\"random data generated:\", data.shape)\n\n for line_nr in range(0, data.shape[0]-1):\n # print(\"line number:\", line_nr)\n for row_nr in range(0, data.shape[1]-1):\n x1 = line_nr * scale\n x2 = (line_nr + 1) * scale\n y1 = row_nr * scale\n y2 = (row_nr + 1) * scale\n p1 = IGESPoint(x1, y1, float(data[line_nr][row_nr]))\n p2 = IGESPoint(x2, y1, float(data[line_nr+1][row_nr]))\n p3 = IGESPoint(x1, y2, float(data[line_nr][row_nr+1]))\n p4 = IGESPoint(x2, y2, float(data[line_nr+1][row_nr+1]))\n square = IGES.IGESRationalBSplineSurface(p1, p2, p3, p4)\n system.Commit(square)\n\n # print(\"save file:\", line_nr)\n system.save(filename)",
"def generate_mesh(self):\n length = self.length\n Nx = self.Nx\n Nz = self.Nz\n self.mesh = RectangleMesh(Point(0,0), Point(length, 1), Nx, Nz, \"left/right\")\n\n # Now deform top and bottom based on surface and base profiles\n coordinates = self.mesh.coordinates()\n surf = self.surf_fun(coordinates[:,0])\n bot = self.bot_fun(coordinates[:,0])\n thick = surf-bot\n coordinates[:,1] = coordinates[:,1]*thick + bot\n self.mesh.bounding_box_tree().build(self.mesh)",
"def show_mesh(self, show=True, visu2d=True):\n import matplotlib.pyplot as plt\n import matplotlib.cm as cm\n from mpl_toolkits.mplot3d import Axes3D\n coordx = self.get_mesh_coord(1)\n coordy = self.get_mesh_coord(2)\n tri = self.get_mesh_connectivity()\n bottom = self.get_data_value('WATER DEPTH', 0)\n fig = plt.figure()\n if visu2d:\n plt.tripcolor(coordx, coordy, tri, bottom,\n shading='flat', edgecolor='w', cmap=cm.terrain)\n plt.colorbar()\n else:\n axe = Axes3D(fig)\n axe.plot_trisurf(coordx, coordy, tri, bottom,\n cmap=cm.terrain, linewidth=0.1)\n plt.title('2D mesh (%d triangles, %d nodes) \\\n with the bottom elevation (m)' % (self.get_mesh_nelem(),\n self.get_mesh_npoin()))\n plt.xlabel('X-coordinate (m)')\n plt.ylabel('Y-coordinate (m)')\n if show:\n plt.show()\n return fig",
"def main(pixels):\n print(f\"Generating Mandelbrot Set with {pixels} pixels...\")\n\n img = mandelbrot(int(pixels), max_steps=100)\n plt.imshow(img, cmap=\"plasma\")\n plt.axis(\"off\")\n plt.savefig(\"mandelbrot.png\")\n plt.show()\n\n print(\"Mandelbrot set successfully generated!\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return a list of unique fact values
|
def unique_factvalues(raw_facts):
    factvalues = set()
for fact in raw_facts:
factvalues.add(fact.value)
return factvalues
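# A hypothetical usage example (fact objects are assumed to expose a .value
# attribute, as in pypuppet; the values shown are illustrative only):
# unique_factvalues(node.facts())  # -> {"bob", "x86_64", ...}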
|
[
"def get_unique_items():\r\n sheet_data = read_sheet_data(config.get(\"sheet1_title_range\"))\r\n return set(chain.from_iterable(sheet_data[\"values\"]))",
"def get_unique_elements(self, field: str) -> list:\n return self.properties.distinct(field)",
"def get_unique_values_serie(s):\r\n\r\n if s.dtype.name == \"category\":\r\n return s.cat.categories\r\n return np.unique(s.values)",
"def get_unique_sports():\n\n return set(TABLE_BETS['sport'].unique())",
"def get_values(data, attribute):\n return data[attribute].unique()",
"def unique_column_values(rows, column_name):\n u=[]\n for item in rows:\n u.append(item[column_name])\n ucv = set(u)\n return ucv",
"def list_of_values(self,feature):\n values=[]\n for ex in self.data:\n if ex.features[feature] not in values:\n values+=[ex.features[feature]]\n return values",
"def distinct_values(self, columns):\n data = [tuple(r) for r in self[:, columns].array.tolist()]\n result = set(data)\n result = {d[0] if len(d) == 1 else d for d in result}\n return result",
"def get_unique_countries():\n\n return set(TABLE_BETS['country'].unique())",
"def unique_set(data: List[str]) -> List[str]:\n # TODO: Add the source code for method f7",
"def get_unique_values(self, colname):\n return self.get_cached_value('uniques', colname,\n self.calc_unique_values)",
"def unique_frequencies(self):\n return iterkeys(self.frequency_groups)",
"def distinct_prime_factors(dis):\n answer = []\n n = 2\n\n while len(answer) < dis:\n pfs = exponents(prime_factors(n))\n if len(pfs) == dis == len(set(pfs)):\n answer.append((n, pfs))\n else:\n answer = []\n n += 1\n return answer",
"def unique_scenarios():\n return pd.read_csv(io[\"scenario_geco_path\"]).Scenario.unique()",
"def unique(kernels):\n r, s = list(), set()\n for kernel in kernels:\n if isinstance(kernel.length, list):\n key = tuple(kernel.length) + (kernel.scheme,)\n else:\n key = (kernel.length, kernel.scheme)\n if key not in s:\n s.add(key)\n r.append(kernel)\n return r",
"def uniq(clues):\n results = []\n\n get_diff = lambda c: c.diff\n classified = classify(clues, get_digest, get_diff)\n\n for section in sections(classified):\n results.append(merge(section))\n\n return results",
"def collect_reducer_set(values):\n return sorted(list(set(values)))",
"def factor_singles(self):\n factor_indexes = dict()\n for fact in self.meta_data[self.drop_column].unique().tolist():\n factor_indexes[fact] = self.meta_data[self.drop_column] == fact\n return factor_indexes",
"def get_unique_sums(target, count):\n sums = get_sums(target, count)\n unique_sums = set()\n for sum in sums:\n unique_sums.add(tuple(sorted(sum)))\n return list(unique_sums)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Convert the facts generator from pypuppet into a simple dict().
|
def facts2dict(raw_facts):
facts = {}
for fact in raw_facts:
facts[fact.name] = fact.value
return facts
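# A hypothetical example of the resulting mapping (fact objects are assumed
# to expose .name and .value attributes; the values shown are illustrative):
# facts2dict(node.facts())  # -> {"hostname": "bob", "kernel": "Linux"}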
|
[
"def get_facts(self):\n output = self.device.facts\n\n uptime = self.device.uptime or -1\n\n interfaces = junos_views.junos_iface_table(self.device)\n interfaces.get()\n interface_list = interfaces.keys()\n\n return {\n \"vendor\": \"Juniper\",\n \"model\": str(output[\"model\"]),\n \"serial_number\": str(output[\"serialnumber\"]),\n \"os_version\": str(output[\"version\"]),\n \"hostname\": str(output[\"hostname\"]),\n \"fqdn\": str(output[\"fqdn\"]),\n \"uptime\": float(uptime),\n \"interface_list\": interface_list,\n }",
"def get_extended_facts(self):\n extended_facts = {\n \"connection\": {\n \"hostname\": self.hostname, # hostname used to connect to the device\n \"username\": self.username,\n \"transport\": self.transport,\n \"port\": self.netmiko_optional_args.get('port', None),\n \"profile\": self.profile,\n \"device_type\": self.device.device_type\n }\n }\n\n extended_facts['dns'] = self.get_dns()\n extended_facts['iphelpers'] = self.get_iphelpers()\n extended_facts['logging'] = self.get_logging()\n extended_facts['tacacs'] = self.get_tacacs()\n extended_facts['aaa'] = self.get_aaa()\n extended_facts['ssh'] = self.get_ssh()\n extended_facts['vty'] = self.get_vty()\n\n return extended_facts",
"def host_facts_load(self):\n facts = {}\n factfiles = self.host_fact_files()\n for f in factfiles:\n facts[f[0]] = self.json_load(f[1])\n return facts",
"def construct_ansible_facts(response, ansible_params, paramgram, *args, **kwargs):\n\n facts = {\n \"response\": response,\n \"ansible_params\": scrub_dict(ansible_params),\n \"paramgram\": scrub_dict(paramgram),\n }\n\n if args:\n facts[\"custom_args\"] = args\n if kwargs:\n facts.update(kwargs)\n\n return facts",
"def populate_facts(self, connection, ansible_facts, data=None):\n facts = {}\n objs = []\n\n if not data:\n data = self.get_config(connection)\n\n # remove address_family configs from bgp_global\n bgp_global_config = []\n start = False\n self._af = False\n for bgp_line in data.splitlines():\n if not start:\n bgp_global_config.append(bgp_line)\n if \"address-family\" in bgp_line:\n start = True\n self._af = True\n if start and \"!\" in bgp_line:\n start = False\n\n # parse native config using the Bgp_global template\n bgp_global_parser = Bgp_globalTemplate(lines=bgp_global_config)\n objs = bgp_global_parser.parse()\n\n if objs:\n global_vals = objs.get(\"vrfs\", {}).pop(\"vrf_\", {})\n for key, value in iteritems(global_vals):\n objs[key] = value\n\n if \"vrfs\" in objs:\n objs[\"vrfs\"] = list(objs[\"vrfs\"].values())\n for vrf in objs[\"vrfs\"]:\n if \"neighbor\" in vrf:\n vrf[\"neighbor\"] = list(vrf[\"neighbor\"].values())\n if \"network\" in vrf:\n vrf[\"network\"] = list(vrf[\"network\"].values())\n vrf[\"network\"] = sorted(\n vrf[\"network\"], key=lambda k: k[\"address\"]\n )\n if \"aggregate_address\" in vrf:\n vrf[\"aggregate_address\"] = sorted(\n vrf[\"aggregate_address\"],\n key=lambda k: k[\"address\"],\n )\n\n if \"neighbor\" in objs:\n objs[\"neighbor\"] = list(objs[\"neighbor\"].values())\n\n if \"network\" in objs:\n objs[\"network\"] = list(objs[\"network\"].values())\n objs[\"network\"] = sorted(\n objs[\"network\"], key=lambda k: k[\"address\"]\n )\n if \"aggregate_address\" in objs:\n objs[\"aggregate_address\"] = sorted(\n objs[\"aggregate_address\"], key=lambda k: k[\"address\"]\n )\n\n ansible_facts[\"ansible_network_resources\"].pop(\"bgp_global\", None)\n\n params = utils.remove_empties(\n utils.validate_config(self.argument_spec, {\"config\": objs})\n )\n\n facts[\"bgp_global\"] = params.get(\"config\", [])\n ansible_facts[\"ansible_network_resources\"].update(facts)\n\n return ansible_facts",
"def get_facts(self) -> dict:\n # FIXME - add better error handling here\n self.xapi.op(cmd='<show><system><info></info></system></show>')\n\n if self.xapi.status != 'success':\n print('We have a problem!')\n raise LoaderException('Could not get facts from device!')\n\n results_xml_str = self.xapi.xml_result()\n results = xmltodict.parse(results_xml_str)\n if 'system' in results:\n return results['system']",
"def metadata(self) -> dict[str, Any]:",
"def _create_descriptions(self, yaml_list):\n description_dict = {}\n for entry in yaml_list:\n num = entry.get('port')\n description = entry.get('description')\n if num is not None and description is not None:\n description_dict[num] = description\n return description_dict",
"def get_feed_dict(self):\n return {}",
"def parseFactsFromDict(self):\n\n # get known hosts\n self.parseHacl()\n\n super(FactGraph, self).__init__(self.data)\n self.__updateFG()",
"def __create_info_dict(self):\n d = ['mtype', 'stype', 'sval']\n keys = ['_'.join(i) for n in range(5) for i in itertools.permutations(d, n) if not len(i) == 0]\n out = {i: {} for i in keys}\n return out",
"def asdict(self):",
"def shodan_meta_extraction(raw: Union[dict, list]) -> dict:\n g = {}\n if isinstance(raw, dict):\n o = raw\n elif isinstance(raw, list):\n o = raw[0]\n else:\n raise TypeError(\"Given parameter 'raw' must be either a list or a dict.\")\n g.update(\n {\n \"as\": {\n \"number\": o.get(\"asn\", None),\n \"name\": o.get(\"isp\", None),\n \"location\": o.get(\"location\", {}).get(\"country_code\", None),\n \"prefix\": None, # Todo: Check of routed prefix is given in censys output\n },\n \"domains\": [],\n \"org\": o.get(\"org\", None),\n \"ip\": o.get(\"ip_str\"),\n }\n )\n for domain in o.get(\"domains\", []):\n g[\"domains\"].append({\"type\": \"shodan-domain\", \"value\": domain})\n for hostname in o.get(\"hostnames\", []):\n g[\"domains\"].append({\"type\": \"shodan-hostname\", \"value\": hostname})\n return g",
"def generate_known_facts_dict(keys, fact):\n fact_cnf = to_cnf(fact)\n mapping = single_fact_lookup(keys, fact_cnf)\n\n ret = {}\n for key, value in mapping.items():\n implied = set()\n rejected = set()\n for expr in value:\n if isinstance(expr, AppliedPredicate):\n implied.add(expr.function)\n elif isinstance(expr, Not):\n pred = expr.args[0]\n rejected.add(pred.function)\n ret[key.function] = (implied, rejected)\n return ret",
"def shodan_service_extraction(s: dict) -> dict:\n g = {}\n p = s[\"port\"]\n g.update(\n {\n p: {\n \"banner\": s[\"data\"],\n \"timestamp\": int(DateTime(s[\"timestamp\"])),\n \"timestamp_readable\": DateTime(s[\"timestamp\"]).ISO8601(),\n }\n }\n )\n k = s.keys()\n\n if \"http\" in k or s[\"data\"][:4] == \"HTTP\":\n g[p].update(shodan_http_extraction(s))\n if \"ssl\" in k:\n g[p].update(shodan_ssl_extraction(s))\n if \"ssh\" in k:\n g[p].update(shodan_ssh_extraction(s))\n return g",
"def to_dict(potentials):\n return {tup[0]: tup[1] for tup in [pe.get_kv() for p in potentials for pe in p.entries]}",
"def jsonify(self) -> Dict:\n returned_dict = []\n for entry in self.returned:\n simple_binding = {}\n for binding, value in entry.items():\n simple_binding[binding] = value[\"value\"]\n returned_dict.append(simple_binding)\n if len(returned_dict) >= 5:\n break\n\n result_dict = {\n \"name\": self.rule[\"id\"],\n \"sparql\": sparql_from(self.rule),\n \"pass\": self.passing,\n \"fail_mode\": self.rule[\"fail_mode\"],\n \"some_results\": returned_dict,\n \"violations_found\": len(self.returned)\n }\n return result_dict",
"def create_critic_variables(self) -> Dict[str, Dict[str, snt.Module]]:",
"def retGD():\n return omiDiction"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Check if the passed-in query contains an OPERATOR. This is used to work out if the raw input from the client is asking nfi to query puppetdb or to customize how the results are passed back.
|
def has_operator(query):
for char in query:
if char in OPERATORS:
return True
return False
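# A minimal sketch of the module-level constant assumed above (it is not
# shown in this snippet); any iterable of comparison characters would work:
# OPERATORS = "=<>~"
# has_operator("hostname=bob")   # -> True
# has_operator("hostname")       # -> False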
|
[
"def isop(tok):\n return tok in oplist",
"def isOp(s):\n return getOp(s) != None",
"def _isOperator(self, token):\n token = token.strip()\n \n if(token == \"+\"):\n return True\n\n if(token == \"*\"):\n return True\n \n return False",
"def parse_query(\n query\n) -> dict:\n token_value = dict()\n # If not operators defined in the query\n if (not (QUERY_SYMBOLS.OR in query)) and (not (QUERY_SYMBOLS.AND in query) and (not (QUERY_SYMBOLS.NOT in query))):\n print(\"No operator in usage\")\n tokens = [\n pipeline_input(token)\n for token in query.split()\n ]\n for token in tokens:\n token_value[token] = True\n return token_value\n # If some operator from {AND, OR, NOT} was defined\n word = str()\n isAND, isNOT, isOR = False, False, False\n symbols = list(query_pipeline(query))\n for index, symbol in enumerate(symbols):\n if symbol == DICTIONARY[QUERY_SYMBOLS.NOT]:\n isNOT = True\n elif symbol == DICTIONARY[QUERY_SYMBOLS.OR]:\n if isNOT:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.NOT\n else:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.OR\n word, isNOT, isOR = \"\", False, True\n elif symbol == DICTIONARY[QUERY_SYMBOLS.AND]:\n if isNOT:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.NOT\n else:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.AND\n word, isNOT, isAND = \"\", False, True\n else:\n word += symbol\n if index == len(symbols) - 1:\n if isAND:\n token_value[word] = QUERY_SYMBOLS.AND\n if isOR:\n token_value[word] = QUERY_SYMBOLS.OR\n if isNOT:\n token_value[word] = QUERY_SYMBOLS.NOT\n return token_value",
"def is_op(char):\n\n return char in OPS",
"def _isOperatorEval(self, token):\n token = token.strip()\n\n if (token == \"+\"):\n return True\n\n if (token == \"*\"):\n return True\n\n if (token == \"]\"):\n return True\n\n return False",
"def operation(self):\n return any",
"def check_operations(self, opstring, verbose=False):\n try:\n if re.search(opstring, self.operations):\n if verbose: print \"[INFO] Operation %s has already been conducted on dataset\" % opstring \n return True\n else:\n return False\n except:\n raise ValueError(\"The incoming class does not have an operations attribute\")",
"def checkNot(query):\n # if odd count of NOT then negate operator\n if query['wherenot'] == False:\n return query\n else:\n if query['whereop'] == \"=\":\n query['whereop'] = \"!\"\n elif query['whereop'] == \"<\":\n query['whereop'] = '>'\n elif query['whereop'] == \">\":\n query['whereop'] = \"<\"\n elif query['whereop'] == \"CONTAINS\":\n query['whereop'] = \"NCONTAINS\"\n return query",
"def validateQuery(self):\n possibleKey = ['metric_id', 'description', 'complex', 'hostname',\n 'name', 'id', 'parent_id', 'unit',\n 'moving_window_duration', 'interval', 'removable',\n 'hosts', 'metric_parent_id', 'metric_description',\n 'metric_parent_id', 'metric_unit', 'os', 'os_ver',\n 'session_id']\n errorCounter = 0\n if self.query:\n for orSplit in self.query.split(self.OR_SEPARATOR):\n for andSplit in orSplit.split(self.AND_SEPARATOR):\n if andSplit.find(self.K_V_SEPARATOR) != -1:\n key, value = andSplit.split(self.K_V_SEPARATOR)\n if key not in possibleKey:\n errorCounter += 1\n if not value:\n errorCounter += 1\n first = value.find(\"/\")\n if first != -1:\n second = value.find(\"/\", first + 1)\n if second != -1:\n if (second - first) == 1:\n errorCounter += 1\n else:\n errorCounter += 1\n else:\n errorCounter += 1\n if errorCounter == 0:\n validationResult = 1\n else:\n validationResult = 0\n\n return validationResult",
"def is_valid_operator(s):\n if s in calc_model._valid_operators:\n return True\n return False",
"def _is_select(self, op):\n return hasattr(op, \"select\") and getattr(op, \"select\") is not None",
"def _is_write_query(sql):\n action = sql.strip()[0:6].strip()\n if action == 'SELECT':\n return False\n if action in ['DELETE', 'UPDATE', 'INSERT', 'COMMIT', 'START',\n 'ALTER', 'TRUNCA', 'CREATE', 'DROP I', 'DROP T']:\n return True\n raise Exception(\"unknown action: {}\".format(sql))",
"def operation_exists(operation):\n is_operation_exist = True\n compType = 'Operation'\n queryString = 'name=\\'%s\\' and type=\\'%s\\'' % (operation, compType)\n op = acm.FComponent.Select01(queryString, '')\n if op == None:\n is_operation_exist = False\n return is_operation_exist",
"def is_op_return(data_item):\n return (len(data_item) == 1 and ('RETURN' in str(data_item[0])))",
"def parsequery(q, implicitOr=False):\n\n if implicitOr:\n return orParser.parse(q)\n else:\n # implicit AND\n return andParser.parse(q)",
"def _has_infix(self, statement):\n tokens = statement.split(' ')\n for ix, token in enumerate(tokens):\n for op, _ in ArithmeticHandler.INFIX_OPS:\n if op == token:\n return self._is_number_in(tokens[ix:]) and self._is_number_in(tokens[:ix])\n return False",
"def _parse_boolean_operator(self, op, criteria):\n if op == '$not':\n # This operator accepts a dict (not a list), which is a query object itself.\n # Validate\n if not isinstance(criteria, dict):\n raise InvalidQueryError('{}: $not argument must be an object'\n .format(self.query_object_section_name))\n\n # Recurse\n criterion = self._parse_criteria(criteria)\n\n # Done\n return self._BOOLEAN_EXPRESSION_CLS(op, criterion)\n else:\n # All other operators accept a list: $and, $or, $nor\n # Validate it's a list\n if not isinstance(criteria, (list, tuple)):\n raise InvalidQueryError('{}: {} argument must be a list'\n .format(self.query_object_section_name, op))\n\n # Because the argument of a boolean expression is always a list of other query objects,\n # we have to recurse here and parse it.\n # Example: { $or: [ {..}, {..}, {..} ]}\n # will have to call _parse_criteria() for every object within: recursion\n # Note that we never validate `s`, because _parse_criteria() will do it for us.\n criteria = [self._parse_criteria(s) for s in criteria] # type criteria: FilterExpressionBase\n\n # Done\n if len(criteria) == 0:\n return None # Empty criteria: { $or: [] } or something like this that does not make sense\n else:\n return self._BOOLEAN_EXPRESSION_CLS(op, criteria)",
"def get_sub_operation(self, query, operation=\"NOT\"):\n # print(query, operation)\n if operation == \"NOT\":\n # There are one operation in NOT: operation().\n m = re.match(r\"^\\((.*)\\)$\", query)\n if not self.check_regex_error(m, query): # If no error\n op_a = m.groups()[0]\n return op_a\n else:\n return \"\"\n else:\n # There are two individual operations in AND/OR. Each operation must have a pattern: operation(), and is\n # connected with \",\"\n m = re.match(r\"^\\((.+)\\)$\", query)\n if not self.check_regex_error(m, query, 1): # If no error\n need_split = m.groups()[0]\n ind = self.parentheses_counter(need_split) # ind is the index of the last \")\"\n op_a = need_split[: ind+1] # So use ind + 1 to cover the last \")\"\n op_b = need_split[ind+2:] # Use ind + 2 to remove the \",\"\n return op_a, op_b\n else:\n return \"\""
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Convert a very simple query into AST format for puppetdb. This allows the client to pass in a very simple query and have it converted into the AST format for puppetdb. For example: >>> simple2AST("hostname=bob") ["=", ["fact", "hostname"], "bob"] >>> simple2AST("hostname=bob hostname=fred") ["or", ["=", ["fact", "hostname"], "bob"], ["=", ["fact", "hostname"], "fred"] ] >>> simple2AST("hostname=bob kernel=Linux") ["and", ["=", ["fact", "hostname"], "bob"], ["=", ["fact", "kernel"], "Linux"] ]
|
def simple2AST(queries):
    # Only a single query string is supported by this helper
if not isinstance(queries, str):
raise Exception("simple2AST only converts a single query")
# Make sure the query is a query
if not has_operator(queries):
raise Exception("simple2AST only converts queries: '%s'" % queries)
re_match = re.match(RE_OPERATOR, queries)
if not re_match:
raise Exception("simple2AST failed to split query: '%s'" % RE_OPERATOR)
    # Pull apart the query...
    fact = re_match.group('fact')
    operator = re_match.group('operator')
    value = re_match.group('value')
# Strip the strings
fact = fact.strip()
value = value.strip()
if not fact:
raise Exception("simple2AST failed to find fact: '%s'" % queries)
if not value:
raise Exception("simple2AST failed to find value: '%s'" % queries)
    ASTquery = '["%s", ["fact", "%s"], "%s" ]' % (operator, fact, value)
    return ASTquery
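# A hypothetical definition for the names assumed above (neither is shown in
# this snippet): RE_OPERATOR must provide named 'fact', 'operator' and
# 'value' groups for the match to be unpacked, e.g.:
# RE_OPERATOR = r"^(?P<fact>[^=<>~]+)(?P<operator>[=<>~])(?P<value>.+)$"
# simple2AST("hostname=bob")  # -> '["=", ["fact", "hostname"], "bob" ]'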
|
[
"def simplify_query(query):\n query_list = []\n query = query.split()\n\n #Now that the query is split, all that needs to be done\n #is writing the desired elements to the list in order.\n query_list.append(list_of_select(query))\n query_list.append(list_of_from(query))\n\n #This conditional prevents errors if 'where' isn't present\n if 'where' in query:\n query_list.append(list_of_where(query))\n else:\n query_list.append([])\n\n return query_list",
"def parseQueryValues(query):\n\tfor q in query:\n\t\tquery[q] = basic.parseStrValue(query[q][0])\n\treturn query",
"def parse_query(query):\n text = \"\"\n if isinstance(query, Query):\n q = str(query.text.encode(\"utf8\")) if type(query.text) == unicode else str(query.text.decode(\"utf8\").encode(\"utf8\"))\n else:\n q = str(query.encode(\"utf8\")) if type(query) == unicode else str(query.decode(\"utf8\").encode(\"utf8\"))\n try:\n parsetree = splparse(q)\n except:\n logger.exception(\"Failed to parse query: \" + q)\n return None\n return parsetree",
"def parse_query(\n query\n) -> dict:\n token_value = dict()\n # If not operators defined in the query\n if (not (QUERY_SYMBOLS.OR in query)) and (not (QUERY_SYMBOLS.AND in query) and (not (QUERY_SYMBOLS.NOT in query))):\n print(\"No operator in usage\")\n tokens = [\n pipeline_input(token)\n for token in query.split()\n ]\n for token in tokens:\n token_value[token] = True\n return token_value\n # If some operator from {AND, OR, NOT} was defined\n word = str()\n isAND, isNOT, isOR = False, False, False\n symbols = list(query_pipeline(query))\n for index, symbol in enumerate(symbols):\n if symbol == DICTIONARY[QUERY_SYMBOLS.NOT]:\n isNOT = True\n elif symbol == DICTIONARY[QUERY_SYMBOLS.OR]:\n if isNOT:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.NOT\n else:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.OR\n word, isNOT, isOR = \"\", False, True\n elif symbol == DICTIONARY[QUERY_SYMBOLS.AND]:\n if isNOT:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.NOT\n else:\n token_value[pipeline_input(word)] = QUERY_SYMBOLS.AND\n word, isNOT, isAND = \"\", False, True\n else:\n word += symbol\n if index == len(symbols) - 1:\n if isAND:\n token_value[word] = QUERY_SYMBOLS.AND\n if isOR:\n token_value[word] = QUERY_SYMBOLS.OR\n if isNOT:\n token_value[word] = QUERY_SYMBOLS.NOT\n return token_value",
"def parse_query(expr, optimize_query=True):\n if not ast_support:\n raise NotImplementedError(\"Parsing of CQEs requires Python >= 2.6\")\n query = _AstParser(expr).parse()\n if optimize_query:\n query = optimize(query)\n return query",
"def get_query_sql(query=None, /, *, literal_binds: bool = True,\n pretty: bool = False):\n if query is None:\n from .. import queries\n\n query = queries.get_example_query()\n\n compiled = _backend.expression_compile(query, literal_binds=literal_binds)\n result = compiled.string\n\n if pretty and _backend.sqlparse is not None:\n result = _backend.sqlparse.format(result, reindent=True)\n return result",
"def _parse(self):\n conds = self._cond.split('__')\n value = self._value\n\n if conds[-1] in self._op:\n op = conds.pop()\n else:\n op = 'eq'\n\n if len(conds) == 2:\n table_name, field = conds\n elif len(conds) == 1:\n table_name, field = self._table_name, conds[0]\n\n if op == 'in' and len(value) == 0:\n # result should be always false\n sql, para = None, ()\n else:\n if value is None and op in ('eq', 'neq'):\n value = (op == 'eq')\n op = 'isnull'\n \n op_sql = self._op[op]\n para = (value,)\n\n if op in self._translate:\n op_sql, para = self._translate[op](op_sql, value)\n\n sql = '`%s`.`%s` %s' % (table_name, field, op_sql)\n\n self._sqls, self._para = sql, para\n\n if self._table_name != table_name:\n self._join_table = (table_name,)\n else:\n self._join_table = None",
"def to_advanced_query(query):\n\n # nothing to do\n if not query:\n return Eq(\"Title\", \"\")\n\n a_query = None\n\n def get_query_expression_for(value):\n # return the Advanced Query Expression\n if type(value) in (tuple, list):\n return In\n if type(value) is dict:\n return Generic\n return Eq\n\n for k, v in query.iteritems():\n exp = get_query_expression_for(v)\n # first loop, build the initial query expression\n if a_query is None:\n a_query = exp(k, v)\n else:\n a_query = a_query & exp(k, v)\n\n return a_query",
"def parse_query(self, line):\n query = {'bool': [], 'phrase': [], 'wild': []}\n self.line = re.sub(r'[_]|[^\\w\\s\"*]', ' ', line.strip().lower())\n query = self.parse_wildcard(query)\n query = self.parse_phrase(query)\n query = self.parse_boolean(query)\n return query",
"def q_transform():\n return astroid.parse(\n \"\"\"\n def Q(name_or_query='match_all', **params):\n # {\"match\": {\"title\": \"python\"}}\n if isinstance(name_or_query, collections_abc.Mapping):\n if params:\n raise ValueError('Q() cannot accept parameters when passing in a dict.')\n if len(name_or_query) != 1:\n raise ValueError('Q() can only accept dict with a single query ({\"match\": {...}}). '\n 'Instead it got (%r)' % name_or_query)\n name, params = name_or_query.copy().popitem()\n return Query.get_dsl_class(name)(_expand__to_dot=False, **params)\n\n # MatchAll()\n # if isinstance(name_or_query, Query):\n # if params:\n # raise ValueError('Q() cannot accept parameters when passing in a Query object.')\n # return name_or_query\n\n # s.query = Q('filtered', query=s.query)\n if hasattr(name_or_query, '_proxied'):\n return name_or_query._proxied\n\n # \"match\", title=\"python\"\n return Query.get_dsl_class(name_or_query)(**params)\n \"\"\"\n )",
"def parse_gql(query_string):\n gql_qry = gql.GQL(query_string)\n ancestor = None\n flt = gql_qry.filters()\n bindings = {}\n filters = []\n for ((name, op), values) in flt.iteritems():\n op = op.lower()\n if op == 'is' and name == gql.GQL._GQL__ANCESTOR:\n assert len(values) == 1\n [(func, args)] = values\n ancestor = _args_to_val(func, args, bindings)\n continue\n assert op in _OPS.values()\n for (func, args) in values:\n val = _args_to_val(func, args, bindings)\n filters.append(FilterNode(name, op, val))\n if filters:\n filters.sort() # For predictable tests.\n filters = ConjunctionNode(filters)\n else:\n filters = None\n orderings = gql_qry.orderings()\n orders = []\n for (name, direction) in orderings:\n orders.append(datastore_query.PropertyOrder(name, direction))\n if not orders:\n orders = None\n elif len(orders) == 1:\n orders = orders[0]\n else:\n orders = datastore_query.CompositeOrder(orders)\n qry = Query(kind=gql_qry._entity,\n ancestor=ancestor,\n filters=filters,\n orders=orders)\n offset = gql_qry.offset()\n if offset < 0:\n offset = None\n limit = gql_qry.limit()\n if limit < 0:\n limit = None\n options = QueryOptions(offset=offset, limit=limit)\n return qry, options, bindings",
"def build_ast(parse_tree):\n return builder.transform(parse_tree)",
"def _simple_clause_to_query(clause):\n # It's a regular clause\n mongo_clause = {}\n value = clause[\"value\"]\n if clause[\"field\"] == \"status\" and clause[\"value\"] in [\"DEAD\",\n \"RUNNING\"]:\n return PyMongoDataAccess. \\\n _status_filter_to_query(clause)\n if clause[\"operator\"] == \"==\":\n mongo_clause[clause[\"field\"]] = value\n elif clause[\"operator\"] == \">\":\n mongo_clause[clause[\"field\"]] = {\"$gt\": value}\n elif clause[\"operator\"] == \">=\":\n mongo_clause[clause[\"field\"]] = {\"$gte\": value}\n elif clause[\"operator\"] == \"<\":\n mongo_clause[clause[\"field\"]] = {\"$lt\": value}\n elif clause[\"operator\"] == \"<=\":\n mongo_clause[clause[\"field\"]] = {\"$lte\": value}\n elif clause[\"operator\"] == \"!=\":\n mongo_clause[clause[\"field\"]] = {\"$ne\": value}\n elif clause[\"operator\"] == \"regex\":\n mongo_clause[clause[\"field\"]] = {\"$regex\": value}\n return mongo_clause",
"def convert_query(query):\n columns = [col.split('=')[0] for col in query.split('&')]\n values = [val.split('=')[1] for val in query.split('&')]\n input_df = pd.DataFrame([values], columns=columns)\n return input_df",
"def _form_query_from_data(self, row, parsed):\n d = { k:row[k] for k in row.keys() }\n q = Query(row[\"text\"], row[\"time\"])\n q.__dict__.update(d)\n if parsed:\n q.parsetree = ParseTreeNode.loads(row[\"parsetree\"])\n return q",
"def parse_query(query):\n\n job=urlparse.parse_qs(query)\n idxpath=None\n field=None\n timestep=0\n box=None\n hz=-1\n if job.has_key(\"idx\"):\n idxpath=job[\"idx\"][0]\n if job.has_key(\"field\"):\n field=job[\"field\"][0]\n if job.has_key(\"time\"):\n timestep=int(job[\"time\"][0])\n if job.has_key(\"box\"):\n box=job[\"box\"][0]\n if job.has_key(\"hz\"):\n hz=int(job[\"hz\"][0])\n return idxpath,field,timestep,box,hz",
"def convert_old_catalog_query(query):\n for k,v in query.items():\n q_field = q_type = q_param = None\n if '_usage' in k:\n q_field = k.replace('_usage','')\n usage = v.split(':')\n q_type = usage[0].strip()\n q_param = ':'.join(usage[1:]).strip()\n elif '_operator' in k:\n q_field = k.replace('_operator','')\n q_type = 'operator'\n q_param = v\n if q_field:\n new_val = query[q_field]\n if not isinstance(v, dict):\n new_val = { 'query' : new_val }\n new_val[q_type] = q_param\n query[q_field] = new_val\n del query[k]\n return query",
"def prepare_scan():\n\n # Start a new grammar.\n grammar = LexicalGrammar()\n\n # Regular context.\n query = grammar.add_rule('query')\n\n # Whitespace characters and comments (discarded).\n query.add_token(r'''\n SPACE: [\\s]+ | [#] [^\\0\\r\\n]*\n ''', is_junk=True)\n\n # A sequence of characters encloses in single quotes.\n query.add_token(r'''\n STRING: ['] ( [^'\\0] | [']['] )* [']\n ''', unquote=(lambda t: t[1:-1].replace(\"''\", \"'\")))\n\n # An opening quote character without a closing quote.\n query.add_token(r'''\n BAD_STRING: [']\n ''', error=\"cannot find a matching quote mark\")\n\n # A number in exponential notation.\n query.add_token(r'''\n FLOAT: ( [0-9]+ ( [.] [0-9]* )? | [.] [0-9]+ ) [eE] [+-]? [0-9]+\n ''')\n\n # A number with a decimal point.\n query.add_token(r'''\n DECIMAL:\n [0-9]+ [.] [0-9]* | [.] [0-9]+\n ''')\n\n # An unsigned integer number.\n query.add_token(r'''\n INTEGER:\n [0-9]+\n ''')\n\n # A sequence of alphanumeric characters (not starting with a digit).\n query.add_token(r'''\n NAME: [\\w]+\n ''')\n\n # Operators and punctuation characters. The token code coincides\n # with the token value.\n query.add_token(r'''\n SYMBOL: [~] | [!][~] | [<][=] | [<] | [>][=] | [>] |\n [=][=] | [=] | [!][=][=] | [!][=] |\n [\\^] | [?] | [-][>] | [@] | [:][=] |\n [!] | [&] | [|] | [+] | [-] | [*] | [/] |\n [(] | [)] | [{] | [}] | [.] | [,] | [:] | [;] | [$]\n ''', is_symbol=True)\n\n # The `[` character starts an identity constructor.\n query.add_token(r'''\n LBRACKET:\n [\\[]\n ''', is_symbol=True, push='identity')\n\n # An unmatched `]`.\n query.add_token(r'''\n BAD_RBRACKET:\n [\\]]\n ''', error=\"cannot find a matching '['\")\n\n # The input end.\n query.add_token(r'''\n END: $\n ''', is_symbol=True, pop=1)\n\n # Identity constructor context.\n identity = grammar.add_rule('identity')\n\n # Whitespace characters (discarded).\n identity.add_token(r'''\n SPACE: [\\s]+\n ''', is_junk=True)\n\n # Start of a nested label group.\n identity.add_token(r'''\n LBRACKET:\n [\\[] | [(]\n ''', is_symbol=True, push='identity')\n\n # End of a label group or the identity constructor.\n identity.add_token(r'''\n RBRACKET:\n [\\]] | [)]\n ''', is_symbol=True, pop=1)\n\n # Label separator.\n identity.add_token(r'''\n SYMBOL: [.]\n ''', is_symbol=True)\n\n # Unquoted sequence of alphanumeric characters and dashes.\n identity.add_token(r'''\n LABEL: [\\w-]+\n ''')\n\n # A sequence of characters encloses in single quotes.\n identity.add_token(r'''\n STRING: ['] ( [^'\\0] | [']['] )* [']\n ''', unquote=(lambda t: t[1:-1].replace(\"''\", \"'\")))\n\n # An opening quote character without a closing quote.\n identity.add_token(r'''\n BAD_STRING: [']\n ''', error=\"cannot find a matching quote mark\")\n\n # A reference indicator.\n identity.add_token(r'''\n REFERENCE:\n [$]\n ''', is_symbol=True, push='name')\n\n # Unexpected end of input.\n identity.add_token(r'''\n END: $\n ''', error=\"cannot find a matching ']'\")\n\n # A context for an identifier following the `$` indicator\n # in an identity constructor. 
We need a separate rule because\n # `%NAME` and `%LABEL` productions intersect.\n name = grammar.add_rule('name')\n\n # Whitespace characters (discarded).\n name.add_token(r'''\n SPACE: [\\s]+\n ''', is_junk=True)\n\n # An integer number; not expected here, but ensures that the following\n # `%NAME` production does not start with a digit.\n name.add_token(r'''\n INTEGER:\n [0-9]+\n ''', pop=1)\n\n # A sequence of alphanumeric characters (not starting with a digit).\n name.add_token(r'''\n NAME: [\\w]+\n ''', pop=1)\n\n # Anything else.\n name.add_token(r'''\n OTHER: ()\n ''', is_junk=True, pop=1)\n\n # Add a `%DIRSIG` token in front of `+` and `-` direction indicators\n # to distinguish them from addition/subtraction operators.\n grammar.add_signal('''\n DIRSIG: ( `+` | `-` )+ ( `:` | `,` | `;` | `)` | `}` )\n ''')\n\n # Add `%PIPESIG` in front of `/:` pipe indicator to prevent it from\n # being recognized as a division operator.\n grammar.add_signal('''\n PIPESIG:\n `/` `:`\n ''')\n\n # Add `%LHSSIG` in front of a left-hand side of an assignment expression.\n grammar.add_signal('''\n LHSSIG: `$`? %NAME ( `.` `$`? %NAME )*\n ( `(` ( `$`? %NAME ( `,` `$`? %NAME )* `,`? )? `)` )?\n `:=`\n ''')\n\n # Generate and return the scanner.\n return grammar()",
"def tr_sql_parser(file_input):\n\n declares = []\n sets = []\n wheres = []\n comments = []\n output = []\n with open(file_input, \"r\") as f:\n data = f.read()\n\n for line in data.split('\\n'):\n if line.startswith('DECLARE'):\n declares.append(line)\n elif line.startswith('SET'):\n sets.append(line)\n elif line.startswith('WHERE'):\n wheres.append(line)\n elif line.startswith('--'):\n comments.append(line)\n else:\n output.append(line)\n fields = [field.split('@')[1] for field in wheres]\n return declares, sets, fields, comments, output"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Allow for simple querying of facts.
|
def fact_query(db, raw_client_input=None):
    # With raw client input, run a node query; otherwise return all fact names.
    if raw_client_input:
        return nodes_query(db, raw_client_input)
    else:
        return fact_names(db)
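A minimal usage sketch (hypothetical: db is an open database handle, and nodes_query/fact_names are the helpers this function delegates to; the "cpu" filter string is illustrative only):

# Hypothetical usage of fact_query.
all_fact_names = fact_query(db)         # no input -> fact_names(db)
matching_nodes = fact_query(db, "cpu")  # raw input -> nodes_query(db, "cpu")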
|
[
"def find(self, **kwargs):\n q = self.compile_query(**kwargs)\n return [f for f in six.itervalues(self.facts) if q(f)]",
"def facts(name):\n cube = get_cube(name)\n result = cube.facts(fields=request.args.get('fields'),\n cuts=request.args.get('cut'),\n order=request.args.get('order'),\n page=request.args.get('page'),\n page_size=request.args.get('pagesize'))\n result['status'] = 'ok'\n return jsonify(result)",
"def query(session, Dim, **kwargs):\n return session.query(Dim).filter_by(**kwargs).first()",
"def find_one(self, **kwargs):\n q = self.compile_query(**kwargs)\n for f in six.itervalues(self.facts):\n if q(f):\n return f\n return None",
"async def facts(self, ctx, *, animal):\n\t\ttry:\n\t\t\tanimals = (\"cat\", \"dog\", \"fox\", \"koala\", \"panda\", \"bird\", \"racoon\", \"kangaroo\", \"elephant\", \"giraffe\", \"whale\")\n\t\t\tif not animal in animals:\n\t\t\t\tawait ctx.send(f\"{animal} is not a valid animal\\nValid animals are: cat, dog, fox, koala, panda, bird, racoon, kangaroo, elephant, giraffe, whale\")\n\t\t\t\t\n\t\t\tr = requests.get(f\"https://some-random-api.ml/facts/{animal}\")\n\t\t\tf = r.json()\n\t\t\tcont = f[\"fact\"]\n\t\t\tembed = discord.Embed(color=discord.Color.blurple(),description=cont)\n\t\t\tif animal == \"panda\":\n\t\t\t\tembed.title = f\":panda_face: panda fact\"\n\t\t\telif animal == \"racoon\":\n\t\t\t\tembed.title = f\":raccoon: racoon fact\"\n\t\t\telse:\n\t\t\t\tembed.title = f\":{animal}: {animal} fact\"\n\t\t\tawait ctx.send(embed=embed)\n\t\texcept Exception:\n\t\t\tpass",
"def query(self, sql):",
"def query(self) -> typing.Iterable[typing.Any]: # pragma: no cover\n pass",
"def test_query_fluctuation_reasons(self):\n pass",
"def test_query_feed_detail(self):\n pass",
"def query(self, **kwargs):\n return self.iterate('query', **kwargs)",
"def test_get_with_filter_person_factoid(mockclient_cl1):\n r = mockclient_cl1.get(TEST_URL + \"?size=100&f=F00062&p=P00063\")\n assert r.status_code == 200\n assert r.json[\"factoids\"][0][\"@id\"] == \"F00062\"\n r = mockclient_cl1.get(TEST_URL + \"?size=100&f=F00062&p=P00064\")\n assert r.status_code == 404",
"def test_simple_query():\n query = \"select * from (VALUES(1,2,3))\"\n connect_to_dremio_flight_server_endpoint(\"localhost\",\n \"32010\", \"dremio\", \"dremio123\", query, False, False, False)",
"def test_single_condition_query_parenned():\n rally = Rally(server=RALLY, user=RALLY_USER, password=RALLY_PSWD)\n qualifier = \"(State = Submitted)\"\n #qualifier = '(FormattedID = \"US100\")'\n response = rally.get('Defect', fetch=True, query=qualifier, limit=10)\n assert response.resultCount > 0",
"def _test_basic_query(self, schema, graphson):\n\n g = self.fetch_traversal_source(graphson)\n self.execute_graph(schema.fixtures.classic(), graphson)\n traversal = g.V().has('name', 'marko').out('knows').values('name')\n results_list = self.execute_traversal(traversal, graphson)\n self.assertEqual(len(results_list), 2)\n self.assertIn('vadas', results_list)\n self.assertIn('josh', results_list)",
"def test_get_schema_of_query(self):\n pass",
"def facts(self, cell=None, fields=None, order=None, page=None,\n page_size=None):\n\n cell = cell or Cell(self.cube)\n\n attributes = self.cube.get_attributes(fields)\n\n builder = QueryBuilder(self)\n builder.denormalized_statement(cell,\n attributes,\n include_fact_key=True)\n builder.paginate(page, page_size)\n order = self.prepare_order(order, is_aggregate=False)\n builder.order(order)\n\n cursor = self.execute_statement(builder.statement,\n \"facts\")\n\n return ResultIterator(cursor, builder.labels)",
"def test_list_queries(self):\n pass",
"def test_query_partial():\n results = run_filter('playsong', 'blue sky')\n nose.assert_equal(results[0]['title'], 'Mr. Blue Sky')",
"def _get_fact(self, dim_client, dim_platform):\n fact = self._fact.objects.filter(\n user_id__client_id=dim_client,\n )\n\n # our basic filter for each aggregate\n self._filter_fact = fact\n # the absolute filter we want internally for computation\n return fact.filter(event_utc_date_id__date=self._date).values(*self._values+('id',))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return the HSP objects from a Blastn object initialized with blast_records and hsp_records.
|
def create_blastn_object(query_genes: str, db: str, qcov=False, id=PERC_ID_CUTOFF):
    blastn_object = Blastn()
    stdout_xml = blastn_query1(query_genes, db, qcov=qcov, id=id)
    blastn_object.create_blast_records(stdout_xml)
    blastn_object.create_hsp_objects(query_genes)
    return blastn_object.hsp_objects
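A minimal calling sketch (hypothetical file paths and id threshold; Blastn, blastn_query1 and PERC_ID_CUTOFF are assumed to come from the surrounding module):

# Hypothetical usage; paths and the identity cutoff are placeholders.
hsp_objects = create_blastn_object("query_genes.fasta", "reference_db", qcov=True, id=90)
for hsp in hsp_objects:
    print(hsp.name)  # assumes HSP objects expose a name attribute, as used elsewhere here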
|
[
"def create_blastn_bsr_object(query_genes, db):\n blastn_object = Blastn()\n stdout_xml = blastn_query1(query_genes, db, qcov=True)\n blastn_object.create_blast_records(stdout_xml)\n blastn_object.create_hsp_objects(query_genes)\n return blastn_object",
"def construct_blast_query_object(x, parser_obj=None):\n if x.blast_type:\n program = x.blast_type\n version = x.blast_version\n date = x.blast_date\n else:\n program = parser_obj.blast_program\n version = parser_obj.blast_version\n date = parser_obj.blast_date\n \n\n query_sequence_name = x.query_header.query_name[0]\n query_sequence_name = query_sequence_name.replace(\"\\n\", \" \")\n\n query_sequence_length = x.query_header.query_size\n\n # in some BLAST output files, the database info is only contained\n # in the header, and each BLAST record does *not* have it. If that's\n # the case (i.e. no x.database_name) retrieve it from the parser object.\n\n if x.database_name:\n dbname = x.database_name[0].strip()\n database = BlastDatabase(dbname, x.database_num_seqs,\n x.database_total_seq)\n else:\n database = parser_obj.blast_database\n\n hitlines = x.query.hitlines\n hits = x.query.hits\n\n if hits:\n for (summary, hit) in zip(hitlines, hits):\n hit.total_expect = summary.total_expect\n hit.total_score = summary.total_score\n else:\n hitlines = []\n hits = []\n\n q = BlastQuery(program, version, date,\n query_sequence_name, query_sequence_length, database,\n hitlines, hits)\n\n return q",
"def recblast(seqfile, target_species, fw_blast_db='chromosome', infile_type=\"fasta\", output_type=\"fasta\",\n query_species=\"Homo sapiens\", blast_type='blastn', local_blast_1=False, local_blast_2=False,\n rv_blast_db=\"nt\", expect=10, perc_score=0.50, perc_ident=50, perc_length=0.5,\n megablast=True, email='', id_type='brute', fw_source=\"psql\", fw_id_db=\"\", batch_size=50,\n passwd='', fw_id_db_version='1.0', verbose=True, BLASTDB='/usr/db/blastdb', **kwargs):\n\n from pathlib import Path\n from Bio import SeqIO, __version__\n from Bio.Blast import NCBIXML\n from operator import itemgetter\n\n if verbose:\n print(\"Now starting RecBlast...\")\n print('BioPython Version: ', __version__)\n if isinstance(seqfile, list):\n seq_gen = ((index, seq_record) for (index, seq_record) in enumerate(seqfile))\n else:\n seqfile_path = Path(seqfile)\n if seqfile_path.exists() and seqfile_path.is_file():\n seq_gen = ((index, seq_record) for (index, seq_record) in enumerate(SeqIO.parse(str(seqfile_path),\n infile_type)))\n else:\n raise FileNotFoundError\n\n # First loop will iterate over each sequence in a file, preferably FASTA but also allows for GenBank\n for index, seq_record in seq_gen:\n if verbose:\n print(\"Forward BLAST - {}: {}\".format(index + 1, seq_record.name))\n forward_blast_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"{0}_{1}_{2}_to_{3}.xml\".format(blast_type, seq_record.name, query_species,\n target_species).replace(' ', '_'))\n\n forward_id_score_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"{0}_{1}_{2}_to_{3}.ID_Scores.tmp\".format(blast_type, seq_record.name,\n query_species,\n target_species).replace(' ', '_'))\n\n recblast_output_unanno = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"unannotated_{0}_{1}.tmp\".format(blast_type, seq_record.name).replace(' ', '_'))\n\n try:\n forward_blast_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n try:\n forward_id_score_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n try:\n recblast_output_unanno.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n\n # Forward Blast:\n if fw_blast_db == 'skip':\n if verbose:\n print(\"Skipping Forward Blast!\")\n pass\n else:\n blast(seq_record=seq_record, target_species=target_species, database=fw_blast_db,\n query_species=query_species, filetype=infile_type, blast_type=blast_type, local_blast=local_blast_1,\n expect=expect, megablast=megablast, blastoutput_custom=str(forward_blast_output), write=True,\n perc_ident=perc_ident, verbose=verbose, BLASTDB=BLASTDB, **kwargs)\n if verbose:\n print('Forward blast done!')\n # Easy part's over - now we need to get the top hits from the forward BLAST, ID them, then compile a new\n # FASTA file with sequences from Species 2 that will be annotated via the Reverse BLAST against Species 1.\n\n # First we load the primary BLAST XML results to a handle, read the file, then loop over all alignments\n # to get the top scoring HSPs for each (I don't trust NCBI to always give me a pre-sorted list beforehand).\n # In addition, to really get to the crux of what this script should be doing, I also need to get the query\n # start and end 
points for each HSP, to tile them over the query, in order to get the true query coverage.\n # Furthermore I need to do the same for subject start and end so I can get a specific subrange for the sequence.\n with forward_blast_output.open(\"r\") as forward_blasthits:\n if verbose:\n print('Opening Forward blast output: ', str(forward_blast_output.absolute()))\n blastrecord = NCBIXML.read(forward_blasthits)\n align_scorelist = []\n hsp_scorelist = []\n subject_range = []\n query_start_end = []\n for alignment in blastrecord.alignments:\n if verbose:\n print('Sorting through alignment\\'s HSPs to get top scores of all alignments...')\n subject_range_hsp = []\n query_start_end_hsp = []\n for hsp in alignment.hsps:\n hsp_scorelist.append(hsp.score)\n subject_range_hsp.append(hsp.sbjct_start)\n subject_range_hsp.append(hsp.sbjct_end)\n query_start_end_hsp.append((hsp.query_start, hsp.query_end))\n hsp_scorelist.sort(reverse=True)\n query_start_end.append(i for i in merge_ranges(query_start_end_hsp))\n subject_range.append((subject_range_hsp[0], subject_range_hsp[-1]))\n if verbose:\n print(\"HSP Score List: \\n\\t\", hsp_scorelist)\n align_scorelist.append(hsp_scorelist[0])\n if verbose:\n print(\"Alignment Score List: \\n\\t\", align_scorelist)\n if verbose:\n print('Done with sorting!')\n # Two parts to this next loop: first we loop for each alignment. Next, we look though the HSPs in each\n # alignment file. If the HSP being considered has a score above the thresholds, we note down the ID and\n # score of that HSP and corresponding alignment; once we do that for one HSP in the series, we update the\n # \"blast_got_hit\" variable and proceed to skip to the next alignment result. This goes on until all\n # alignments have been considered, and so we now have a complete list of putative orthologs.\n with forward_id_score_output.open(\"w\") as f_id_out:\n if verbose:\n print('Searching through alignments to get top-scoring hit IDs')\n has_written = False\n for align_index, alignment in enumerate(blastrecord.alignments):\n blast_got_hit = False # Every time we consider a new alignment\n for hsp in alignment.hsps:\n if blast_got_hit:\n break\n if ((hsp.score >= (perc_score * align_scorelist[align_index])) and (hsp.expect <= expect) and\n (sum([i[-1] - i[0] for i in query_start_end[align_index]]) / blastrecord.query_length\n >= perc_length)):\n if verbose:\n print('Found annotation above threshold!')\n f_id_out.write('{0}\\t{1}\\t{2}\\n'.format(alignment.title.replace('/t', ' '),\n ':{0}-{1}'.format(subject_range[align_index][0],\n subject_range[align_index][-1]),\n hsp.score))\n has_written = True\n blast_got_hit = True\n else:\n continue\n if not blast_got_hit:\n print('NOTE: FOR ALIGNMENT {}, NO HITS WERE FOUND!'.format(alignment.title))\n if not has_written:\n print('WARNING! FOR THIS RUN, NO HITS WERE WRITTEN TO FILE, CONTINUING TO NEXT SEQUENCE IN LIST!')\n continue\n # Now, equiped with the list of hits, we need to look these up on a database and get their sequences as a\n # FASTA file.\n if verbose:\n print('Fetching sequences for ID\\'ed hits...')\n try:\n fetchseq(id_file=str(forward_id_score_output), species=target_species, email=email, source=fw_source,\n output_type=output_type, output_name=str(recblast_output_unanno), db=fw_id_db, delim='\\t',\n id_type=id_type, batch_size=batch_size, passwd=passwd, version=fw_id_db_version, verbose=verbose)\n if verbose:\n print('Done with fetching!')\n except IndexError:\n print('WARNING! FETCHSEQ FAILED! 
SKIPPING THIS SEQUENCE!')\n continue\n # Little caveat: fetchseq by design appends a .[output_type] to the end of the file so we need to add that on:\n recblast_output_unanno = str(recblast_output_unanno) + '.{}'.format(output_type)\n # Now that we have the sequences we can do the Reverse BLAST:\n # Big caveat though: we need to do each target individually.\n if verbose:\n print('Preparing for Reverse BLAST...')\n recblast_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}.{2}\".format(blast_type, seq_record.name, output_type).replace(' ', '_'))\n try:\n recblast_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n for entry_index, entry_record in enumerate(SeqIO.parse(str(recblast_output_unanno), \"fasta\")):\n if entry_record.seq:\n pass\n else:\n print(Warning('Entry {0} in unnanotated recblast file {1} came '\n 'back empty'.format(entry_record.name,\n str(recblast_output_unanno))))\n continue\n if verbose:\n print(\"Entry #{} in unannotated RecBlast Hits:\\n\".format(entry_index))\n for item in [entry_record.id, entry_record.description, entry_record.seq]:\n print('\\t', item)\n reverse_blast_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"{0}_{1}_{3}_to_{2}_{4}.xml\".format(blast_type, seq_record.name,\n query_species, target_species,\n entry_index).replace(' ', '_'))\n try:\n reverse_blast_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n if verbose:\n print('Performing Reverse Blast:')\n if rv_blast_db == 'skip':\n pass\n elif rv_blast_db == 'stop':\n print('Not performing reverse blast!')\n continue\n else:\n blast(seq_record=entry_record, target_species=query_species, database=rv_blast_db,\n query_species=target_species, filetype=infile_type, blast_type=blast_type,\n local_blast=local_blast_2, write=True,\n expect=expect, megablast=megablast, blastoutput_custom=str(reverse_blast_output),\n perc_ident=perc_ident, BLASTDB=BLASTDB, **kwargs)\n if verbose:\n print('Done with Reverse Blast!')\n with reverse_blast_output.open(\"r\") as reverse_blast_hits:\n if verbose:\n print('Getting top scores for each alignment...')\n blastrecord2 = NCBIXML.read(reverse_blast_hits)\n align_scorelist2 = []\n hsp_scorelist2 = []\n subject_range2 = []\n query_start_end2 = []\n for alignment2 in blastrecord2.alignments:\n if verbose > 4:\n print('Sorting through alignment\\'s HSPs to get top scores of all alignments...')\n subject_range_hsp2 = []\n query_start_end_hsp2 = []\n for hsp2 in alignment2.hsps:\n hsp_scorelist2.append(hsp2.score)\n subject_range_hsp2.append(hsp2.sbjct_start)\n subject_range_hsp2.append(hsp2.sbjct_end)\n query_start_end_hsp2.append((hsp2.query_start, hsp2.query_end))\n hsp_scorelist2.sort(reverse=True)\n query_start_end2.append([i for i in merge_ranges(query_start_end_hsp2)])\n subject_range2.append((subject_range_hsp2[0], subject_range_hsp2[-1]))\n if verbose > 4:\n print(\"HSP Score List: \\n\\t\", hsp_scorelist2)\n align_scorelist2.append(hsp_scorelist2[0])\n if verbose > 4:\n print(\"Alignment Score List: \\n\\t\", align_scorelist2)\n print(\"Query_start_end: \\n\\t\", query_start_end2)\n print(\"Subject Range: \\n\\t\", subject_range2)\n if verbose:\n print('Done with sorting!')\n # Now we have a list of the top score of each alignment for the current entry_record.\n with recblast_output.open(\"w+\") as rb_out:\n if verbose:\n print('Annotating BLAST 
results')\n has_written2 = False\n reverse_blast_annotations = list()\n for align_index2, alignment2 in enumerate(blastrecord2.alignments):\n blast_got_hit2 = False\n for hsp2 in alignment2.hsps:\n if (hsp2.score >= (perc_score * align_scorelist2[align_index2])):\n if verbose > 4:\n print('hsp score above threshold')\n if (hsp2.expect <= expect):\n if verbose > 4:\n print('hsp expect below threshold')\n if verbose > 4:\n print('HSP Length: ', query_start_end2[align_index2])\n length_alignment = sum([i[-1] - i[0] for i in query_start_end2[align_index2]])\n align_len_threshold = blastrecord2.query_length * perc_length\n if verbose > 4:\n print(length_alignment)\n print(align_len_threshold)\n if length_alignment >= align_len_threshold:\n print('hsp perc length above threshold')\n if verbose:\n print('Found hit!')\n reverse_blast_annotations.append((alignment2.title, '[:{0}-{1}]'.format(\n subject_range2[align_index2][0],\n subject_range2[align_index2][0]),\n hsp2.score))\n has_written2 = True\n blast_got_hit2 = True\n else:\n print('WARNING HSP LENGTH BELOW THRESHOLD')\n print(length_alignment,\n ' not greater than ', align_len_threshold)\n else:\n print('WARNING HSP EXPECT ABOVE THRESHOLD')\n print(hsp2.expect, 'not less than', expect)\n else:\n print('WARNING HSP SCORE BELOW THRESHOLD')\n print(hsp2.score, ' not greater than ', (perc_score * align_scorelist2[align_index2]))\n # else:\n # continue\n if not blast_got_hit2:\n print('NOTE: Alignment {} was not used to annotate.'.format(alignment.title))\n if reverse_blast_annotations:\n sorted(reverse_blast_annotations, reverse=True, key=itemgetter(2))\n annotations = ['\\t||{0} {1} ({2})'.format(anno[0], anno[1], anno[2]) for anno\n in reverse_blast_annotations]\n if verbose:\n print('********************************************')\n print(annotations)\n print('********************************************')\n entry_record.description += ''.join(annotations)\n if verbose > 3:\n print(entry_record)\n SeqIO.write(entry_record, rb_out, output_type)\n if not has_written2:\n print(Warning('NONE OF THE REVERSE BLAST HITS FOR THIS RUN MET ANNOTATION CRITERIA!'))\n continue\n if verbose:\n print('DONE!!!!')\n # Done!",
"def blast(args):\n reffasta, queryfasta = args.ref_fasta, args.qry_fasta\n blastfile = get_outfile(reffasta, queryfasta)\n\n run_megablast(infile=queryfasta, outfile=blastfile, db=reffasta,\n wordsize=args.wordsize, pctid=args.pctid, evalue=args.evalue,\n hitlen=None, best=args.best, task=args.task, cpus=args.cpus)\n\n return blastfile",
"def __init__(self, db, mastertb, playermeta, pacmeta):\n\t\tself.tables = {}\n\t\tself.db = db\n\t\tself.tables['master'] = mastertb\n\t\tself.playermeta = playermeta\n\t\tself.pacmeta = pacmeta\n\t\tself.master = bflmaster.Master(self)\n self.start = time.time()",
"def load_BLAST_results( blast_result_file, match_sim_cutoff, match_len_cutoff, unique_score_ratio ):\n\t\n\t# --- load raw hits --- #\n\tblast_hits = {}\n\twith open( blast_result_file, \"r\" ) as f:\n\t\tline = f.readline()\n\t\twhile line:\n\t\t\tparts = line.strip().split('\\t')\n\t\t\tif int( parts[3] ) >= match_len_cutoff:\n\t\t\t\tif float( parts[2] ) >= match_sim_cutoff:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tblast_hits[ parts[0] ].append( { 'LG': parts[0].split('_%_')[0], 'cm': float( parts[0].split('_%_')[1] ), 'chr': parts[1], 'pos': ( int( parts[8] ) + int( parts[9] ) ) / 2.0, 'score': float( parts[-1] ) } )\n\t\t\t\t\texcept KeyError:\n\t\t\t\t\t\tblast_hits.update( { parts[0]: [ { 'LG': parts[0].split('_%_')[0], 'cm': float( parts[0].split('_%_')[1] ), 'chr': parts[1], 'pos': ( int( parts[8] ) + int( parts[9] ) ) / 2.0, 'score': float( parts[-1] ) } ] } )\n\t\t\tline = f.readline()\n\t\n\t# --- screen and clean --- #\n\tfinal_hits = []\n\tfor hits in blast_hits.values():\n\t\tif len( hits ) == 1:\n\t\t\tfinal_hits.append( hits[0] )\n\t\telse:\n\t\t\tsorted_hits = sorted( hits, key=itemgetter( 'score' ) )\n\t\t\tif sorted_hits[-2]['score'] / sorted_hits[-1]['score'] <= unique_score_ratio:\n\t\t\t\tfinal_hits.append( sorted_hits[-1] )\n\treturn final_hits",
"def do_blast(self):\n with open(self.query_file) as handle:\n fasta_string = handle.read()\n\n result_handle = NCBIWWW.qblast('blastn', 'nt', fasta_string)\n\n with open(self.output_file, 'w') as writer:\n writer.write(result_handle.read())\n result_handle.close()",
"def land_bank():\n table_name = 'caeser_landbank'\n service=('https://uasiportal.shelbycountytn.gov/arcgis/rest/services/'\\\n 'LandBank/LandBank/MapServer')\n lb = get('0',service=service)\n if not table_name in [k.split('.')[-1] for k in meta.tables.keys()]:\n table = Table(table_name, meta,\n Column('objectid', BIGINT, primary_key=True),\n Column('taxparcelidno', TEXT),\n Column('parcelidno', TEXT),\n Column('streetno', TEXT),\n Column('streetname', TEXT),\n Column('dimwidth', BIGINT),\n Column('dimdepth', BIGINT),\n Column('dimconfig', TEXT),\n Column('zipcode', TEXT),\n Column('sizeinacres', FLOAT),\n Column('improvement', TEXT),\n Column('zoning', TEXT),\n Column('taxsaleno', TEXT),\n Column('askingprice', FLOAT),\n Column('latitude', FLOAT),\n Column('longitude', FLOAT),\n Column('createdate', DATE),\n Column('status_1', TEXT),\n Column('lastdaytobid', DATE),\n Column('wkb_geometry', Geometry(geometry_type='Point',\n srid=2274)))\n table.create(engine)\n else:\n table = Table(table_name, meta)\n #convert time fields from epoch milliseconds to actual date \n def convert(t):\n import time\n \n if t:\n ct = time.localtime(t/1000.)\n yr, mon, day = ct[:3]\n date_string = '{0}/{1}/{2}'.format(mon, day, yr)\n #date_obj = datetime.date(yr, mon, day)\n return date_string#(date_string, date_obj)\n else:\n return None#(None,None)\n\n #convert date string to date object\n for row in lb:\n dates = convert(row['createdate'])\n row['createdate'] = dates#[1]\n row['lastdaytobid'] = convert(row['lastdaytobid'])#[1]\n \n current_lb = pd.read_sql(\"\"\"select * from caeser_landbank\"\"\", engine)\n new_lb = pd.DataFrame(lb)\n today = datetime.today()\n new_lb['load_date'] = '{0}-{1}-{2}'.format(today.year, \n today.month,today.day)\n\n #append new landbank records to caeser_landbank table\n new_lb.to_sql('caeser_landbank', engine, if_exists='append', index=False)\n \n #Update combined_table to refelct new records from landbank\n clean_lb = \"\"\"update combined_table \n set improvement = Null,\n taxsale = Null,\n askingprice = Null,\n acres = Null,\n load_date = current_date\n where taxsale is not Null\"\"\"\n conn.execute(clean_lb)\n update_lb = \"\"\"update combined_table\n set improvement = lb.imp,\n taxsale = lb.tax,\n askingprice = lb.price,\n acres = lb.acres,\n load_date = lb.today\n from (select parcelidno parid, improvement imp, taxsaleno tax, \n askingprice price, sizeinacres acres,\n load_date today\n from caeser_landbank where load_date = current_date) lb\n where combined_table.parid = lb.parid;\"\"\"\n conn.execute(update_lb)\n update_metadata(False, \"caeser_landbank\")",
"def fromblast(args):\n from jcvi.formats.blast import sort\n from jcvi.utils.range import range_distance\n\n p = OptionParser(fromblast.__doc__)\n p.add_option(\n \"--clique\",\n default=False,\n action=\"store_true\",\n help=\"Populate clique instead of linear path\",\n )\n p.add_option(\n \"--maxdist\",\n default=100000,\n type=\"int\",\n help=\"Create edge within certain distance\",\n )\n p.set_verbose(help=\"Print verbose reports to stdout\")\n opts, args = p.parse_args(args)\n\n if len(args) != 2:\n sys.exit(not p.print_help())\n\n blastfile, subjectfasta = args\n clique = opts.clique\n maxdist = opts.maxdist\n sort([blastfile, \"--query\"])\n blast = BlastSlow(blastfile, sorted=True)\n g = BiGraph()\n for query, blines in groupby(blast, key=lambda x: x.query):\n blines = list(blines)\n iterator = combinations(blines, 2) if clique else pairwise(blines)\n for a, b in iterator:\n asub, bsub = a.subject, b.subject\n if asub == bsub:\n continue\n\n arange = (a.query, a.qstart, a.qstop, \"+\")\n brange = (b.query, b.qstart, b.qstop, \"+\")\n dist, oo = range_distance(arange, brange, distmode=\"ee\")\n if dist > maxdist:\n continue\n\n atag = \">\" if a.orientation == \"+\" else \"<\"\n btag = \">\" if b.orientation == \"+\" else \"<\"\n g.add_edge(asub, bsub, atag, btag)\n\n graph_to_agp(g, blastfile, subjectfasta, verbose=opts.verbose)",
"async def get_sub_block_records(\n self,\n ) -> Tuple[Dict[bytes32, SubBlockRecord], Optional[bytes32]]:\n cursor = await self.db.execute(\"SELECT * from sub_block_records\")\n rows = await cursor.fetchall()\n await cursor.close()\n ret: Dict[bytes32, SubBlockRecord] = {}\n peak: Optional[bytes32] = None\n for row in rows:\n header_hash = bytes.fromhex(row[0])\n ret[header_hash] = SubBlockRecord.from_bytes(row[6])\n if row[7]:\n assert peak is None # Sanity check, only one peak\n peak = header_hash\n return ret, peak",
"def load_pp_biz(bdf):\n\n print \"PlatePal Businesses\"\n\n PlatePalBiz.query.delete()\n\n for row in bdf.iterrows():\n row_pd = row[1]\n yelp_biz_id = row_pd['business_id'] # unicode\n name = row_pd['name'] # unicode\n address = row_pd['full_address'] # unicode\n city = row_pd['city'] # unicode\n state = str(row_pd['state']) # unicode\n lat = row_pd['latitude'] # float\n lng = row_pd['longitude'] # float\n stars = row_pd['stars'] # float\n review_count = row_pd['review_count'] # int\n is_open = row_pd['open'] # bool\n if 'photo_url' in row_pd:\n photo_url = row_pd['photo_url']\n else:\n photo_url = None\n\n biz = PlatePalBiz(yelp_biz_id=yelp_biz_id,\n name=name,\n address=address,\n city=city,\n state=state,\n lat=lat,\n lng=lng,\n is_open=is_open,\n photo_url=photo_url,\n )\n\n db.session.add(biz)\n\n db.session.commit()",
"def read_local_blast_query(self, fn_path):\n # debug(\"read_local_blast_query\")\n query_dict = {}\n with open(fn_path, mode=\"r\") as infile:\n for lin in infile:\n sseqid, staxids, sscinames, pident, evalue, bitscore, sseq, stitle = lin.strip().split('\\t')\n gi_id = int(sseqid.split(\"|\")[1])\n gb_acc = sseqid.split(\"|\")[3]\n sseq = sseq.replace(\"-\", \"\")\n sscinames = sscinames.replace(\" \", \"_\").replace(\"/\", \"_\")\n pident = float(pident)\n evalue = float(evalue)\n bitscore = float(bitscore)\n # NOTE: sometimes there are seq which are identical and are combined in the local blast db, just get first one\n if len(staxids.split(\";\")) > 1:\n staxids = int(staxids.split(\";\")[0])\n sscinames = sscinames.split(\";\")[0]\n else:\n staxids = int(staxids)\n assert type(staxids) is int\n self.ids.spn_to_ncbiid[sscinames] = staxids\n if gb_acc not in self.ids.acc_ncbi_dict: # fill up dict with more information.\n self.ids.acc_ncbi_dict[gb_acc] = staxids\n if gb_acc not in query_dict and gb_acc not in self.newseqs_acc:\n query_dict[gb_acc] = {'^ncbi:gi': gi_id, 'accession': gb_acc, 'staxids': staxids,\n 'sscinames': sscinames, 'pident': pident, 'evalue': evalue,\n 'bitscore': bitscore, 'sseq': sseq, 'title': stitle}\n # debug(\"key in query\")\n for key in query_dict.keys():\n if float(query_dict[key][\"evalue\"]) < float(self.config.e_value_thresh):\n gb_acc = query_dict[key][\"accession\"]\n if gb_acc not in self.data.gb_dict: # skip ones we already have\n self.new_seqs[gb_acc] = query_dict[key][\"sseq\"]\n self.data.gb_dict[gb_acc] = query_dict[key]\n else:\n fn = open(\"{}/blast_threshold_not_passed.csv\".format(self.workdir), \"a\")\n fn.write(\"blast_threshold_not_passed:\\n\")\n fn.write(\"{}, {}, {}\".format(query_dict[key][\"sscinames\"], query_dict[key][\"accession\"], query_dict[key][\"evalue\"]))\n fn.close()",
"def _from_HDFBackend(self, b):\n self.lamb = b.lamb.read()\n self.seds = b.seds.read()\n self.grid = Table(b.grid.read())\n self._filters = b._filters[:]\n self._header = b.header\n self._aliases = b._aliases",
"def main(unparsed_args_list):\n # Parse command line arguments\n args = parse_args(unparsed_args_list)\n date = time.strftime(\"%Y%m%d\")\n args.output_folder = basic.set_path(args.output_folder, kind=\"dir\",\n expect=True)\n\n working_dir = pathlib.Path(f\"{date}_get_gb_records\")\n working_path = basic.make_new_dir(args.output_folder, working_dir,\n attempt=10)\n\n if working_path is None:\n print(f\"Invalid working directory '{working_dir}'\")\n sys.exit(1)\n ncbi_cred_dict = ncbi.get_ncbi_creds(args.ncbi_credentials_file)\n\n\n # Verify database connection and schema compatibility.\n print(\"Connecting to the MySQL database...\")\n engine = mysqldb.connect_to_db(args.database)\n mysqldb.check_schema_compatibility(engine, \"the get_gb_records pipeline\")\n\n\n # Create data sets\n print(\"Retrieving accessions from the database...\")\n accessions = mysqldb.get_distinct_data(engine, \"phage\", \"Accession\")\n engine.dispose()\n if \"\" in accessions:\n accessions.remove(\"\")\n if None in accessions:\n accessions.remove(None)\n\n get_genbank_data(working_path, accessions, ncbi_cred_dict)",
"def get_latest(cls) -> HTTPRecord:\n raise NotImplementedError",
"def load_banknote():\n module_path = dirname(__file__)\n data, target, target_names = load_data(module_path, 'banknote.csv')\n\n return Bunch(data=data, target=target,\n target_names=target_names)",
"def make_blastdb(self):\n index_params = self.db_params\n db_path = os.path.join(index_params['dbpath'])\n db_name = os.path.basename(index_params['dbname'])\n db_seqtype = index_params['seqtype']\n db_status = self.check_db_exists(db_path, db_seqtype)\n \n if db_status:\n #os.path.exists(os.path.join(db_path, db_name + \".phr\")) == True \\\n #and os.path.exists(os.path.join(db_path, db_name + \".pin\")) == True \\\n #and os.path.exists(os.path.join(db_path, db_name + \".psq\")) == True:\n logger.info(\"Index files exist for database {}\".format(db_name))\n pass\n else:\n logger.warning(\"Index files does not exist for database {}\".format(db_name))\n fastafile = os.path.abspath(os.path.join(index_params['dbpath'],index_params['dbname'])) \n logger.debug(\"Checking if fasta file {} exists\".format(fastafile))\n logger.debug(\"before setting dbpath path variable {}\".format(str(self.db_params)))\n \n if os.access(fastafile, os.R_OK) and is_fasta(fastafile):\n logger.info(\"Creating Blast database using fasta file\")\n self.set_db_indexpath(db_name, os.path.abspath(self.temp_dir))\n Database.src_cnt += 1\n\n if not os.path.exists(self.temp_dir):\n os.makedirs(self.temp_dir)\n \n script_file = os.path.join(self.temp_dir, \"s0\" + str(Database.src_cnt) + \"_makeblastdb.sh\")\n fh1 = open(script_file, 'w')\n cmd = 'makeblastdb -in {input_file} -dbtype {db_type} -out {output_dir} {stdout}' \\\n .format(\n input_file = fastafile, \n db_type = index_params['seqtype'], \n output_dir = os.path.join(os.path.abspath(self.temp_dir), db_name), \n stdout=self.stdout\n )\n\n logger.debug(\"cmd = {}\".format(str(cmd)))\n fh1.write(cmd + '\\n') \n fh1.close()\n os.system(cmd)\n \n #if os.stat(script_file).st_size > 0:\n # sbatch_params = '--mem=2000M'\n # run_sbatch_script(script_file, 1, self.temp_dir, sbatch_params)\n #else:\n # logger.error(\"The script file {} is empty. Exiting....\".format(script_file), exc_info=True)\n # sys.exit(1)\n else:\n logger.error(\"Either fasta file does not exists or is not in fasta format. Please check if filename ends with .fasta\")",
"def _group_bnds(calls: Dict[str, Union[str, int, float]]) -> Dict[str, Union[str, int, float]]:\n bnds: Dict[str, Union[str, int, float]] = {}\n\n for record in calls:\n if record['SVTYPE'] == 'BND':\n if 'MATEID' not in record:\n continue\n\n if record['MATEID'] in bnds:\n \n yield (record, bnds[record['MATEID']])\n bnds.pop(record['MATEID'])\n else:\n bnds[record['id']] = record\n else:\n yield record,\n\n assert len(bnds) == 0",
"async def get_block_records_close_to_peak(\n self, blocks_n: int\n ) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:\n\n peak = await self.get_peak()\n if peak is None:\n return {}, None\n\n ret: Dict[bytes32, BlockRecord] = {}\n async with self.db_wrapper.reader_no_transaction() as conn:\n async with conn.execute(\n \"SELECT header_hash, block_record,plot_info.plot_filter_info \"\n \"FROM full_blocks LEFT JOIN plot_info USING(header_hash) \"\n \"WHERE height >= ?\",\n (peak[1] - blocks_n,),\n ) as cursor:\n for row in await cursor.fetchall():\n header_hash = bytes32(row[0])\n block_record_db: BlockRecordDB = BlockRecordDB.from_bytes(row[1])\n if row[2] is None:\n # since we're adding this field lazily, it may not be\n # set. If so, fall back to the slow path\n plot_filter_info = await self.get_plot_filter_info(header_hash)\n else:\n plot_filter_info = PlotFilterInfo.from_bytes(row[2])\n\n block_record = block_record_db.to_block_record(\n plot_filter_info.pos_ss_cc_challenge_hash,\n plot_filter_info.cc_sp_hash,\n )\n ret[header_hash] = block_record\n\n return ret, peak[0]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a Blastn object with initialized blast_records and hsp_records, for later BSR-cutoff filtering.
|
def create_blastn_bsr_object(query_genes, db):
blastn_object = Blastn()
stdout_xml = blastn_query1(query_genes, db, qcov=True)
blastn_object.create_blast_records(stdout_xml)
blastn_object.create_hsp_objects(query_genes)
return blastn_object
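A sketch of how this object pairs with the bsr filter shown in the negatives below (max_bits_dict is an assumed mapping from HSP name to its maximum self-hit bit score, computed beforehand):

# Hypothetical usage; the file paths are placeholders.
blast_object = create_blastn_bsr_object("query_genes.fasta", "reference_db")
bsr(blast_object, max_bits_dict)  # removes HSPs whose bit-score ratio falls below MIN_BSR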
|
[
"def bsr(blast_object:Blastn, max_bits_dict:dict):\n\n for hsp in blast_object.hsp_objects:\n hsp.bsr = hsp.bits / max_bits_dict[hsp.name]\n\n if hsp.bsr < MIN_BSR:\n blast_object.remove_hsp_object_all(hsp)",
"def create_blastn_object(query_genes:str, db:str, qcov=False,id=PERC_ID_CUTOFF):\n blastn_object = Blastn()\n stdout_xml = blastn_query1(query_genes, db, qcov=qcov, id=id)\n blastn_object.create_blast_records(stdout_xml)\n blastn_object.create_hsp_objects(query_genes)\n return blastn_object.hsp_objects",
"def construct_blast_query_object(x, parser_obj=None):\n if x.blast_type:\n program = x.blast_type\n version = x.blast_version\n date = x.blast_date\n else:\n program = parser_obj.blast_program\n version = parser_obj.blast_version\n date = parser_obj.blast_date\n \n\n query_sequence_name = x.query_header.query_name[0]\n query_sequence_name = query_sequence_name.replace(\"\\n\", \" \")\n\n query_sequence_length = x.query_header.query_size\n\n # in some BLAST output files, the database info is only contained\n # in the header, and each BLAST record does *not* have it. If that's\n # the case (i.e. no x.database_name) retrieve it from the parser object.\n\n if x.database_name:\n dbname = x.database_name[0].strip()\n database = BlastDatabase(dbname, x.database_num_seqs,\n x.database_total_seq)\n else:\n database = parser_obj.blast_database\n\n hitlines = x.query.hitlines\n hits = x.query.hits\n\n if hits:\n for (summary, hit) in zip(hitlines, hits):\n hit.total_expect = summary.total_expect\n hit.total_score = summary.total_score\n else:\n hitlines = []\n hits = []\n\n q = BlastQuery(program, version, date,\n query_sequence_name, query_sequence_length, database,\n hitlines, hits)\n\n return q",
"def get_bollinger_bands(rm, rstd):\n # TODO: Compute upper_band and lower_band\n upper_band = rm + rstd * 2\n lower_band = rm - rstd *2\n return upper_band, lower_band",
"def load_BLAST_results( blast_result_file, match_sim_cutoff, match_len_cutoff, unique_score_ratio ):\n\t\n\t# --- load raw hits --- #\n\tblast_hits = {}\n\twith open( blast_result_file, \"r\" ) as f:\n\t\tline = f.readline()\n\t\twhile line:\n\t\t\tparts = line.strip().split('\\t')\n\t\t\tif int( parts[3] ) >= match_len_cutoff:\n\t\t\t\tif float( parts[2] ) >= match_sim_cutoff:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tblast_hits[ parts[0] ].append( { 'LG': parts[0].split('_%_')[0], 'cm': float( parts[0].split('_%_')[1] ), 'chr': parts[1], 'pos': ( int( parts[8] ) + int( parts[9] ) ) / 2.0, 'score': float( parts[-1] ) } )\n\t\t\t\t\texcept KeyError:\n\t\t\t\t\t\tblast_hits.update( { parts[0]: [ { 'LG': parts[0].split('_%_')[0], 'cm': float( parts[0].split('_%_')[1] ), 'chr': parts[1], 'pos': ( int( parts[8] ) + int( parts[9] ) ) / 2.0, 'score': float( parts[-1] ) } ] } )\n\t\t\tline = f.readline()\n\t\n\t# --- screen and clean --- #\n\tfinal_hits = []\n\tfor hits in blast_hits.values():\n\t\tif len( hits ) == 1:\n\t\t\tfinal_hits.append( hits[0] )\n\t\telse:\n\t\t\tsorted_hits = sorted( hits, key=itemgetter( 'score' ) )\n\t\t\tif sorted_hits[-2]['score'] / sorted_hits[-1]['score'] <= unique_score_ratio:\n\t\t\t\tfinal_hits.append( sorted_hits[-1] )\n\treturn final_hits",
"def blast(args):\n reffasta, queryfasta = args.ref_fasta, args.qry_fasta\n blastfile = get_outfile(reffasta, queryfasta)\n\n run_megablast(infile=queryfasta, outfile=blastfile, db=reffasta,\n wordsize=args.wordsize, pctid=args.pctid, evalue=args.evalue,\n hitlen=None, best=args.best, task=args.task, cpus=args.cpus)\n\n return blastfile",
"def get_clossest_blast(blast_res):\n name_out = blast_res.replace('blast/blast_', '').replace('.txt', '')\n\n with open(blast_res,'r') as check_blast:\n blast_txt = check_blast.read()\n\n if (blast_txt == 'not enough genomic information\\n'):\n return [name_out, np.NaN]\n\n else:\n df = pd.read_csv(blast_res)\n df.columns = [0,1,2,3]\n\n filter_df = df.sort_values(by=[3], ascending=False).copy()\n\n best_score = filter_df[3].values[0]\n\n refs_best_score = filter_df[filter_df[3] == best_score][1].values\n subs_best = list(set([x.split('-')[0] for x in refs_best_score]))\n\n if len(subs_best) == 1:\n return [name_out, subs_best[0]]\n else:\n return [name_out, np.NaN]",
"def recblast(seqfile, target_species, fw_blast_db='chromosome', infile_type=\"fasta\", output_type=\"fasta\",\n query_species=\"Homo sapiens\", blast_type='blastn', local_blast_1=False, local_blast_2=False,\n rv_blast_db=\"nt\", expect=10, perc_score=0.50, perc_ident=50, perc_length=0.5,\n megablast=True, email='', id_type='brute', fw_source=\"psql\", fw_id_db=\"\", batch_size=50,\n passwd='', fw_id_db_version='1.0', verbose=True, BLASTDB='/usr/db/blastdb', **kwargs):\n\n from pathlib import Path\n from Bio import SeqIO, __version__\n from Bio.Blast import NCBIXML\n from operator import itemgetter\n\n if verbose:\n print(\"Now starting RecBlast...\")\n print('BioPython Version: ', __version__)\n if isinstance(seqfile, list):\n seq_gen = ((index, seq_record) for (index, seq_record) in enumerate(seqfile))\n else:\n seqfile_path = Path(seqfile)\n if seqfile_path.exists() and seqfile_path.is_file():\n seq_gen = ((index, seq_record) for (index, seq_record) in enumerate(SeqIO.parse(str(seqfile_path),\n infile_type)))\n else:\n raise FileNotFoundError\n\n # First loop will iterate over each sequence in a file, preferably FASTA but also allows for GenBank\n for index, seq_record in seq_gen:\n if verbose:\n print(\"Forward BLAST - {}: {}\".format(index + 1, seq_record.name))\n forward_blast_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"{0}_{1}_{2}_to_{3}.xml\".format(blast_type, seq_record.name, query_species,\n target_species).replace(' ', '_'))\n\n forward_id_score_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"{0}_{1}_{2}_to_{3}.ID_Scores.tmp\".format(blast_type, seq_record.name,\n query_species,\n target_species).replace(' ', '_'))\n\n recblast_output_unanno = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"unannotated_{0}_{1}.tmp\".format(blast_type, seq_record.name).replace(' ', '_'))\n\n try:\n forward_blast_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n try:\n forward_id_score_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n try:\n recblast_output_unanno.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n\n # Forward Blast:\n if fw_blast_db == 'skip':\n if verbose:\n print(\"Skipping Forward Blast!\")\n pass\n else:\n blast(seq_record=seq_record, target_species=target_species, database=fw_blast_db,\n query_species=query_species, filetype=infile_type, blast_type=blast_type, local_blast=local_blast_1,\n expect=expect, megablast=megablast, blastoutput_custom=str(forward_blast_output), write=True,\n perc_ident=perc_ident, verbose=verbose, BLASTDB=BLASTDB, **kwargs)\n if verbose:\n print('Forward blast done!')\n # Easy part's over - now we need to get the top hits from the forward BLAST, ID them, then compile a new\n # FASTA file with sequences from Species 2 that will be annotated via the Reverse BLAST against Species 1.\n\n # First we load the primary BLAST XML results to a handle, read the file, then loop over all alignments\n # to get the top scoring HSPs for each (I don't trust NCBI to always give me a pre-sorted list beforehand).\n # In addition, to really get to the crux of what this script should be doing, I also need to get the query\n # start and end 
points for each HSP, to tile them over the query, in order to get the true query coverage.\n # Furthermore I need to do the same for subject start and end so I can get a specific subrange for the sequence.\n with forward_blast_output.open(\"r\") as forward_blasthits:\n if verbose:\n print('Opening Forward blast output: ', str(forward_blast_output.absolute()))\n blastrecord = NCBIXML.read(forward_blasthits)\n align_scorelist = []\n hsp_scorelist = []\n subject_range = []\n query_start_end = []\n for alignment in blastrecord.alignments:\n if verbose:\n print('Sorting through alignment\\'s HSPs to get top scores of all alignments...')\n subject_range_hsp = []\n query_start_end_hsp = []\n for hsp in alignment.hsps:\n hsp_scorelist.append(hsp.score)\n subject_range_hsp.append(hsp.sbjct_start)\n subject_range_hsp.append(hsp.sbjct_end)\n query_start_end_hsp.append((hsp.query_start, hsp.query_end))\n hsp_scorelist.sort(reverse=True)\n query_start_end.append(i for i in merge_ranges(query_start_end_hsp))\n subject_range.append((subject_range_hsp[0], subject_range_hsp[-1]))\n if verbose:\n print(\"HSP Score List: \\n\\t\", hsp_scorelist)\n align_scorelist.append(hsp_scorelist[0])\n if verbose:\n print(\"Alignment Score List: \\n\\t\", align_scorelist)\n if verbose:\n print('Done with sorting!')\n # Two parts to this next loop: first we loop for each alignment. Next, we look though the HSPs in each\n # alignment file. If the HSP being considered has a score above the thresholds, we note down the ID and\n # score of that HSP and corresponding alignment; once we do that for one HSP in the series, we update the\n # \"blast_got_hit\" variable and proceed to skip to the next alignment result. This goes on until all\n # alignments have been considered, and so we now have a complete list of putative orthologs.\n with forward_id_score_output.open(\"w\") as f_id_out:\n if verbose:\n print('Searching through alignments to get top-scoring hit IDs')\n has_written = False\n for align_index, alignment in enumerate(blastrecord.alignments):\n blast_got_hit = False # Every time we consider a new alignment\n for hsp in alignment.hsps:\n if blast_got_hit:\n break\n if ((hsp.score >= (perc_score * align_scorelist[align_index])) and (hsp.expect <= expect) and\n (sum([i[-1] - i[0] for i in query_start_end[align_index]]) / blastrecord.query_length\n >= perc_length)):\n if verbose:\n print('Found annotation above threshold!')\n f_id_out.write('{0}\\t{1}\\t{2}\\n'.format(alignment.title.replace('/t', ' '),\n ':{0}-{1}'.format(subject_range[align_index][0],\n subject_range[align_index][-1]),\n hsp.score))\n has_written = True\n blast_got_hit = True\n else:\n continue\n if not blast_got_hit:\n print('NOTE: FOR ALIGNMENT {}, NO HITS WERE FOUND!'.format(alignment.title))\n if not has_written:\n print('WARNING! FOR THIS RUN, NO HITS WERE WRITTEN TO FILE, CONTINUING TO NEXT SEQUENCE IN LIST!')\n continue\n # Now, equiped with the list of hits, we need to look these up on a database and get their sequences as a\n # FASTA file.\n if verbose:\n print('Fetching sequences for ID\\'ed hits...')\n try:\n fetchseq(id_file=str(forward_id_score_output), species=target_species, email=email, source=fw_source,\n output_type=output_type, output_name=str(recblast_output_unanno), db=fw_id_db, delim='\\t',\n id_type=id_type, batch_size=batch_size, passwd=passwd, version=fw_id_db_version, verbose=verbose)\n if verbose:\n print('Done with fetching!')\n except IndexError:\n print('WARNING! FETCHSEQ FAILED! 
SKIPPING THIS SEQUENCE!')\n continue\n # Little caveat: fetchseq by design appends a .[output_type] to the end of the file so we need to add that on:\n recblast_output_unanno = str(recblast_output_unanno) + '.{}'.format(output_type)\n # Now that we have the sequences we can do the Reverse BLAST:\n # Big caveat though: we need to do each target individually.\n if verbose:\n print('Preparing for Reverse BLAST...')\n recblast_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}.{2}\".format(blast_type, seq_record.name, output_type).replace(' ', '_'))\n try:\n recblast_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n for entry_index, entry_record in enumerate(SeqIO.parse(str(recblast_output_unanno), \"fasta\")):\n if entry_record.seq:\n pass\n else:\n print(Warning('Entry {0} in unnanotated recblast file {1} came '\n 'back empty'.format(entry_record.name,\n str(recblast_output_unanno))))\n continue\n if verbose:\n print(\"Entry #{} in unannotated RecBlast Hits:\\n\".format(entry_index))\n for item in [entry_record.id, entry_record.description, entry_record.seq]:\n print('\\t', item)\n reverse_blast_output = Path(\"{0}_recblast_out\".format(target_species).replace(' ', '_') + '/' +\n \"{0}_{1}_tmp\".format(blast_type, seq_record.name).replace(' ', '_') + '/' +\n \"{0}_{1}_{3}_to_{2}_{4}.xml\".format(blast_type, seq_record.name,\n query_species, target_species,\n entry_index).replace(' ', '_'))\n try:\n reverse_blast_output.absolute().parent.mkdir(parents=True)\n except FileExistsError:\n pass\n if verbose:\n print('Performing Reverse Blast:')\n if rv_blast_db == 'skip':\n pass\n elif rv_blast_db == 'stop':\n print('Not performing reverse blast!')\n continue\n else:\n blast(seq_record=entry_record, target_species=query_species, database=rv_blast_db,\n query_species=target_species, filetype=infile_type, blast_type=blast_type,\n local_blast=local_blast_2, write=True,\n expect=expect, megablast=megablast, blastoutput_custom=str(reverse_blast_output),\n perc_ident=perc_ident, BLASTDB=BLASTDB, **kwargs)\n if verbose:\n print('Done with Reverse Blast!')\n with reverse_blast_output.open(\"r\") as reverse_blast_hits:\n if verbose:\n print('Getting top scores for each alignment...')\n blastrecord2 = NCBIXML.read(reverse_blast_hits)\n align_scorelist2 = []\n hsp_scorelist2 = []\n subject_range2 = []\n query_start_end2 = []\n for alignment2 in blastrecord2.alignments:\n if verbose > 4:\n print('Sorting through alignment\\'s HSPs to get top scores of all alignments...')\n subject_range_hsp2 = []\n query_start_end_hsp2 = []\n for hsp2 in alignment2.hsps:\n hsp_scorelist2.append(hsp2.score)\n subject_range_hsp2.append(hsp2.sbjct_start)\n subject_range_hsp2.append(hsp2.sbjct_end)\n query_start_end_hsp2.append((hsp2.query_start, hsp2.query_end))\n hsp_scorelist2.sort(reverse=True)\n query_start_end2.append([i for i in merge_ranges(query_start_end_hsp2)])\n subject_range2.append((subject_range_hsp2[0], subject_range_hsp2[-1]))\n if verbose > 4:\n print(\"HSP Score List: \\n\\t\", hsp_scorelist2)\n align_scorelist2.append(hsp_scorelist2[0])\n if verbose > 4:\n print(\"Alignment Score List: \\n\\t\", align_scorelist2)\n print(\"Query_start_end: \\n\\t\", query_start_end2)\n print(\"Subject Range: \\n\\t\", subject_range2)\n if verbose:\n print('Done with sorting!')\n # Now we have a list of the top score of each alignment for the current entry_record.\n with recblast_output.open(\"w+\") as rb_out:\n if verbose:\n print('Annotating BLAST 
results')\n has_written2 = False\n reverse_blast_annotations = list()\n for align_index2, alignment2 in enumerate(blastrecord2.alignments):\n blast_got_hit2 = False\n for hsp2 in alignment2.hsps:\n if (hsp2.score >= (perc_score * align_scorelist2[align_index2])):\n if verbose > 4:\n print('hsp score above threshold')\n if (hsp2.expect <= expect):\n if verbose > 4:\n print('hsp expect below threshold')\n if verbose > 4:\n print('HSP Length: ', query_start_end2[align_index2])\n length_alignment = sum([i[-1] - i[0] for i in query_start_end2[align_index2]])\n align_len_threshold = blastrecord2.query_length * perc_length\n if verbose > 4:\n print(length_alignment)\n print(align_len_threshold)\n if length_alignment >= align_len_threshold:\n print('hsp perc length above threshold')\n if verbose:\n print('Found hit!')\n reverse_blast_annotations.append((alignment2.title, '[:{0}-{1}]'.format(\n subject_range2[align_index2][0],\n subject_range2[align_index2][0]),\n hsp2.score))\n has_written2 = True\n blast_got_hit2 = True\n else:\n print('WARNING HSP LENGTH BELOW THRESHOLD')\n print(length_alignment,\n ' not greater than ', align_len_threshold)\n else:\n print('WARNING HSP EXPECT ABOVE THRESHOLD')\n print(hsp2.expect, 'not less than', expect)\n else:\n print('WARNING HSP SCORE BELOW THRESHOLD')\n print(hsp2.score, ' not greater than ', (perc_score * align_scorelist2[align_index2]))\n # else:\n # continue\n if not blast_got_hit2:\n print('NOTE: Alignment {} was not used to annotate.'.format(alignment.title))\n if reverse_blast_annotations:\n sorted(reverse_blast_annotations, reverse=True, key=itemgetter(2))\n annotations = ['\\t||{0} {1} ({2})'.format(anno[0], anno[1], anno[2]) for anno\n in reverse_blast_annotations]\n if verbose:\n print('********************************************')\n print(annotations)\n print('********************************************')\n entry_record.description += ''.join(annotations)\n if verbose > 3:\n print(entry_record)\n SeqIO.write(entry_record, rb_out, output_type)\n if not has_written2:\n print(Warning('NONE OF THE REVERSE BLAST HITS FOR THIS RUN MET ANNOTATION CRITERIA!'))\n continue\n if verbose:\n print('DONE!!!!')\n # Done!",
"def fromblast(args):\n from jcvi.formats.blast import sort\n from jcvi.utils.range import range_distance\n\n p = OptionParser(fromblast.__doc__)\n p.add_option(\n \"--clique\",\n default=False,\n action=\"store_true\",\n help=\"Populate clique instead of linear path\",\n )\n p.add_option(\n \"--maxdist\",\n default=100000,\n type=\"int\",\n help=\"Create edge within certain distance\",\n )\n p.set_verbose(help=\"Print verbose reports to stdout\")\n opts, args = p.parse_args(args)\n\n if len(args) != 2:\n sys.exit(not p.print_help())\n\n blastfile, subjectfasta = args\n clique = opts.clique\n maxdist = opts.maxdist\n sort([blastfile, \"--query\"])\n blast = BlastSlow(blastfile, sorted=True)\n g = BiGraph()\n for query, blines in groupby(blast, key=lambda x: x.query):\n blines = list(blines)\n iterator = combinations(blines, 2) if clique else pairwise(blines)\n for a, b in iterator:\n asub, bsub = a.subject, b.subject\n if asub == bsub:\n continue\n\n arange = (a.query, a.qstart, a.qstop, \"+\")\n brange = (b.query, b.qstart, b.qstop, \"+\")\n dist, oo = range_distance(arange, brange, distmode=\"ee\")\n if dist > maxdist:\n continue\n\n atag = \">\" if a.orientation == \"+\" else \"<\"\n btag = \">\" if b.orientation == \"+\" else \"<\"\n g.add_edge(asub, bsub, atag, btag)\n\n graph_to_agp(g, blastfile, subjectfasta, verbose=opts.verbose)",
"def set_bfield_for_s0(self, s0):\n if not (s0 > 0):\n raise ValueError(\"must have s0 > 0; got %r\" % (s0,))\n\n B0 = 2 * np.pi * cgs.me * cgs.c * self.in_vals[IN_VAL_FREQ0] / (cgs.e * s0)\n self.in_vals[IN_VAL_B] = B0\n return self",
"def do_blast(self):\n with open(self.query_file) as handle:\n fasta_string = handle.read()\n\n result_handle = NCBIWWW.qblast('blastn', 'nt', fasta_string)\n\n with open(self.output_file, 'w') as writer:\n writer.write(result_handle.read())\n result_handle.close()",
"def insert_bals(self, qsowave, qsoflux, qsoredshift, balprob=0.12,\n seed=None, verbose=False, qsoid=None):\n from desiutil.log import get_logger, DEBUG\n from desispec.interpolation import resample_flux\n from astropy.table import Table\n\n\n if verbose:\n log = get_logger(DEBUG)\n else:\n log = get_logger()\n\n rand = np.random.RandomState(seed)\n\n if balprob < 0:\n log.warning('Balprob {} is negative; setting to zero.'.format(balprob))\n balprob = 0.0\n if balprob > 1:\n log.warning('Balprob {} cannot exceed unity; setting to 1.0.'.format(balprob))\n balprob = 1.0\n\n nqso, nwave = qsoflux.shape\n if len(qsoredshift) != nqso:\n log.fatal('Dimensions of qsoflux and qsoredshift do not agree!')\n raise ValueError\n \n if qsowave.ndim == 2: # desisim.QSO(resample=True) returns a 2D wavelength array\n w_nqso, w_nwave = qsowave.shape\n if w_nwave != nwave or w_nqso != nqso:\n log.fatal('Dimensions of qsoflux and qsowave do not agree!')\n raise ValueError\n else:\n if len(qsowave) != nwave:\n log.fatal('Dimensions of qsoflux and qsowave do not agree!')\n raise ValueError\n \n # Determine which QSO spectrum has BAL(s) and then loop on each. \n hasbal = rand.random_sample(nqso) < balprob\n ihasbal = np.where(hasbal)[0]\n # Should probably return a BAL metadata table, too.\n if len(ihasbal) == 0:\n #Return a fully empy balmeta table\n balmeta=Table(names=('TARGETID','Z','BAL_PROB','BAL_TEMPLATEID'), dtype=('i4', 'f4', 'f4','i4'))\n return qsoflux, balmeta\n\n balindx = rand.choice( len(self.balmeta), len(ihasbal) )\n #before it was convenient to have the balmeta of size nqso's and remove non-BALs after. Now I think is easier to return the balmeta for BALs only.\n balmeta = self.template_balmeta(balindx)\n balmeta['Z'] = qsoredshift[ihasbal]\n balmeta['BAL_TEMPLATEID'] = balindx\n balmeta['TARGETID'] = qsoid[ihasbal]\n\n bal_qsoflux = qsoflux.copy()\n if qsowave.ndim == 2:\n for ii, indx in zip( ihasbal, balindx ):\n thisbalflux = resample_flux(qsowave[ii, :], self.balwave*(1 + qsoredshift[ii]),\n self.balflux[indx, :], extrapolate=True)\n bal_qsoflux[ii, :] *= thisbalflux\n else:\n for ii, indx in zip( ihasbal, balindx ):\n thisbalflux = resample_flux(qsowave, self.balwave*(1 + qsoredshift[ii]),\n self.balflux[indx, :], extrapolate=True)\n bal_qsoflux[ii, :] *= thisbalflux\n\n return bal_qsoflux, balmeta",
"def create_baseline_classifier():\n return GradientBoostingClassifier(min_samples_leaf=2,\n n_estimators=100,\n random_state=0)",
"def __init__(\n self, grb_save_file_name, threshold=4.5, simul_trigger_window=0.5, max_n_dets=12\n ):\n\n super(GBMTrigger, self).__init__(grb_save_file_name, instrument=\"GBM\")\n\n self._threshold = threshold\n self._simul_trigger_window = simul_trigger_window\n self._max_n_dets = max_n_dets\n\n self._triggered_times = []\n self._triggered_detectors = []\n self._triggered_time_scales = []\n\n # sort the detectors by their distance to the\n # grb\n self._setup_order_by_distance()",
"def bollinger_bands(self, n_days, shift=0, delta=2):\n \n self.data['boll_bands_upper_band'] = self.data['SMA_{0}_close'.format(n_days, shift)] + delta * self.data['HV_{0}_days'.format(n_days)] * self.data['SMA_{0}_close'.format(n_days, shift)] \n self.data['boll_bands_lower_band'] = self.data['SMA_{0}_close'.format(n_days, shift)] - delta * self.data['HV_{0}_days'.format(n_days)] * self.data['SMA_{0}_close'.format(n_days, shift)]",
"def __init__(self, maxnodepts: 'int const'=64, initsize: 'int const'=4):\n this = _coin.new_SbBSPTree(maxnodepts, initsize)\n try:\n self.this.append(this)\n except __builtin__.Exception:\n self.this = this",
"def __init__(self,\n first: 'LoadBalancerCollectionFirst',\n limit: int,\n load_balancers: List['LoadBalancer'],\n total_count: int,\n *,\n next: 'LoadBalancerCollectionNext' = None) -> None:\n self.first = first\n self.limit = limit\n self.load_balancers = load_balancers\n self.next = next\n self.total_count = total_count",
"def get_instance(self, payload):\n return BrandsInformationInstance(self._version, payload, )",
"def __init__(self, maxnodepts = 64, initsize = 4):\n this = _coin.new_SbBSPTree(maxnodepts, initsize)\n try: self.this.append(this)\n except: self.this = this"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Assigns valid attributes to the first and second HSP objects.
|
def valid_strands(first_hsp_object: HSP, second_hsp_object: HSP) -> None:
if first_hsp_object.name == second_hsp_object.name:
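        # Exactly one of the two hits may be on the reverse strand: the
        # (a or b) and not (a and b) test below is a logical XOR on the strands.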
if (first_hsp_object.strand or second_hsp_object.strand) \
and not (first_hsp_object.strand and second_hsp_object.strand):
first_hsp_object.valid = True
second_hsp_object.valid = True
else:
first_hsp_object.valid = False
second_hsp_object.valid = False
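
# A minimal usage sketch (forward_hsp / reverse_hsp are hypothetical HSP objects
# with .name and .strand already populated by the caller):
#     valid_strands(forward_hsp, reverse_hsp)
#     # both .valid flags end up True iff exactly one hit is on the reverse strand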
|
[
"def set_attributes(self, model_1, obj_1, obj_2, overwrite=True):\n for (\n attr\n ) in (\n obj_2.traits()\n ): # Iterate through all attributes in obj_2. These should be the same traits as obj_1 assuming the precondition\n class_name = str(type(obj_2.traits()[attr])).strip(\"<>'\").split(\".\")[-1]\n # TODO: check for reactance tuples: str(obj_2.traits()[attr]._trait.klass).strip(\"<>'\").split('.')[-1] != (Int,Int,Int):\n\n if class_name == \"List\":\n phase_order = {\n \"A\": 0,\n \"B\": 1,\n \"C\": 2,\n \"N\": 3,\n } # Should only have to deal with 3 phases.\n #\n # BUG WARNING: The order of objects in the list is important and is used to determine the changes that are made\n # Try to ensure that phases are specified to avoid this problem\n # If number of elements in obj_1 is 0, all elements of obj_2 are added\n # If number of elements is the same, they are modified with a 1-1 comparison\n # If number of elements in obj_2 is < obj_1, set the first values of obj_1 as obj_2\n # If number of elements in obj_2 is > obj_1, set the all the values in obj_1 in the order they'r in obj_2 and append the extras\n # This will fail if obj_1 is (A, B, C) and obj_2 is (A, C), as it'll assign phase C to phase B.\n # This will also fail if obj_1 is (C) and obj_2 is (A,B,C) as C will have A assigned to it.\n # This will also fail if obj_1 is (A,B) and obj_2 is (A,C) as B will have C assigned to it.\n list_1 = getattr(obj_1, attr)\n list_2 = getattr(obj_2, attr)\n if list_1 is None or len(list_1) == 0:\n result_list = []\n for element in list_2:\n result_list.append(self.copy(model_1, element))\n setattr(obj_1, attr, result_list)\n continue\n elif list_2 is None or len(list_2) == 0:\n continue\n\n # Almost all Lists are of objects which have phases. Exceptions being windings, reactances and positions\n # Require the phases to be specified in both systems to modify based on phase\n has_phases = True\n for i in range(len(list_1)):\n if not (\n hasattr(list_1[0], \"phase\") and list_1[0].phase is not None\n ):\n has_phases = False\n for i in range(len(list_2)):\n if not (\n hasattr(list_2[0], \"phase\") and list_2[0].phase is not None\n ):\n has_phases = False\n if has_phases and len(list_1) > 0 and len(list_2) > 0:\n # Firstly sort the lists so they're in correct order by phase.\n list_1.sort(key=lambda x: phase_order[x.phase])\n list_2.sort(key=lambda x: phase_order[x.phase])\n list_1_phase = phase_order[list_1[0].phase]\n list_2_phase = phase_order[list_2[0].phase]\n list_1_idx = 0\n list_2_idx = 0\n while list_1_idx < len(list_1) and list_2_idx < len(list_2):\n if list_1_idx < len(list_1):\n list_1_phase = phase_order[list_1[list_1_idx].phase]\n else:\n list_1_phase = 1000000\n if list_2_idx < len(list_2):\n list_2_phase = phase_order[list_2[list_2_idx].phase]\n else:\n list_2_phase = 1000001\n\n # i.e. recurse\n if list_1_phase == list_2_phase:\n self.set_attributes(\n model_1,\n list_1[list_1_idx],\n list_2[list_2_idx],\n overwrite,\n )\n list_1_idx = list_1_idx + 1\n list_2_idx = list_2_idx + 1\n elif list_1_phase < list_2_phase:\n list_1_idx = (\n list_1_idx + 1\n ) # e.g. obj_1 = (A, B, C) and obj_2 = (B). We don't update this phase\n\n else:\n getattr(obj_1, attr).append(list_2[list_2_idx])\n list_2_idx = list_2_idx + 1\n\n elif len(list_1) == len(list_2):\n for i in range(len(list_1)):\n self.set_attributes(model_1, list_1[i], list_2[i], overwrite)\n\n elif len(list_1) > len(list_2):\n for i in range(len(list_2)):\n self.set_attributes(model_1, list_1[i], list_2[i], overwrite)\n\n else: # i.e. 
len(list_1) < len(list_2):\n for i in range(len(list_2)):\n if i < len(list_1):\n self.set_attributes(\n model_1, list_1[i], list_2[i], overwrite\n )\n else:\n getattr(obj_1, attr).append(list_2[i])\n\n else:\n value = getattr(obj_2, attr)\n if value is not None:\n if getattr(obj_1, attr) is not None and overwrite == False:\n continue\n setattr(obj_1, attr, value)",
"def _combine_site_props(attr_1, attr_2, len_1, len_2):\n if attr_1 is None and attr_2 is None:\n return None\n if attr_1 is None or attr_2 is None:\n new_site_properties = []\n if attr_1 is None:\n new_site_properties.extend([None for i in range(len_1)])\n elif len(attr_1) == 1:\n new_site_properties.extend([attr_1[0] for i in range(len_1)])\n elif len(attr_1) > 1:\n new_site_properties.extend(attr_1)\n\n if attr_2 is None:\n new_site_properties.extend([None for i in range(len_2)])\n elif len(attr_2) == 1:\n new_site_properties.extend([attr_2[0] for i in range(len_2)])\n elif len(attr_2) > 1:\n new_site_properties.extend(attr_2)\n\n return new_site_properties\n if len(attr_1) == 1 and len(attr_2) == 1:\n # If both properties lists are do not change within their respective trajectory\n if attr_1 == attr_2:\n # If both site_properties are the same, only store one\n return attr_1\n new_site_properties = [attr_1[0] for i in range(len_1)]\n new_site_properties.extend([attr_2[0] for i in range(len_2)])\n return new_site_properties\n if len(attr_1) > 1 and len(attr_2) > 1:\n # Both properties have site properties that change within the trajectory, concat both together\n return [*attr_1, *attr_2]\n\n new_site_properties = []\n if attr_1 is None:\n new_site_properties.extend([None for i in range(len_1)])\n elif len(attr_1) == 1:\n new_site_properties.extend([attr_1[0] for i in range(len_1)])\n elif len(attr_1) > 1:\n new_site_properties.extend(attr_1)\n\n if attr_2 is None:\n new_site_properties.extend([None for i in range(len_2)])\n elif len(attr_2) == 1:\n new_site_properties.extend([attr_2[0] for i in range(len_2)])\n elif len(attr_2) > 1:\n new_site_properties.extend(attr_2)\n\n return new_site_properties",
"def test_set_good_attributes(self):\r\n\r\n _values = (0, 1, 'Test Method', 'Test Boundary Conditions',\r\n 'Test Remarks')\r\n\r\n (_error_code,\r\n _error_msg) = self.DUT.set_attributes(_values)\r\n self.assertEqual(_error_code, 0)",
"def merge_hparams(hparams_1, hparams_2):\r\n hparams_map = hparams_1.values()\r\n hparams_map.update(hparams_2.values())\r\n return tf.contrib.training.HParams(**hparams_map)",
"def __checkIncludedHSPLs(self, hspl1, hspl2):\n subject1 = hspl1.findtext(\"{%s}subject\" % getHSPLNamespace())\n subject2 = hspl2.findtext(\"{%s}subject\" % getHSPLNamespace())\n action1 = hspl1.findtext(\"{%s}action\" % getHSPLNamespace())\n action2 = hspl2.findtext(\"{%s}action\" % getHSPLNamespace())\n object1 = hspl1.findtext(\"{%s}object\" % getHSPLNamespace())\n object2 = hspl2.findtext(\"{%s}object\" % getHSPLNamespace())\n trafficConstraints1 = hspl1.find(\"{%s}traffic-constraints\" % getHSPLNamespace())\n trafficConstraints2 = hspl2.find(\"{%s}traffic-constraints\" % getHSPLNamespace())\n\n m1 = re.match(\"(\\d+\\.\\d+\\.\\d+\\.\\d+(/\\d+)?)(:(\\d+|\\*|any))?\", object1)\n m2 = re.match(\"(\\d+\\.\\d+\\.\\d+\\.\\d+(/\\d+)?)(:(\\d+|\\*|any))?\", object2)\n objectCheck = False\n if m1 and m2:\n address1 = ip_network(m1.group(1))\n address2 = ip_network(m2.group(1))\n n1 = int(address1.network_address) >> (32 - address1.prefixlen)\n n2 = int(address2.network_address) >> (32 - address1.prefixlen)\n port1 = m1.group(4)\n port2 = m2.group(4)\n if n1 == n2 and (port1 == port2 or port1 == \"*\" or port1 == \"any\"):\n objectCheck = True\n\n if subject1 == subject2 and action1 == action2 and objectCheck and self.__checkEqualXML(trafficConstraints1, trafficConstraints2):\n return True\n\n return False",
"def test_class_attributes(self):\n self.cut2 = util.get_maas(self.token, tenant_name=self.tenant)\n\n self.assertEqual(self.cut.get_address(self.token),\n self.cut2.get_address(self.token),\n \"Class variable address not correctly set!\")\n self.assertEquals(self.cut.get_location(), self.cut2.get_location(),\n \"Class variable location not correctly set!\")\n self.assertEquals(self.cut.get_tenant(), self.cut2.get_tenant(),\n \"Class variable tenant not correctly set!\")",
"def ensure_compatible(left, right):\n conflicts = list(conflicting_pairs(left, right))\n if conflicts:\n raise ValueError('conflicting values for object/property pairs:'\n f' {conflicts!r}')",
"def copy_metadata(self, other):\n # LOG.debug(f'Copying metadata from {other}') # BUG: Causes infinite recursion!\n if isinstance(other, Variable):\n for name in self._metadata:\n value = getattr(self, name, None)\n if value is None:\n value = getattr(other, name, None)\n object.__setattr__(self, name, value)",
"def test_assignment(self):\n\t\tinstanceDict = object.__getattribute__(self.p, '__dict__')\n\n\t\tself.p.name = 'Testing'\n\t\tself.p.foo = 'Bar'\n\n\t\tself.assertIn('name', instanceDict)\n\t\tself.assertIn('foo', instanceDict)\n\t\t# should have id, name, foo, language\n\t\tself.assertEqual(len(instanceDict['pppending']), 4)\n\t\tself.assertSameElements(instanceDict['pppending'], ['id','language','name','foo'])\n\n\t\tself.assertTrue(self.p.name == 'Testing')\n\t\tself.assertTrue(self.p.foo == 'Bar')",
"def __init_properties(self, names):\n\n def get(self, name):\n return self.hsgp_attributes[name]\n\n for name in names:\n get_partial = partial(get, name=name)\n setattr(self.__class__, name, property(get_partial))",
"def set_variables(fasta1,fasta2):\n l1 = len(fasta1)\n l2 = len(fasta2)\n print(\"lengths: l1={} l2={}\".format(l1, l2))\n if l1 >= l2:\n s1 = fasta1 #longer length goes to s1 and shorter to s2\n s2 = fasta2\n else:\n s1 = fasta2\n s2 = fasta1\n l1, l2 = l2, l1 # swap the two lengths aside from the sequence variables\n return s1,s2,l1,l2",
"def blendTwoAttr(objects, time=(), blender=\"string\", controlPoints=bool, attribute0=\"string\", attribute1=\"string\", shape=bool, driver=int, attribute=\"string\", name=\"string\"):\n pass",
"def test_build_params_instances_do_not_share_values(self):\n bps1 = self.BuildParams()\n bps2 = self.BuildParams()\n\n bps1.x = 1\n assert bps1.x == 1\n assert bps2.x is None\n\n bps2.x = 2\n assert bps2.x == 2\n assert bps1.x == 1",
"def test_two_hosts_swap_priorities(self):\n self.basic_flow()\n testflow.step(\n \"Swapping SPM priorities between host %s and %s\",\n self.high_spm_priority_host, self.low_spm_priority_host\n )\n self.high_spm_priority_host, self.low_spm_priority_host = (\n self.low_spm_priority_host, self.high_spm_priority_host\n )\n self.hosts = [self.high_spm_priority_host, self.low_spm_priority_host]\n self.basic_flow()",
"def test_attributes_belong_to_agreement_or_mixin(self):\n # m1 & m2 just mixins containing some attributes\n self.entity.mixins.append(test_data.m1)\n self.entity.mixins.append(test_data.m2)\n\n self.entity.attributes = {\"occi.agreement.effectiveFrom\": \"14001245\",\n \"unknown\": \"whatever\"}\n\n self.assertRaises(AttributeError, self.agree_back.create, self.entity,\n self.extras)\n LOG.info(\"Agreement wont allow unknown attributes\")",
"def set_attributes_all_required(instance, attrs, res):\r\n for attr in attrs:\r\n attr_val = res.get(attr)\r\n # all attributes are required\r\n if not attr_val:\r\n print(attr)\r\n abort(400)\r\n setattr(instance, attr, attr_val)\r\n return instance",
"def test_that_required_attributes_are_used(self):\n # m3 has required attributes \n self.entity.mixins.append(test_data.m3)\n\n self.entity.attributes = {\"occi.agreement.effectiveFrom\": \"14001245\",\n \"os\": \"ubuntu\", \"vm_cores\": \"4\"}\n self.assertRaises(AttributeError, self.agree_back.create, self.entity,\n self.extras)\n LOG.info(\"Agreement ensures use of required variables\")",
"def field_pairs(n1, n2, empty=\"\"):\n\n def normalize_field(value):\n \"\"\"\n For now, just make everything lowercase and strip white space.\n \"\"\"\n return value.lower().replace(\" \", \"\")\n\n\n def normalize_email(email):\n \"\"\"\n Only return the username, not the domain.\n \"\"\"\n return normalize_field(email.split(\"@\")[0])\n\n\n normalize = defaultdict(lambda: normalize_field)\n normalize['email'] = normalize_email\n\n fields = set(n1._fields) | set(n2._fields)\n for field in fields:\n norm = normalize[field]\n yield norm(getattr(n1, field, empty)), norm(getattr(n2, field, empty))",
"def set_sp(self, sp_entity_id):\n self.sp = {'id': sp_entity_id}\n self.sp['config'] = get_idp_sp_config().get(sp_entity_id)\n\n sp = ServiceProvider.objects.filter(entity_id=sp_entity_id).first()\n\n if not self.sp['config']:\n self.sp['config'] = copy.deepcopy(settings.DEFAULT_SPCONFIG)\n self.sp['config']['force_attribute_release'] = False\n\n if not sp:\n # TODO: get these information from sp's metadata\n sp = ServiceProvider.objects.create(entity_id=sp_entity_id,\n display_name=sp_entity_id,\n is_valid=True,\n is_active=True,\n last_seen=timezone.localtime())\n elif not sp.is_active:\n msg = _(\"{} was disabled. \"\n \"Please contact technical staff for informations.\")\n raise DisabledSP(msg.format(sp_entity_id))\n else:\n sp.last_seen = timezone.localtime()\n sp.save()\n\n if self.sp['config']['force_attribute_release']:\n # IdP ignores what SP requests for and release what you configured\n return\n\n # check if SP asks for required attributes\n req_attrs = self.IDP.config.metadata.attribute_requirement(\n sp_entity_id)\n if not req_attrs:\n return\n\n # clean up unrequested attributes\n # TODO a bettere generalization with SAML2 attr mapping here\n req_attr_list = [entry['name'] for entry in req_attrs['required']]\n opt_attr_list = [entry['name'] for entry in req_attrs['optional']]\n\n # conversion: avoids that some attrs have identifiers instead of names\n conv_req_attr_list = self.convert_attributes(req_attr_list)\n conv_opt_attr_list = self.convert_attributes(opt_attr_list)\n\n attr_list = [attr for attr in conv_req_attr_list]\n attr_list.extend(conv_opt_attr_list)\n\n # updates newly requested attrs\n for attr in attr_list:\n # if there's some configuration about mapping ...\n if attr in settings.DEFAULT_SPCONFIG['attribute_mapping']:\n self.sp['config']['attribute_mapping'][attr] = settings.DEFAULT_SPCONFIG['attribute_mapping'][attr]\n # .. otherwise map it as it come from sp's metadata\n else:\n self.sp['config']['attribute_mapping'][attr] = [attr]\n\n # clean up unrequired\n to_be_removed = []\n for attr in self.sp['config']['attribute_mapping']:\n if attr not in attr_list:\n to_be_removed.append(attr)\n for attr in to_be_removed:\n del self.sp['config']['attribute_mapping'][attr]\n\n # update SP's attribute map\n sp.attribute_mapping = json.dumps(self.sp['config']['attribute_mapping'],\n indent=2)\n sp.save()\n\n # check if some required are unavailable...\n if req_attrs['required']:\n msg = _(\"{} requested unavailable attribute '{}' to this IdP. \"\n \"Please contact SP technical staff for support.\")\n # if some required attributes are unavailable the IdP give this warning\n for req in conv_req_attr_list:\n if req not in self.sp['config']['attribute_mapping']:\n logger.info(msg)\n raise UnavailableRequiredAttributes(\n msg.format(sp_entity_id, req))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Extends any missing bps in the hsp_object that were trimmed off by blastn, by comparing against the respective subject (sbjct) sequence.
|
from Bio import SeqIO

def extend_sbjct(hsp_object, database, primer_dict):
start = hsp_object.start
end = hsp_object.end
query_start = hsp_object.query_start
query_end = hsp_object.query_end
len_missing = len(primer_dict[hsp_object.name]) - abs(end - start)
begin_missing = query_start - 1
end_missing = abs(len(primer_dict[hsp_object.name]) - query_end)
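    # begin_missing / end_missing are the bases blastn trimmed from the 5' / 3'
    # ends of the query alignment; len_missing is the total shortfall against
    # the full reference sequence stored in primer_dict.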
if len_missing > 0:
with open(database, 'r') as fasta:
for contig in SeqIO.parse(fasta, 'fasta'):
if contig.name == hsp_object.contig_name:
if start > end:
seq_found = contig[end - end_missing - 1: start + begin_missing]
hsp_object.sbjct = seq_found.reverse_complement().seq
else:
seq_found = contig[start - begin_missing - 1: end + end_missing]
hsp_object.sbjct = seq_found.seq
|
[
"def test_remove_h_bonds(self):\n test_mol = self.mHBonds.generate_h_bonded_structures()[0]\n test_mol.remove_h_bonds()\n\n for i, atm1 in enumerate(test_mol.atoms):\n for j, atm2 in enumerate(test_mol.atoms):\n if j < i and test_mol.has_bond(atm1, atm2):\n bd = test_mol.get_bond(atm1, atm2)\n self.assertNotAlmostEqual(bd.order, 0.1)",
"def test_Blist():\n B1 = Bead(1,'TES','T',1,np.array([0.,0.,0.]),np.array([0.,0.,0.]),'T')\n B2 = Bead(1,'TES','T',2,np.array([0.,0.,0.]),np.array([0.,0.,0.]),'T')\n B3 = Bead(1,'TES','T',3,np.array([0.,0.,0.]),np.array([0.,0.,0.]),'T')\n Blst = Blist()\n B12 = Bond([B1,B2],[])\n B23 = Bond([B2,B3],[])\n B13 = Bond([B1,B3],[])\n Blst.append(B12)\n Blst.append(B23)\n Blst.append(B13)\n assert Blst[0] == B12\n Blst.remove(B12)\n assert Blst[0] == B23\n Blst.append(B12)\n Blst.removeByIndex(3)\n assert Blst[0] == B12",
"def update_ss_bridges(self):\n sg_partners = {}\n for residue in self.residues:\n if isinstance(residue, aa.CYS):\n atom = residue.get_atom(\"SG\")\n if atom is not None:\n sg_partners[atom] = []\n for atom in sg_partners:\n for partner, value in sg_partners.items():\n if atom == partner or sg_partners[atom] != []:\n continue\n dist = util.distance(atom.coords, partner.coords)\n if dist < BONDED_SS_LIMIT:\n sg_partners[atom].append(partner)\n value.append(atom)\n for atom in sg_partners:\n res1 = atom.residue\n numpartners = len(sg_partners[atom])\n if numpartners == 1:\n partner = sg_partners[atom][0]\n res2 = partner.residue\n res1.ss_bonded = True\n res1.ss_bonded_partner = partner\n self.apply_patch(\"CYX\", res1)\n _LOGGER.debug(f\"{res1} - {res2}\")\n elif numpartners > 1:\n error = f\"WARNING: {res1} has multiple potential \"\n error += \"SS-bridge partners\"\n _LOGGER.warning(error)\n elif numpartners == 0:\n _LOGGER.debug(f\"{res1} is a free cysteine\")",
"def remove(self, bno):\r\n del self.breakpt[bno]",
"def open_breathing_helices(seq, ss, free=6):\n nbrs = set()\n pt = ril.make_pair_table(ss, base=0)\n\n # mutable secondary structure \n nbr = list(ss)\n\n rec_fill_nbrs(nbrs, ss, nbr, pt, (0, len(ss)), free)\n\n nbrs.add(''.join(nbr))\n\n return nbrs",
"def prune_non_seg(self):\n self.fullsequence = self.sequence # First back up the original sequence\n self.fullvariantset = self.variantset\n self.fullvariants = self.variants\n self.sequence = MultipleSeqAlignment([]) # Blank the sequence to be worked on\n\n print \"\\nPruning non-segregating sites...\"\n locs = []\n for curvar in self.variantset:\n locs.append(curvar)\n locs.sort()\n\n stripped = {}\n seqnames = []\n for seq in self.fullsequence:\n stripped[seq.name] = []\n seqnames.append(seq.name)\n\n for i in xrange(len(locs)):\n loc = locs[i]\n self.pruned_to_full.append(loc)\n seqbits = self.fullsequence[:, loc]\n name = 0\n for seqbit in seqbits:\n stripped[seqnames[name]].append(seqbit)\n name += 1\n\n for strip in stripped.keys():\n self.sequence.append(SeqRecord(Seq(''.join(stripped[strip])), name=strip, id=strip))\n\n self.variantset = set()\n self.variants = {}\n self.variants_from_sequence() # Re-run on stripped sequence",
"def bsr(blast_object:Blastn, max_bits_dict:dict):\n\n for hsp in blast_object.hsp_objects:\n hsp.bsr = hsp.bits / max_bits_dict[hsp.name]\n\n if hsp.bsr < MIN_BSR:\n blast_object.remove_hsp_object_all(hsp)",
"def addGapsToHMMSeqs(self):\n for seq in self.records:\n seq.seq.insertAllGaps(self.total_gaps)",
"def remove_identical_seqs(self):\n debug(\"remove identical seqs\")\n if len(self.new_seqs_otu_id) > 0:\n if _DEBUG:\n sys.stdout.write(\"running remove identical twice in a row\"\n \"without generating new alignment will cause errors. skipping\\n\")\n return\n tmp_dict = dict((taxon.label, self.data.aln[taxon].symbols_as_string()) for taxon in self.data.aln)\n old_seqs = tmp_dict.keys()\n # Adding seqs that are different, but needs to be maintained as diff than aln that the tree has been run on\n avg_seqlen = sum(self.data.orig_seqlen) / len(self.data.orig_seqlen) # HMMMMMMMM\n assert self.config.seq_len_perc <= 1\n seq_len_cutoff = avg_seqlen * self.config.seq_len_perc\n for gb_id, seq in self.new_seqs.items():\n if gb_id.split(\".\") == 1:\n debug(gb_id)\n if self.blacklist is not None and gb_id in self.blacklist:\n debug(\"gb_id in blacklist, not added\")\n pass\n elif gb_id in self.newseqs_acc: # added to increase speed. often seq was found in another blast file\n debug(\"passed, was already added\")\n pass\n else:\n if len(seq.replace(\"-\", \"\").replace(\"N\", \"\")) > seq_len_cutoff:\n if self.config.blast_loc != \"remote\":\n tax_name = None\n # ######################################################\n # ### new implementation of rank for delimitation\n if type(self.mrca_ncbi) is int:\n mrca_ncbi = self.mrca_ncbi\n elif len(self.mrca_ncbi) == 1:\n mrca_ncbi = list(self.mrca_ncbi)[0]\n else:\n debug(self.mrca_ncbi)\n debug(\"think about something to do!\")\n rank_mrca_ncbi = self.ids.ncbi_parser.get_rank(mrca_ncbi)\n # get rank to delimit seq to ingroup_mrca\n # get name first\n if gb_id[:6] == \"unpubl\":\n debug(\"unpubl data\")\n debug(self.data.gb_dict[gb_id])\n tax_name = self.data.gb_dict[gb_id][u\"^ot:ottTaxonName\"]\n ncbi_id = self.data.gb_dict[gb_id][u\"^ncbi:taxon\"]\n if tax_name is None:\n tax_name = self.data.gb_dict[gb_id][u'^user:TaxonName']\n if ncbi_id is None:\n debug(tax_name.split(\" \")[0])\n tax_lin_name = tax_name.split(\" \")[0]\n tax_lin_name = tax_lin_name.split(\"_\")[0]\n print(tax_lin_name)\n ncbi_id = self.ids.ncbi_parser.get_id_from_name(tax_lin_name)\n # ncbi_id = 00000\n elif len(gb_id.split(\".\")) >= 2:\n if gb_id in self.data.gb_dict.keys() and 'staxids' in self.data.gb_dict[gb_id].keys():\n tax_name = self.data.gb_dict[gb_id]['sscinames']\n ncbi_id = self.data.gb_dict[gb_id]['staxids']\n else:\n tax_name = self.ids.find_name(acc=gb_id)\n if tax_name is None:\n sys.stderr.write(\"no species name returned for {}\".format(gb_id))\n ncbi_id = self.ids.map_acc_ncbi(gb_id)\n assert tax_name is not None\n assert ncbi_id is not None\n tax_name = str(tax_name).replace(\" \", \"_\")\n input_rank_id = self.ids.ncbi_parser.get_downtorank_id(ncbi_id, rank_mrca_ncbi)\n # #######################################################\n if input_rank_id == mrca_ncbi: # belongs to ingroup mrca -> add to data, if not, leave it out\n # debug(\"input belongs to same mrca\")\n self.newseqs_acc.append(gb_id)\n otu_id = self.data.add_otu(gb_id, self.ids)\n self.seq_dict_build(seq, otu_id, tmp_dict)\n else:\n self.newseqs_acc.append(gb_id)\n otu_id = self.data.add_otu(gb_id, self.ids)\n self.seq_dict_build(seq, otu_id, tmp_dict)\n old_seqs_ids = set()\n for tax in old_seqs:\n old_seqs_ids.add(tax)\n assert old_seqs_ids.issubset(tmp_dict.keys())\n for tax in old_seqs:\n del tmp_dict[tax]\n self.new_seqs_otu_id = tmp_dict # renamed new seq to their otu_ids from GI's, but all info is in self.otu_dict\n debug(\"len new seqs dict after remove identical\")\n 
debug(len(self.new_seqs_otu_id))\n with open(self.logfile, \"a\") as log:\n log.write(\"{} new sequences added from genbank after removing identical seq, \"\n \"of {} before filtering\\n\".format(len(self.new_seqs_otu_id), len(self.new_seqs)))\n self.data.dump()",
"def remove_bases(self, base_list):\n self._logger.debug(\"========== remove bases ==========\")\n self._logger.debug(\"Number of staple bases %d\" % len(self.staple_bases))\n self._logger.debug(\"Number of bases to remove %d\" % len(base_list))\n\n # Create base helix position maps.\n removed_staple_bases = set()\n removed_scaffold_bases = set()\n for base in base_list:\n if base.is_scaf:\n removed_scaffold_bases.add(base.p)\n else:\n removed_staple_bases.add(base.p)\n #__for base in base_list\n\n # Modify list of staple bases.\n remaining_staple_bases = []\n for base in self.staple_bases:\n if base.p not in removed_staple_bases:\n remaining_staple_bases.append(base)\n else:\n base.up = None\n base.down = None\n if base.across: \n base.across.across = None \n base.across = None \n #__if base.p not in removed_staple_bases\n #__for base in self.staple_bases\n\n self._logger.debug(\"Number of bases removed %d\" % (len(self.staple_bases) - len(remaining_staple_bases)))\n self._logger.debug(\"Number of bases remaining %d\" % len(remaining_staple_bases)) \n self.staple_bases = remaining_staple_bases\n for base in self.staple_bases:\n self._logger.debug(\"Base ID %d h %d p %d\" % (base.id, base.h, base.p))\n\n # Regenerate the helix coordinate and reference frame \n # arrays from the modified list of bases.\n self.regenerate_coordinate_arrays()\n\n # Rebuild base position maps.\n self.build_base_pos_maps()",
"def if_add_H_bs(self):\n if 'H' not in self.bs_dict and 'h' not in self.bs_dict and 1 not in self.bs_dict:\n for atom in self.cluster:\n if atom.element.upper() == 'H':\n return True\n return False",
"def non_gap_bases(baselist):\n\treturn [b for b in baselist if b not in gaps]",
"def _find_correct_slot(self, obj):\n if self._n == 0:\n self.append(obj)\n return\n\n for k in range(0, self._n):\n if self._A[k] < obj:\n self._insert(obj, k)\n return\n self.append(obj)",
"def smoothByOldest(self, BB=None, removeNANs=True, skipEmpty=True):\n if not self.finished:\n raise Exception('Regitration has not been done/finished!!!!')\n# if BB is None:\n# BB = n.array([self.BBs[:,0].min(), self.BBs[:,1].min(), self.BBs[:,2].max(), self.BBs[:,3].max()])\n for i in xrange(self.num):\n idx = self.processOrder[i]\n if BB is not None:\n BBs = n.vstack((BB, self.BBs[idx]))\n overlaping = r.getOverlapingBoundingBoxes(BBs, 0, 1)\n if overlaping.size!=1:\n raise Exception('This means that the code in getOverlapingBoundingBoxes() has to be carefully vectorized')\n if not overlaping:\n if not skipEmpty:\n yield n.zeros((0,3))\n continue\n newxyz = self.loadVar('xyzs', idx)\n if removeNANs and self.nanflags[idx]:\n nan = self.loadVar('nans', idx)\n newxyz = newxyz[n.logical_not(nan.ravel()),:]\n if (i>0) and (newxyz.size>0):\n older = self.processOrder[:i]\n #find overlaping bounding boxes with previously registered clouds\n overlaping = r.getOverlapingBoundingBoxes(self.BBs, idx, older)\n older = [old for old,ovl in zip(older, overlaping) if ovl]\n toRemove = n.zeros((newxyz.shape[0],),dtype=n.bool)\n for old in older:\n toRemove = n.logical_or(toRemove, r.testPoinstInsidePolygon(newxyz, self.rectangles[old]))\n newxyz = newxyz[n.logical_not(toRemove),:]\n if (BB is not None) and (newxyz.size>0):\n newxyz = newxyz[n.logical_and(n.logical_and(newxyz[:,0]>=BB[0], newxyz[:,1]>=BB[1]),\n n.logical_and(newxyz[:,0]<=BB[2], newxyz[:,1]<=BB[3]) ),\n :]\n if skipEmpty and (newxyz.size==0):\n continue\n yield newxyz",
"def complement(self, freeblock):\n copy = self.normalized()\n comp = Agenda()\n desc = freeblock.desc\n cur_time = freeblock.begin #arrow date and time \n for appt in copy.appts:\n if appt < freeblock:\n continue\n if appt > freeblock:\n if cur_time < freeblock.end:\n comp.append(Appt(cur_time,freeblock.end, desc))\n cur_time = freeblock.end\n break\n if cur_time < appt.begin:\n # print(\"Creating free time from\", cur_time, \"to\", appt.begin)\n comp.append(Appt(cur_time, appt.begin, desc))\n cur_time = max(appt.end,cur_time)\n \n if cur_time < freeblock.end:\n # print(\"Creating final free time from\", cur_time, \"to\", freeblock.end)\n comp.append(Appt(cur_time, freeblock.end, desc))\n return comp",
"def test_remove_job_no(self):\r\n # todo add larger sample and more removes\r\n remove_job_no(2)\r\n self.assertEqual(self.h_1.heap[1], self.j_1_1)\r\n remove_job_no(1)\r\n self.assertEqual(self.h_1.heap[0], self.j_1_1)",
"def _need_to_remove_head(self, signal):\n if self._is_empty():\n return False\n elif self._is_too_big():\n return True\n elif self._is_head_too_old(signal):\n return True\n else:\n return False",
"def bridge(path):\n remove_flag = [False] * len(path) # hop flag to one meant to be removed\n asn_path = [hop.asn for hop in path]\n holes = find_holes(asn_path) # indexes of None (ASN) sub-sequences\n last_idx = len(path) - 1\n for start, end in holes:\n # only check the sub-sequences having type dbtools.AddrType.Others hops\n if start > 0 and end < last_idx and db.AddrType.Others in [hop.type for hop in path[start:end+1]]:\n # if there is known relation between the two ASes wrapping the None sub-sequence\n left_asn = path[start-1].asn\n right_asn = path[end+1].asn\n if left_asn == right_asn or as_rel.has_relation((left_asn, right_asn)) is not None:\n # remove only the hop of type dbtools.AddrType.Others\n for idx in range(start, end+1):\n if path[idx].type == db.AddrType.Others:\n remove_flag[idx] = True\n return [path[idx] for idx in range(last_idx+1) if not remove_flag[idx]]",
"def fixMissingNodes(self):\n\t\tprint(' bVascularTracing.fixMissingNodes() numNodes:', self.numNodes())\n\t\tfor edgeIdx, edge in enumerate(self.edgeIter()):\n\t\t\tpreNode = edge['preNode']\n\t\t\tif preNode is None:\n\t\t\t\tslabIdx = edge['slabList'][0]\n\t\t\t\tx = self.x[slabIdx]\n\t\t\t\ty = self.y[slabIdx]\n\t\t\t\tz = self.z[slabIdx]\n\t\t\t\tx = float(x)\n\t\t\t\ty = float(y)\n\t\t\t\tz = float(z)\n\t\t\t\tnewNodeIdx = self.newNode(x,y,z)\n\t\t\t\tself.nodeDictList[newNodeIdx]['edgeList'] = [edgeIdx]\n\t\t\t\tself.nodeDictList[newNodeIdx]['nEdges'] = 1\n\t\t\t\t#\n\t\t\t\tedge['preNode'] = newNodeIdx\n\t\t\t\t#print('edge:', edgeIdx, 'after adding preNode', newNodeIdx, 'edge:', self.edgeDictList[edgeIdx])\n\n\t\t\tpostNode = edge['postNode']\n\t\t\tif postNode is None:\n\t\t\t\tslabIdx = edge['slabList'][-1]\n\t\t\t\tx = self.x[slabIdx]\n\t\t\t\ty = self.y[slabIdx]\n\t\t\t\tz = self.z[slabIdx]\n\t\t\t\tx = float(x)\n\t\t\t\ty = float(y)\n\t\t\t\tz = float(z)\n\t\t\t\tnewNodeIdx = self.newNode(x,y,z)\n\t\t\t\tself.nodeDictList[newNodeIdx]['edgeList'] = [edgeIdx]\n\t\t\t\tself.nodeDictList[newNodeIdx]['nEdges'] = 1\n\t\t\t\t#\n\t\t\t\tedge['postNode'] = newNodeIdx\n\n\t\tprint(' done fixMissingNodes() numNodes:', self.numNodes())"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calculate the Hamming distance between two sequences
|
def hamming_dist(seq1, seq2):
    # zip() pairs positions only up to the shorter sequence, so any overhang is ignored.
    dist = sum(x != y for x, y in zip(seq1, seq2))
    return dist
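
# A minimal usage sketch:
#     hamming_dist("GATTACA", "GACTATA")  # -> 2 (positions 3 and 6 differ)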
|
[
"def hamming_distance(bits1: str, bits2: str) -> int:\n bits1 = [int(b) for b in bits1]\n bits2 = [int(b) for b in bits2]\n return hamming(bits1, bits2) * len(bits1)",
"def hammingDistance( s1, s2 ):\n strLen = len( s1 )\n count = 0\n for i in range( strLen ):\n if s1[i] != s2[i]:\n count += 1\n return count",
"def test_get_hamming_distance(self):\r\n\r\n str_1 = \"GATCAACCGGTAC\"\r\n str_2 = \"GACTAAGGGGTAC\"\r\n\r\n hamming_distance = rau.get_hamming_distance(str_1, str_2)\r\n\r\n self.assertEqual(hamming_distance, 4)\r\n\r\n str_1 = \"GATCAACCGGTA\"\r\n str_2 = \"GATCAACCGGTAC\"\r\n\r\n hamming_distance = rau.get_hamming_distance(str_1, str_2)\r\n\r\n self.assertEqual(hamming_distance, -1)",
"def hash_distance(left_hash, right_hash):\n if len(left_hash) != len(right_hash):\n raise ValueError('Hamming distance requires two strings of equal length')\n dist = sum(map(lambda x: 0 if -3 <= int(x[0], 16) - int(x[1], 16) <= 3 else 1, zip(left_hash, right_hash)))\n # print \"\\nDistance is: \" + str(dist)\n return dist",
"def hamming_distance(i, j):\n # TODO: Find something better than this.\n return bin(i ^ j).count('1')",
"def calcHammDist(a, b):\n if (type(a) != bytes and type(a) != bytearray):\n raise TypeError(\"a is not a bytes object\")\n if (type(b) != bytes and type(b) != bytearray):\n raise TypeError(\"b is not a bytes object\")\n if len(a) != len(b):\n raise ValueError(\"Bytes objects must be of equal length.\")\n if len(a) == 0:\n return 0\n dist = 0\n for (i, j) in zip(a, b):\n dist += bin(i ^ j).count(\"1\")\n return dist",
"def hamming_distance(s1, s2, shortest=False):\n if len(s1) != len(s2):\n if shortest:\n length = min(len(s1), len(s2))\n s1 = s1[:length]\n s2 = s2[:length]\n else:\n raise ValueError('strings of unequal length')\n return sum(ch1 != ch2 for ch1, ch2 in zip(s1, s2))",
"def hamdist(str1, str2):\n diffs = 0\n if len(str1) != len(str2):\n return max(len(str1),len(str2))\n for ch1, ch2 in zip(str1, str2):\n if ch1 != ch2:\n\t diffs += 1\n return diffs",
"def hamdist(self,str1, str2):\n diffs = 0\n for ch1, ch2 in zip(str1, str2):\n if ch1 != ch2:\n diffs += 1\n return diffs",
"def hamming_distance(X_val, X_train):\r\n\tX_train_trans = np.transpose(X_train).astype(int)\r\n\treturn X_val.shape[1] - X_val @ X_train_trans - (1 - X_val) @ (1 - X_train_trans)",
"def hamming_distance(this: dict, other: dict) -> int:\n\n return len([k for k, v in this.items() if k in other and other[k] != v])",
"def hamming_d(a: bytes, b: bytes) -> int:\n assert len(a) == len(b)\n return sum(\n count_set_bits(a[i] ^ b[i])\n for i in range(len(a))\n )",
"def hamming_distance(x: Union[np.ndarray, np.void],\n y: Union[np.ndarray, np.void],\n **kwargs: bool) -> Union[int, float]:\n # pylint: disable=invalid-name\n if not fuav.is_1d_like(x):\n raise IncorrectShapeError('The x array should be 1-dimensional.')\n if not fuav.is_1d_like(y):\n raise IncorrectShapeError('The y array should be 1-dimensional.')\n\n # Transform the arrays to unstructured\n x_array = fuat.as_unstructured(x).reshape(-1)\n y_array = fuat.as_unstructured(y).reshape(-1)\n\n if not fuav.is_textual_array(x_array):\n raise ValueError('The x array should be textual.')\n if not fuav.is_textual_array(y_array):\n raise ValueError('The y array should be textual.')\n\n if x_array.shape[0] != y_array.shape[0]:\n raise IncorrectShapeError('The x and y arrays should have the same '\n 'length.')\n\n def kw_hamming_distance(vec):\n return hamming_distance_base(vec[0], vec[1], **kwargs)\n\n distance = np.apply_along_axis(kw_hamming_distance, 0,\n np.vstack((x_array, y_array)))\n distance = distance.sum()\n return distance",
"def manDist(A, B):\n return abs(A[0] - B[0]) + abs(A[1] - B[1])",
"def distance(image_path, other_image_path):\n image_hash = average_hash(image_path)\n other_image_hash = average_hash(other_image_path)\n\n return _hamming_distance(image_hash, other_image_hash)",
"def hamming_search(input_data, db_data):\n import scipy\n input_data = (input_data != 0)\n db_data = (db_data != 0)\n dist = scipy.spatial.distance.hamming(input_data, db_data)\n return dist",
"def _dameraulevenshtein(seq1, seq2):\n # codesnippet:D0DE4716-B6E6-4161-9219-2903BF8F547F\n # Conceptually, this is based on a len(seq1) + 1 * len(seq2) + 1 matrix.\n # However, only the current and two previous rows are needed at once,\n # so we only store those.\n oneago = None\n thisrow = list(range(1, len(seq2) + 1)) + [0]\n for x in range(len(seq1)):\n # Python lists wrap around for negative indices, so put the\n # leftmost column at the *end* of the list. This matches with\n # the zero-indexed strings and saves extra calculation.\n twoago, oneago, thisrow = oneago, thisrow, [0] * len(seq2) + [x + 1]\n for y in range(len(seq2)):\n delcost = oneago[y] + 1\n addcost = thisrow[y - 1] + 1\n subcost = oneago[y - 1] + (seq1[x] != seq2[y])\n thisrow[y] = min(delcost, addcost, subcost)\n # This block deals with transpositions\n if (x > 0 and y > 0 and seq1[x] == seq2[y - 1]\n and seq1[x - 1] == seq2[y] and seq1[x] != seq2[y]):\n thisrow[y] = min(thisrow[y], twoago[y - 2] + 1)\n return thisrow[len(seq2) - 1]",
"def sumOfPairsDistanceMSA(msa1, msa2):\n\n\ttempfilein1 = \"temp/dis_msa1.fasta\"\n\ttempfilein2 = \"temp/dis_msa2.fasta\"\n\tio.exportGroupOfSequencesToFASTA(msa1,tempfilein1)\n\tio.exportGroupOfSequencesToFASTA(msa2,tempfilein2)\n\tcommand = \"./t_coffee -other_pg aln_compare -al1 \"+tempfilein1+\" -al2 \"+tempfilein2+\" >temp/tcoffee_dis.txt\"\n\tos.system(command)\n\ttxt = readTxtFile(\"temp/tcoffee_dis.txt\")\n\ttxt = txt.split(\"\\n\")\n\tline = txt[2].split(\" \")\n\tvalues = []\n\tfor item in line:\n\t\ttry:\n\t\t\tvalues.append(float(item))\n\t\texcept ValueError:\n\t\t\ti = 1\n\tscore = values[-1]\n\treturn score",
"def manhattan_distance(x, y):\n\n return sum(abs(a - b) for a, b in zip(x, y))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Modifies the hsp object to determine whether it faces the end of its contig and lies within MAX_PERC_END of the amplicon length from the contig boundary.
|
def valid_dir(hsp: HSP):
dist_end = abs((hsp.db_length + 1) - hsp.start - hsp.amp_len)
dist_start = abs(hsp.start - hsp.amp_len)
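    # dist_end / dist_start estimate how far a full-length amplicon anchored at
    # hsp.start would fall from the contig's end / start boundary, respectively.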
if dist_end <= (MAX_PERC_END * hsp.amp_len):
hsp.location = True
hsp.end_dist = dist_end
elif dist_start <= (MAX_PERC_END * hsp.amp_len):
hsp.location = True
hsp.end_dist = dist_start
else:
hsp.location = False
|
[
"def in_endcaps(c: Cylinder, p : np.array)->bool:\n close = np.isclose(np.array([p[2],p[2]]), np.array([c.zmin, c.zmax]), atol=1e-06)\n return close.any()",
"def H2UpperLimit(self):\n f = not self.normview\n if f:\n self.normalize()\n\n def recalcfit(self):\n self.s.prepareFit(-1, all=True)\n self.s.calcFit(-1, redraw=True)\n self.s[self.s.ind].mask.set((self.s[self.s.ind].fit.inter(self.s[self.s.ind].spec.x()) < 0.99) * (self.s[self.s.ind].spec.y() - 1 < 2 * self.s[self.s.ind].spec.err()))\n self.s[self.s.ind].set_fit_mask()\n\n z_grid = np.linspace(self.fit.sys[0].z.min, self.fit.sys[0].z.max, int((self.fit.sys[0].z.max-self.fit.sys[0].z.min)/self.fit.sys[0].z.step)+1)\n N_grid = np.linspace(self.fit.sys[0].Ntot.min, self.fit.sys[0].Ntot.max, int((self.fit.sys[0].Ntot.max - self.fit.sys[0].Ntot.min) / self.fit.sys[0].Ntot.step)+1)\n z_save = self.fit.sys[0].z.val\n for N in N_grid:\n self.fit.setValue('Ntot_0', N)\n print(N)\n for z in z_grid:\n self.fit.setValue('z_0', z)\n recalcfit(self)\n print(z, np.sum(self.s[self.s.ind].mask.x()), np.sum(self.s.chi() > 0), np.sum(self.s.chi() > 2))\n if np.sum(self.s[self.s.ind].mask.x()) < 40 or (np.sum(self.s.chi() > 0) * 0.0455 > np.sum(self.s.chi() > 2)):\n break\n if z == z_grid[-1]:\n break\n else:\n z_save = z\n else:\n print('Limit is not reached. Please checked Ntot boundaries.')\n return\n self.fit.setValue('z_0', z_save)\n self.fit.setValue('Ntot_0', N_grid[np.argwhere(N_grid == N)[0]-1])\n recalcfit(self)\n print(np.sum(self.s[self.s.ind].mask.x()), np.sum(self.s.chi() > 0), np.sum(self.s.chi() > 2))\n\n self.statusBar.setText('H2 upper limit is: ' + str(self.fit.sys[0].Ntot.val))",
"def is_endcap_electrode(self, coords: CoordsVar) -> bool:\r\n if self._in_electrode_endcap(coords) == 0:\r\n # if in encap\r\n if self._in_other_electrode(coords) <= 0:\r\n # and inside the trap\r\n return True\r\n return False",
"def within_waypoint(self):\n\t\tif len(self.waypoints) == 0:\n\t\t\treturn None\n\t\twaypoint_location = self.get_relative_position(self.waypoints[0])\n\t\tif self.chord_length(waypoint_location) > waypoint_location[0] and not self.waypoints[0].flipped:\n\t\t\tself.waypoints[0].flip_fields()\n\t\telif self.chord_length(waypoint_location) < waypoint_location[0] and self.waypoints[0].flipped:\n\t\t\tself.waypoints[0].flip_fields()\n\t\tif waypoint_location[0] < self.waypoints[0].margin:\n\t\t\tdel(self.waypoints[0])",
"def _h_stop(hit_seq, h_seq, h_strand):\n h_seq = h_seq.replace(\"_\", \"\")\n h_len = len(h_seq)\n if h_strand == 1:\n h_start = hit_seq.find(h_seq)\n h_stop = h_start + h_len - 1\n elif h_strand == -1:\n rc_h_seq = dna_reverse_complement(h_seq)\n rc_hit_seq = dna_reverse_complement(hit_seq)\n h_stop = rc_hit_seq.find(rc_h_seq)\n else:\n raise ValueError('h_strand must be 1 or -1')\n return(h_stop)",
"def calc_mph_chl(mph_0, mph_1, sipf, sicf, bair, ndvi, rmax_1, lambda_rmax_1, lambda_rmax_0, rmax_0, mph_floatthres, mph_cyanomax):\n \n if lambda_rmax_1 == 753:\n print('Right side of if-condition.')\n # MPH >= 0.02 or NDVI >0.2\n if (mph_1 >= 0.02 or ndvi >= 0.2):\n float_flag = 1\n adj_flag = 0\n # SICF < 0 and SIPF > 0\n if (sicf < 0 and sipf > 0):\n cyano_flag=1\n print('Flag: floating cyanobacteria true')\n chl_mph = 22.44 * math.exp(35.79 * mph_1)\n print('CHL MPH is: ' + str(chl_mph))\n if chl_mph > mph_floatthres:\n float_flag=1\n print('Floating cyanobacteria')\n else:\n print('Immersed cyanobacteria')\n # SICF >=0 or SIPF <=0 \n elif (sicf >= 0 or sipf <= 0):\n cyano_flag = 0\n chl_mph = np.nan\n print('Floating vegetation')\n \n # Continuation right side\n elif (mph_1 < 0.02 and ndvi < 0.2):\n float_flag = 0\n adj_flag = 1\n print('Flag: adjacent true')\n cyano_flag = 0\n print('Immersed eukaryotes')\n \n chl_mph = 5.24 * 10 ** 9 * mph_0 ** 4 - 1.95 * 10 ** 8 * mph_0 ** 3 + 2.46 * 10 ** 6 * mph_0 ** 2 + 4.02 * 10 ** 3 * mph_0 + 1.97\n \n # Left side of if-condition\n else:\n print('Left side of if-condition.')\n float_flag = 0\n adj_flag = 0\n \n # Left side of 2nd if-condition\n if (sicf >= 0 or sipf <= 0 or bair <= 0.002):\n print('Left 2nd if-condition')\n cyano_flag=0\n print('Immersed eukaryotes')\n chl_mph = 5.24 * 10 ** 9 * mph_0 ** 4 - 1.95 * 10 ** 8 * mph_0 ** 3 + 2.46 * 10 ** 6 * mph_0 ** 2 + 4.02 * 10 ** 3 * mph_0 + 1.97\n \n # Right side of 2nd if-condition\n elif (sicf <= 0 and sipf > 0 and bair > 0.002):\n print('Right 2nd if-condition')\n cyano_flag = 1\n print('Flag: cyanobacteria true')\n chl_mph = 22.44 * math.exp(35.79 * mph_1)\n if chl_mph > mph_floatthres:\n float_flag = 1\n print('Floating cyanobacteria')\n if chl_mph > mph_cyanomax:\n chl_mph = mph_cyanomax\n print('MPH chl maximum reached.')\n\n return chl_mph, cyano_flag, float_flag, adj_flag",
"def level_end_sp(self, level_end_sp):\n\n self._level_end_sp = level_end_sp",
"def CanEast(self):\n return (not self.HWLimit) or (self.LimOverride and self.WestLim)",
"def tcs_seg(self,start,end,SD=None,allcs=0,opf=1):\n S = self.M.allocState({})\n self.M.propagate(S, 0, 1)\n \n if SD == None :\n # set initial beam data\n S.ref_IonZ = self.refIonZ\n S.IonZ = self.IonZ_io\n\n S.moment0 = self.BC0\n S.moment1 = self.ENV0\n\n S.ref_IonEk = self.refIonEk\n \n S.phis = S.moment0[PS_S,:]\n S.IonEk = S.moment0[PS_PS,:]*MeVtoeV + S.ref_IonEk\n \n else :\n # set initial beam data\n S.ref_IonZ = SD.refIonZ\n S.IonZ = SD.IonZ_io\n\n S.moment0 = SD.BC0\n S.moment1 = SD.ENV0\n\n S.ref_phis = SD.refphis\n S.phis = SD.phis_io\n\n S.ref_IonEk = SD.refIonEk\n S.IonEk = SD.BC0[PS_PS,:]*MeVtoeV + SD.refIonEk\n S.pos = SD.pos\n\n phis_ini = S.ref_phis\n\n #S.clng = self.clng\n\n fin = end - start + 1\n RD = numpy.zeros((fin,8))\n\n #if allcs: AD = [[[0.0]*6 for i in range(fin)] for j in range(len(S.IonZ))]\n if allcs: AD = numpy.zeros((len(S.IonZ),fin,8))\n\n # store initial beam data\n RD[0][0] = S.pos\n RD[0][1] = S.moment0_env[0]\n RD[0][2] = S.moment0_env[2]\n RD[0][3] = S.moment0_env[4]\n RD[0][4] = S.moment0_rms[0]\n RD[0][5] = S.moment0_rms[2]\n RD[0][6] = S.ref_phis - phis_ini\n RD[0][7] = S.ref_IonEk\n\n if allcs:\n for k in range(len(S.IonZ)):\n AD[k][0][0] = S.pos\n AD[k][0][1] = S.moment0[0,k]\n AD[k][0][2] = S.moment0[2,k]\n AD[k][0][3] = S.moment0[4,k]\n AD[k][0][4] = numpy.sqrt(S.moment1[0,0,k])\n AD[k][0][5] = numpy.sqrt(S.moment1[2,2,k])\n \n # propagate step by step and store beam data\n for (j,i) in enumerate(range(start,end)):\n self.M.propagate(S, i+1, 1)\n\n RD[j+1][0] = S.pos\n RD[j+1][1] = S.moment0_env[0]\n RD[j+1][2] = S.moment0_env[2]\n RD[j+1][3] = S.moment0_env[4]\n RD[j+1][4] = S.moment0_rms[0]\n RD[j+1][5] = S.moment0_rms[2]\n RD[j+1][6] = S.ref_phis - phis_ini\n RD[j+1][7] = S.ref_IonEk\n\n if allcs:\n for k in range(len(S.IonZ)):\n AD[k][j+1][0] = S.pos\n AD[k][j+1][1] = S.moment0[0,k]\n AD[k][j+1][2] = S.moment0[2,k]\n AD[k][j+1][3] = S.moment0[4,k]\n AD[k][j+1][4] = numpy.sqrt(S.moment1[0,0,k])\n AD[k][j+1][5] = numpy.sqrt(S.moment1[2,2,k])\n\n if opf: numpy.savetxt('ldata.txt',RD)\n\n if allcs: return (S,RD,AD)\n else : return (S,RD)",
"def ehyb_both_prim_found(blast, f_hsp, r_hsp):\n\n lo_hsp_ehybrid_qcov = ehyb(blast) # assigns ehybrid attributes to each hsp from amp vs db\n ehybrid_qcov_pass = [hsp for hsp in lo_hsp_ehybrid_qcov if hsp.ehybrid == True]\n ehybrid_qcov_fail = [hsp for hsp in lo_hsp_ehybrid_qcov if hsp.ehybrid == False]\n\n for hsp in ehybrid_qcov_pass:\n # if f_hsp.name in hsp.name and r_hsp.name == hsp.name:\n if abs(f_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n or abs(r_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n or abs(f_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n or abs(r_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n and f_hsp.contig_name == hsp.contig_name:\n f_hsp.ehybrid, r_hsp.ehybrid = True, True\n f_hsp.amp_len, r_hsp.amp_len = hsp.length, hsp.length\n f_hsp.amp_sbjct, r_hsp.amp_sbjct = hsp.sbjct, hsp.sbjct\n f_hsp.amp_query, r_hsp.amp_query = hsp.query, hsp.query\n for hsp in ehybrid_qcov_fail:\n # if f_hsp.name in hsp.name and r_hsp.name in hsp.name:\n if abs(f_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n or abs(r_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n or abs(f_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n or abs(r_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length) \\\n and r_hsp.contig_name == hsp.contig_name:\n f_hsp.ehybrid, r_hsp.ehybrid = False, False\n f_hsp.amp_len, r_hsp.amp_len = hsp.length, hsp.length\n f_hsp.amp_sbjct, r_hsp.amp_sbjct = hsp.sbjct, hsp.sbjct\n f_hsp.amp_query, r_hsp.amp_query = hsp.query, hsp.query",
"def is_end_of_game(self):\n pass",
"def is_ccw(ring):\n return signed_area(ring) >= 0.0",
"def storage_interval_end_upper_bound_rule(_m, g):\r\n\r\n # Existing units\r\n if g in m.G_E_STORAGE:\r\n return m.q[g, m.T.last()] <= m.EXISTING_STORAGE_ENERGY_CAPACITY[g]\r\n\r\n # Candidate units\r\n elif g in m.G_C_STORAGE:\r\n return m.q[g, m.T.last()] <= m.b[g]",
"def boundary_check(self):\r\n if self.position[0] < 0:\r\n self.velocity[0] *= -.8\r\n self.position[0] = 0\r\n elif self.position[0] > size[0] - self.offset:\r\n self.velocity[0] *= -.8\r\n self.position[0] = size[0] - self.offset\r\n if self.position[1] < 0:\r\n self.velocity[1] *= -.8\r\n self.position[1] = 0\r\n elif self.position[1] > size[1] - self.offset:\r\n self.velocity[1] *= -.8\r\n self.position[1] = size[1] - self.offset",
"def check_end_length(self):\n return self._check_end_length",
"def has_EH_CFI(self):\r\n return self.eh_frame_sec is not None",
"def defend_rule_2(bot,commander,knowledgeBase):\n if(knowledgeBase.ourFlagCaptured):\n campspot = knowledgeBase.findCampSpot(commander.game.enemyTeam.flagScoreLocation)\n diff = commander.game.team.flag.position - campspot\n left = Vector2(-diff.y,diff.x)\n right = Vector2(diff.y,-diff.x)\n toClosestEnemy = knowledgeBase.predictNearestEnemy(bot).position - campspot\n looks = [(diff,1.0)]\n if(knowledgeBase.canShootTo(left,bot.position)):\n looks.append((left,1.0))\n if(knowledgeBase.canShootTo(right,bot.position)):\n looks.append((right,1.0))\n if(knowledgeBase.canShootTo(toClosestEnemy,bot.position)):\n looks.append((toClosestEnemy,1.0))\n if(bot.position.distance(campspot) > 2):\n commander.issue(orders.Charge,bot,campspot,description = \"Defender \" + bot.name + \" camp enemy flag score location\")\n else:\n commander.issue(orders.Defend,bot,looks,description = \"Defender \" + bot.name + \" camping enemy flag score location\")\n return True\n return False",
"def _is_in_tilt_bounds(self, tilt):\n return self.neck_tilt_bounds[0] <= tilt and self.neck_tilt_bounds[1] >= tilt",
"def kcdetect(data, sf, proba_thr, amp_thr, hypno, nrem_only, tmin, tmax,\n kc_min_amp, kc_max_amp, fmin=.5, fmax=4., delta_thr=.75,\n smoothing_s=20, spindles_thresh=2., range_spin_sec=20,\n min_distance_ms=500.):\n # Find if hypnogram is loaded :\n hyploaded = True if np.unique(hypno).size > 1 and nrem_only else False\n\n # PRE DETECTION\n # Compute delta band power using wavelet\n freqs = np.array([0.1, 4., 8., 12., 16., 30.])\n delta_npow = morlet_power(data, freqs, sf, norm=True)[0]\n delta_nfpow = smoothing(delta_npow, smoothing_s * sf)\n idx_no_delta = np.where(delta_nfpow < delta_thr)[0]\n idx_loc_delta = np.where(delta_npow > np.median(delta_npow))[0]\n\n # MAIN DETECTION\n # Bandpass filtering\n sig_filt = filt(sf, np.array([fmin, fmax]), data)\n # Taiger-Keaser energy operator\n sig_tkeo = tkeo(sig_filt)\n # Define hard and soft thresholds\n hard_thr = np.nanmean(sig_tkeo) + amp_thr * np.nanstd(sig_tkeo)\n soft_thr = 0.8 * hard_thr\n\n with np.errstate(divide='ignore', invalid='ignore'):\n idx_hard = np.where(sig_tkeo > hard_thr)[0]\n idx_soft = np.where(sig_tkeo > soft_thr)[0]\n\n # Find threshold-crossing indices of soft threshold\n idx_zc_soft = _events_to_index(idx_soft).flatten()\n\n if idx_hard.size == 0:\n return np.array([], dtype=int)\n\n # Initialize K-complexes index vector\n idx_kc = np.array([], dtype=int)\n # Fill gap between events separated by less than min_distance_ms\n idx_hard = _events_distance_fill(idx_hard, min_distance_ms, sf)\n # Get where K-complex start / end :\n idx_start, idx_stop = _events_to_index(idx_hard).T\n\n # Find true beginning / end using soft threshold\n for s in idx_start:\n d = s - idx_zc_soft\n soft_beg = d[d > 0].min()\n soft_end = np.abs(d[d < 0]).min()\n idx_kc = np.append(idx_kc, np.arange(s - soft_beg, s + soft_end))\n\n # Check if spindles are present in range_spin_sec\n idx_spin = spindlesdetect(data, sf, spindles_thresh, hypno, False)[0]\n idx_start, idx_stop = _events_to_index(idx_kc).T\n spin_bool = np.array([], dtype=np.bool)\n\n for idx, val in enumerate(idx_start):\n step = 0.5 * range_spin_sec * sf\n is_spin = np.in1d(np.arange(val - step, val + step, 1),\n idx_spin, assume_unique=True)\n spin_bool = np.append(spin_bool, any(is_spin))\n\n kc_spin = np.where(spin_bool)[0]\n idx_kc_spin = _index_to_events(np.c_[idx_start, idx_stop][kc_spin])\n\n # Compute probability\n proba = np.zeros(shape=data.shape)\n proba[idx_kc] += 0.1\n proba[idx_no_delta] += 0.1\n proba[idx_loc_delta] += 0.1\n proba[idx_kc_spin] += 0.1\n\n if hyploaded:\n proba[hypno == -1] += -0.1\n proba[hypno == 0] += -0.2\n proba[hypno == 1] += 0\n proba[hypno == 2] += 0.1\n proba[hypno == 3] += -0.1\n proba[hypno == 4] += -0.2\n\n # Smooth and normalize probability vector\n proba = proba / 0.5 if hyploaded else proba / 0.4\n proba = smoothing(proba, sf)\n # Keep only proba >= proba_thr (user defined threshold)\n idx_kc = np.intersect1d(idx_kc, np.where(proba >= proba_thr)[0], True)\n\n if idx_kc.size == 0:\n return np.array([], dtype=int)\n\n # Morphological criteria\n idx_start, idx_stop = _events_to_index(idx_kc).T\n duration_ms = (idx_stop - idx_start) * (1000 / sf)\n\n # Remove events with bad duration\n good_dur = np.where(np.logical_and(duration_ms > tmin,\n duration_ms < tmax))[0]\n idx_kc = _index_to_events(np.c_[idx_start, idx_stop][good_dur])\n\n # Remove events with bad amplitude\n idx_start, idx_stop = _events_to_index(idx_kc).T\n amp = np.zeros(shape=idx_start.size)\n for i, (start, stop) in enumerate(zip(idx_start, idx_stop)):\n amp[i] = 
np.ptp(data[start:stop])\n good_amp = np.where(np.logical_and(amp > kc_min_amp,\n amp < kc_max_amp))[0]\n\n return np.c_[idx_start, idx_stop][good_amp]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Determines whether the missing cj0181 sequence matches the chimeric primer sequence for its exception case.
|
from Bio import SeqIO
from Bio.Seq import Seq

def cj0181_missing_seq(hsp_object, primer_dict, database, chimeric_seq) -> bool:
start = hsp_object.start
end = hsp_object.end
query_start = hsp_object.query_start
query_end = hsp_object.query_end
len_missing = len(primer_dict[hsp_object.name]) - abs(end - start) - 1
begin_missing = query_start - 1
end_missing = abs(len(primer_dict[hsp_object.name]) - query_end)
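    # len_missing == 7 selects the known chimeric case for cj0181; the 7 bp
    # presumably matches len(chimeric_seq), which is compared below.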
if len_missing == 7:
with open(database, 'r') as fasta:
for contig in SeqIO.parse(fasta, 'fasta'):
if contig.name == hsp_object.contig_name:
if start > end:
seq_found = contig[end - end_missing - 1: start + begin_missing]
hsp_object.sbjct = str(seq_found.reverse_complement().seq)
else:
seq_found = contig[start - begin_missing - 1: end + end_missing]
hsp_object.sbjct = str(seq_found.seq)
        missing_seq_found = hsp_object.sbjct[:7]
r_comp_chimeric_seq = str(Seq(chimeric_seq).reverse_complement())
return missing_seq_found == chimeric_seq or missing_seq_found == r_comp_chimeric_seq
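
# A minimal usage sketch (hypothetical values; "ATGCATG" stands in for the real
# 7 bp chimeric sequence and "contigs.fasta" for the BLAST database file):
#     cj0181_missing_seq(hsp, primer_dict, "contigs.fasta", "ATGCATG")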
|
[
"def check_seq(self):\n nuc_list = ['A', 'T', 'C', 'G']\n global check, error_details\n for row_index, row in self.primer_df.iterrows():\n for letter in row['Primer_seq'].strip():\n if letter not in nuc_list:\n check += 1\n error = \"Invalid DNA primer sequence, see row %s in file\" % (row_index + 4)\n error_details.append(error)",
"def ePCR(target_seq, primer5, primer3):\n valid_sequence(target_seq)\n ix = False\n pcr_seq = ''\n for seq in (target_seq, reverse_seq(target_seq)):\n cnt5, cnt3 = seq.count(primer5), seq.count(primer3)\n if cnt5 > 1 or cnt3 > 1:\n raise ValueError('Multiple location of primer pair in target sequence. Primer5: %d, Primer3: %d.' % (cnt5, cnt3))\n pos5 = seq.find(primer5)\n pos3 = reverse_seq(seq).find(primer3)\n pos3 = len(seq) - pos3 - 1 if pos3 != -1 else -1\n if pos3 >= pos5 != -1:\n pcr_seq = seq[pos5:(pos3+1)]\n ix = True\n return ix, pcr_seq",
"def test_C_consistency():\n for p in Particle.all():\n if not (p.is_unflavoured_meson and p.three_charge == 0):\n continue\n elif _digit(p.pdgid, Location.N) == 9:\n continue\n elif p.pdgid == 22: # Special case of the photon\n assert p.C == -1\n elif p.pdgid in [130, 310]: # Special case of the KS and KL\n assert p.C == Parity.u\n else:\n assert p.C == (-1) ** (p.L + p.S)",
"def seq_validator(sequence):\n\n # checks for ascii characters that should not appear in a fasta sequence\n seq_val = re.compile(\"[^ATKMBVCNSWD-GUYRHatkbbvcnswdguyrh]\")\n\n # if any illegal characters found return False\n if seq_val.search(sequence):\n return False\n\n return True",
"def is_valid_sequence(sequence):\n try:\n numbers = map(int, sequence)\n if len(numbers) == 0:\n return False\n return all(x >= 0 and x <= 3 for x in numbers)\n except:\n return False",
"def precheck(atoms, i, j, Hs, As, Ds, fsc0):\n ei, ej = atoms[i].element, atoms[j].element\n altloc_i = atoms[i].parent().altloc\n altloc_j = atoms[j].parent().altloc\n resseq_i = atoms[i].parent().parent().resseq\n resseq_j = atoms[j].parent().parent().resseq\n one_is_Hs = ei in Hs or ej in Hs\n other_is_acceptor = ei in As or ej in As\n is_candidate = one_is_Hs and other_is_acceptor and \\\n altloc_i == altloc_j and resseq_i != resseq_j\n if(ei in Hs):\n bound_to_h = fsc0[i]\n if(not bound_to_h): # exclude 'lone' H\n is_candidate = False\n elif(atoms[bound_to_h[0]].element not in Ds): # Use only first atom bound to H\n is_candidate = False\n if(ej in Hs):\n bound_to_h = fsc0[j]\n if(not bound_to_h):\n is_candidate = False\n elif(atoms[bound_to_h[0]].element not in Ds):\n is_candidate = False\n return is_candidate",
"def test_rec_ct_missing_serial(self):\n in_file = open(os.path.join(RESOURCE_PATH,\n 'SBE37-IM_20110101_missing_serial.hex'), 'r')\n parser = CtdmoGhqrRecoveredCtParser(self.config_rec_ct, in_file, self.exception_callback)\n\n # Not expecting any particles.\n expected_results = []\n\n # Try to get one particle and verify we didn't get any.\n result = parser.get_records(1)\n self.assertEqual(result, expected_results)\n\n in_file.close()\n self.assertEqual(self.exception_callback_value, [])",
"def get_is_transcript_disrupting_consequence_SV(consequence):\n\n # get the consequences\n consequences_set = set(consequence.split(\",\"))\n\n # define whether the consequences are prot_alterring\n if len(consequences_set.intersection(SVs_TRANSCRIPT_DISRUPTING_MUTATIONS))>0: return True\n elif len(consequences_set.difference(SVs_NON_TRANSCRIPT_DISRUPTING_MUTATIONS))==0: return False\n else: raise ValueError(\"%s contains non-described vars\"%consequences_set)",
"def test_reading_ucsc_mm9_chr10_bad(self):\n path = \"MAF/ucsc_mm9_chr10_bad.maf\"\n alignments = Align.parse(path, \"maf\")\n self.assertEqual(alignments.metadata[\"MAF Version\"], \"1\")\n self.assertEqual(alignments.metadata[\"Scoring\"], \"autoMZ.v1\")\n next(alignments)\n next(alignments)\n next(alignments)\n next(alignments)\n next(alignments)\n next(alignments)\n with self.assertRaises(ValueError) as cm:\n next(alignments)\n self.assertEqual(\n str(cm.exception), \"sequence size is incorrect (found 219, expected 319)\"\n )",
"def test_invalid_cb_for_3bytes_seq(self):\n FFFD = '�'\n FFFDx2 = FFFD * 2\n sequences = [('E0 00', FFFD + '\\x00'), ('E0 7F', FFFD + '\\x7f'), (\n 'E0 80', FFFDx2), ('E0 9F', FFFDx2), ('E0 C0', FFFDx2), (\n 'E0 FF', FFFDx2), ('E0 A0 00', FFFD + '\\x00'), ('E0 A0 7F', \n FFFD + '\\x7f'), ('E0 A0 C0', FFFDx2), ('E0 A0 FF', FFFDx2), (\n 'E0 BF 00', FFFD + '\\x00'), ('E0 BF 7F', FFFD + '\\x7f'), (\n 'E0 BF C0', FFFDx2), ('E0 BF FF', FFFDx2), ('E1 00', FFFD +\n '\\x00'), ('E1 7F', FFFD + '\\x7f'), ('E1 C0', FFFDx2), ('E1 FF',\n FFFDx2), ('E1 80 00', FFFD + '\\x00'), ('E1 80 7F', FFFD +\n '\\x7f'), ('E1 80 C0', FFFDx2), ('E1 80 FF', FFFDx2), (\n 'E1 BF 00', FFFD + '\\x00'), ('E1 BF 7F', FFFD + '\\x7f'), (\n 'E1 BF C0', FFFDx2), ('E1 BF FF', FFFDx2), ('EC 00', FFFD +\n '\\x00'), ('EC 7F', FFFD + '\\x7f'), ('EC C0', FFFDx2), ('EC FF',\n FFFDx2), ('EC 80 00', FFFD + '\\x00'), ('EC 80 7F', FFFD +\n '\\x7f'), ('EC 80 C0', FFFDx2), ('EC 80 FF', FFFDx2), (\n 'EC BF 00', FFFD + '\\x00'), ('EC BF 7F', FFFD + '\\x7f'), (\n 'EC BF C0', FFFDx2), ('EC BF FF', FFFDx2), ('ED 00', FFFD +\n '\\x00'), ('ED 7F', FFFD + '\\x7f'), ('ED A0', FFFDx2), ('ED BF',\n FFFDx2), ('ED C0', FFFDx2), ('ED FF', FFFDx2), ('ED 80 00', \n FFFD + '\\x00'), ('ED 80 7F', FFFD + '\\x7f'), ('ED 80 C0',\n FFFDx2), ('ED 80 FF', FFFDx2), ('ED 9F 00', FFFD + '\\x00'), (\n 'ED 9F 7F', FFFD + '\\x7f'), ('ED 9F C0', FFFDx2), ('ED 9F FF',\n FFFDx2), ('EE 00', FFFD + '\\x00'), ('EE 7F', FFFD + '\\x7f'), (\n 'EE C0', FFFDx2), ('EE FF', FFFDx2), ('EE 80 00', FFFD + '\\x00'\n ), ('EE 80 7F', FFFD + '\\x7f'), ('EE 80 C0', FFFDx2), (\n 'EE 80 FF', FFFDx2), ('EE BF 00', FFFD + '\\x00'), ('EE BF 7F', \n FFFD + '\\x7f'), ('EE BF C0', FFFDx2), ('EE BF FF', FFFDx2), (\n 'EF 00', FFFD + '\\x00'), ('EF 7F', FFFD + '\\x7f'), ('EF C0',\n FFFDx2), ('EF FF', FFFDx2), ('EF 80 00', FFFD + '\\x00'), (\n 'EF 80 7F', FFFD + '\\x7f'), ('EF 80 C0', FFFDx2), ('EF 80 FF',\n FFFDx2), ('EF BF 00', FFFD + '\\x00'), ('EF BF 7F', FFFD +\n '\\x7f'), ('EF BF C0', FFFDx2), ('EF BF FF', FFFDx2)]\n for seq, res in sequences:\n self.assertCorrectUTF8Decoding(self.to_bytestring(seq), res,\n 'invalid continuation byte')",
"def checkAlphabet(self, sequence):\n ok = [ch for ch in sequence if ch in self.E]\n if len(ok) < len(sequence):\n return False \n return True",
"def test_comp_error(self):\n with self.assertRaises(ValueError) as cm:\n stem.complement(\"X\")\n self.assertEqual('Unknown bases passed: %s' % ', '.join(\"X\"),\n str(cm.exception))",
"def test_wrong_sequences(test_input):\n assert not check_fibonacci(test_input)",
"def test_first_37(self):\n for sv in range(1, 38):\n prn_seq = prn.PRN(sv)\n for i in range(10):\n prn_seq.next()\n prn_test = prn.prn_info['first_ten_chips'][str(sv)]\n prn_test = bin(int(prn_test, 8))[2:]\n for i in range(10):\n self.assertEqual(prn_test[i], str(prn_seq.ca[i]))",
"def check_sequence(self, sequence: list) -> bool:\n\n state = self.initial_state\n for symbol in sequence:\n if symbol not in self.alphabet:\n raise SymbolNotInAlphabetError(symbol, self.alphabet)\n\n state, state_changed = self.__get_next_state(symbol, state)\n if not state_changed:\n raise TransitionNotFoundError(symbol, state)\n\n if state not in self.final_states:\n return False\n\n return True",
"def test_baum_sweet_sequence_first_15(self):\n\t\tfirst_15 = baum_sweet_sequence(15)\n\t\texcepted_output = [1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0]\n\t\tself.assertEquals(first_15, excepted_output)",
"def is_defecient(n):\n return sod(n) < 2*n and n > 0",
"def is_cm_j_invariant(j):\n from sage.rings.all import NumberFieldElement\n if not isinstance(j, NumberFieldElement) and not j in QQ:\n raise NotImplementedError(\"is_cm_j_invariant() is only implemented for number field elements\")\n if not j.is_integral():\n return False, None\n jpol = PolynomialRing(QQ,'x')([-j,1]) if j in QQ else j.absolute_minpoly()\n h = jpol.degree()\n if h>100:\n raise NotImplementedError(\"CM data only available for class numbers up to 100\")\n for d,f in cm_orders(h):\n if jpol == hilbert_class_polynomial(d*f**2):\n return True, (d,f)\n return False, None",
"def test_missing_codons():\n\n rv, out = getstatusoutput('{} {}'.format(prg, dna))\n assert rv > 0\n assert re.match(\"usage\", out, re.IGNORECASE)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Removes an hsp object from blast_object if its BSR (blast score ratio) is below MIN_BSR
|
def bsr(blast_object: Blastn, max_bits_dict: dict):
    # iterate over a copy: remove_hsp_object_all mutates hsp_objects, and
    # removing from a list while iterating over it silently skips elements
    for hsp in list(blast_object.hsp_objects):
        hsp.bsr = hsp.bits / max_bits_dict[hsp.name]
        if hsp.bsr < MIN_BSR:
            blast_object.remove_hsp_object_all(hsp)
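
A self-contained sketch of the same blast score ratio filter on plain dicts; the cutoff and the hit records are illustrative stand-ins, not the pipeline's real data:

MIN_BSR = 0.6  # illustrative cutoff

hits = [{"name": "cj0181", "bits": 55.0}, {"name": "cj0298", "bits": 20.0}]
max_bits = {"cj0181": 60.0, "cj0298": 58.0}

# keep only hits whose ratio against the best self-hit passes the cutoff
kept = [h for h in hits if h["bits"] / max_bits[h["name"]] >= MIN_BSR]
print([h["name"] for h in kept])  # ['cj0181'] (0.92 passes, 0.34 does not)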
|
[
"def remove_oldest(own):\n if own.capacityOf==0 or own.capacityOf<0:\n print(\"Capacity of RingBuffer is 0 or less than 1. Can't use this RingBuffer\")\n return\n return own.removeFirst()",
"def remove(self, hspl):\n\n hsplObject = hspl.findtext(\"{%s}object\" % getHSPLNamespace())\n m = re.match(\"(\\d+\\.\\d+\\.\\d+\\.\\d+(/\\d+)?)(:(\\d+|\\*|any))?\", hsplObject)\n\n if m:\n key = self.__getHash(hspl)\n address = ip_network(m.group(1))\n port = m.group(4)\n if port == \"any\":\n port = \"*\"\n prefixLength = address.prefixlen\n number = int(address.network_address)\n mapPrefixes = self.__map[key]\n for i in range(0, prefixLength + 1):\n if i in mapPrefixes:\n mapAddresses = mapPrefixes[i]\n n = (number >> (32 - i)) << (32 - i)\n if n in mapAddresses:\n mapPort = mapAddresses[n]\n if port in mapPort:\n mapPort[port].remove(hspl)\n if port != \"*\" and \"*\" in mapPort:\n mapPort[\"*\"].remove(hspl)\n\n if hspl in self.__hspls:\n self.__hspls.remove(hspl)",
"def removeRsNode() :\n\tnode1 = mc.ls(type = 'RedshiftMeshParameters')\n\t# node2 = mc.ls(type = 'VRayObjectProperties')\n \n\tif node1 : \n\t\tmc.delete(node1)",
"def collide_brick_top_bottom(self):\n # Check the top side.\n if self.obj1() is not None and self.obj2() is not None:\n # if the upper two point of the ball collides the same brick, just remove once.\n if self.obj1() == self.obj2():\n self.window.remove(self.obj2())\n self.brick_cnt -= 1\n # if the upper two point of the ball collides different bricks, remove both bricks.\n else:\n self.window.remove(self.obj1())\n self.brick_cnt -= 1\n self.window.remove(self.obj2())\n self.brick_cnt -= 1\n return True\n # Check the bottom side.\n if self.obj3() is not None and self.obj4() is not None:\n if self.obj3() == self.obj4():\n self.window.remove(self.obj4())\n self.brick_cnt -= 1\n else:\n self.window.remove(self.obj3())\n self.brick_cnt -= 1\n self.window.remove(self.obj4())\n self.brick_cnt -= 1\n return True",
"def removeFirst(self):\n\t\tself.head = self.head.after",
"def deactiveHeadTrack(self):\n\t\t# get objects\n\t\tplayer = scene.objects['Link']\n\t\tplayer.rig['armConstraint'] = False",
"def remove(self, obj):\n with self._lock:\n if obj not in self._objects:\n raise ObjectNotInPool(\"Object is not in the list of pool objects.\")\n # mark the resource as deleted\n self._removed[id(obj)] = True\n # if it is currently in the available set, remove it\n self._available = [o for o in self._available if o is not obj]\n if self.all_removed():\n raise AllResourcesRemoved(\n \"All resources have been removed. \"\n \"Further use of the resource pool is void.\")",
"def remove_small_objects(image, min_size=50): \n return skimage.morphology.remove_small_objects(image, min_size=min_size)",
"def remove_node(self, node):\n super(Bucket, self).remove_node(node)\n # if _any_isclose(self.free_capacity, node.free_capacity):\n self.adjust_capacity_down(node.free_capacity)\n\n return node",
"def remove_from_rack(self, stones):\n self.rack = helpers.remove_from_arr(self.rack, stones)",
"def delMin(self):\n self.heapList[1] = self.heapList[self.currentSize]\n self.heapList.pop()\n self.currentSize -= 1\n self.percDown(1)",
"def _remove_min(node):\n # This is the smallest key -> delete it\n if node.left is None:\n return None\n\n # Force left child to be red\n if is_black(node.left) and is_black(node.left.left):\n node = LLRBT._move_red_left(node)\n\n node.left = LLRBT._remove_min(node.left)\n\n # On the way up, fix right leaning trees and 4-nodes\n return LLRBT._balance(node)",
"def remove(self, obj, interval):\r\n isdb, store_key = self._store_key(obj, interval)\r\n if isdb:\r\n self.ticker_storage.pop(store_key, None)\r\n self.save()\r\n self.ticker_pool.remove(store_key, interval)",
"def remove_from_group(self):\n self.simulator.devices['bulbs'].remove(self)",
"def remove_reserve(self):\r\n self._reserves -= 1",
"def remove_schedule(self, sched):\r\n try:\r\n period = sched.period\r\n self._schedules.remove(sched)\r\n except (AttributeError, ValueError):\r\n return\r\n\r\n if period == self._shortest:\r\n shortest = self.period\r\n for scheds in self._schedules:\r\n for sched in scheds:\r\n if sched.period < shortest:\r\n shortest = sched.period",
"def removeObject(self, obj):\n\t\tself._objs.remove(obj)",
"def _need_to_remove_head(self, signal):\n if self._is_empty():\n return False\n elif self._is_too_big():\n return True\n elif self._is_head_too_old(signal):\n return True\n else:\n return False",
"def remove(self):\n if len(self.ballList) > 0:\n self.ballList.pop()\n self.count(True)\n else:\n self.count(False)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Performs eHybridization when both the forward and reverse primers are found.
|
def ehyb_both_prim_found(blast, f_hsp, r_hsp):
    lo_hsp_ehybrid_qcov = ehyb(blast)  # assigns ehybrid attributes to each hsp from amp vs db
    ehybrid_qcov_pass = [hsp for hsp in lo_hsp_ehybrid_qcov if hsp.ehybrid == True]
    ehybrid_qcov_fail = [hsp for hsp in lo_hsp_ehybrid_qcov if hsp.ehybrid == False]
    for hsp in ehybrid_qcov_pass:
        # if f_hsp.name in hsp.name and r_hsp.name == hsp.name:
        # parentheses make the contig check apply to the whole disjunction;
        # without them, 'and' binds only to the final 'or' operand
        if (abs(f_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)
                or abs(r_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)
                or abs(f_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)
                or abs(r_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)) \
                and f_hsp.contig_name == hsp.contig_name:
            f_hsp.ehybrid, r_hsp.ehybrid = True, True
            f_hsp.amp_len, r_hsp.amp_len = hsp.length, hsp.length
            f_hsp.amp_sbjct, r_hsp.amp_sbjct = hsp.sbjct, hsp.sbjct
            f_hsp.amp_query, r_hsp.amp_query = hsp.query, hsp.query
    for hsp in ehybrid_qcov_fail:
        # if f_hsp.name in hsp.name and r_hsp.name in hsp.name:
        if (abs(f_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)
                or abs(r_hsp.start - hsp.start) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)
                or abs(f_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)
                or abs(r_hsp.end - hsp.end) <= (MAX_PERC_EHYB_PRIMER_ENDS * hsp.length)) \
                and r_hsp.contig_name == hsp.contig_name:
            f_hsp.ehybrid, r_hsp.ehybrid = False, False
            f_hsp.amp_len, r_hsp.amp_len = hsp.length, hsp.length
            f_hsp.amp_sbjct, r_hsp.amp_sbjct = hsp.sbjct, hsp.sbjct
            f_hsp.amp_query, r_hsp.amp_query = hsp.query, hsp.query
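
A quick illustration of the operator-precedence pitfall fixed above; the boolean values are arbitrary:

a, b, near_end, same_contig = False, True, False, False

# without parentheses, 'and' binds tighter than 'or':
loose = a or b or near_end and same_contig       # True  (b alone decides)
# with parentheses, the contig check gates the whole disjunction:
strict = (a or b or near_end) and same_contig    # False

print(loose, strict)  # True False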
|
[
"def map_hydrogens(spc_1: ARCSpecies,\n spc_2: ARCSpecies,\n backbone_map: Dict[int, int],\n ) -> Dict[int, int]:\n atom_map = backbone_map\n atoms_1, atoms_2 = spc_1.mol.atoms, spc_2.mol.atoms\n for hydrogen_1 in atoms_1:\n if hydrogen_1.is_hydrogen() and atoms_1.index(hydrogen_1) not in atom_map.keys():\n success = False\n heavy_atom_1 = list(hydrogen_1.edges.keys())[0]\n heavy_atom_2 = atoms_2[backbone_map[atoms_1.index(heavy_atom_1)]]\n num_hydrogens_1 = len([atom for atom in heavy_atom_1.edges.keys() if atom.is_hydrogen()])\n if num_hydrogens_1 == 1:\n # We know that num_hydrogens_2 == 1 because the candidate map resulted from are_adj_elements_in_agreement().\n hydrogen_2 = [atom for atom in heavy_atom_2.edges.keys() if atom.is_hydrogen()][0]\n atom_map[atoms_1.index(hydrogen_1)] = atoms_2.index(hydrogen_2)\n success = True\n # Consider 2/3/4 hydrogen atoms on this heavy atom.\n # 1. Check for a heavy atom with only H atoms adjacent to it (CH4, NH3, H2).\n if not success:\n if all(atom.is_hydrogen() for atom in heavy_atom_1.edges.keys()):\n for atom_1, atom_2 in zip([atom for atom in atoms_1 if atom.is_hydrogen()],\n [atom for atom in atoms_2 if atom.is_hydrogen()]):\n atom_map[atoms_1.index(atom_1)] = atoms_2.index(atom_2)\n success = True\n # 2. Check for a torsion involving heavy_atom_1 as a pivotal atom (most common case).\n if not success:\n if spc_1.rotors_dict is not None:\n heavy_atom_1_index = atoms_1.index(heavy_atom_1)\n for rotor_dict in spc_1.rotors_dict.values():\n if heavy_atom_1_index in [rotor_dict['torsion'][1], rotor_dict['torsion'][2]]:\n atom_map = add_adjacent_hydrogen_atoms_to_map_based_on_a_specific_torsion(\n spc_1=spc_1,\n spc_2=spc_2,\n heavy_atom_1=heavy_atom_1,\n heavy_atom_2=heavy_atom_2,\n torsion=rotor_dict['torsion'],\n atom_map=atom_map,\n find_torsion_end_to_map=True,\n )\n success = True\n break\n # 3. Check for a pseudo-torsion (may involve multiple bonds) with heavy_atom_1 as a pivot.\n if not success:\n pseudo_torsion = list()\n for atom_1_3 in heavy_atom_1.edges.keys():\n if atom_1_3.is_non_hydrogen():\n for atom_1_4 in atom_1_3.edges.keys():\n if atom_1_4.is_non_hydrogen() and atom_1_4 is not heavy_atom_1:\n pseudo_torsion = [atoms_1.index(atom) for atom in [hydrogen_1, heavy_atom_1, atom_1_3, atom_1_4]]\n break\n if not len(pseudo_torsion):\n # Compromise for a hydrogen atom in position 4.\n for atom_1_4 in atom_1_3.edges.keys():\n if atom_1_4 is not heavy_atom_1:\n pseudo_torsion = [atoms_1.index(atom) for atom in [hydrogen_1, heavy_atom_1, atom_1_3, atom_1_4]]\n break\n if len(pseudo_torsion):\n atom_map = add_adjacent_hydrogen_atoms_to_map_based_on_a_specific_torsion(\n spc_1=spc_1,\n spc_2=spc_2,\n heavy_atom_1=heavy_atom_1,\n heavy_atom_2=heavy_atom_2,\n torsion=pseudo_torsion[::-1],\n atom_map=atom_map,\n find_torsion_end_to_map=False,\n )\n success = True\n break\n # 4. 
Check by angles and bond lengths (search for 2 consecutive heavy atoms).\n if not success:\n atom_1_3, angle_1, bond_length_1 = None, None, None\n for atom_1_3 in heavy_atom_1.edges.keys():\n if atom_1_3.is_non_hydrogen():\n heavy_atom_1_index, hydrogen_1_index = atoms_1.index(heavy_atom_1), atoms_1.index(hydrogen_1)\n angle_1 = calculate_angle(coords=spc_1.get_xyz(),\n atoms=[atoms_1.index(atom_1_3), heavy_atom_1_index, hydrogen_1_index])\n bond_length_1 = calculate_distance(coords=spc_1.get_xyz(),\n atoms=[heavy_atom_1_index, hydrogen_1_index])\n break\n if atom_1_3 is not None:\n atom_2_3_index = atom_map[atoms_1.index(atom_1_3)]\n angle_deviations, bond_length_deviations, hydrogen_indices_2 = list(), list(), list()\n for hydrogen_2 in heavy_atom_2.edges.keys():\n if hydrogen_2.is_hydrogen() and atoms_2.index(hydrogen_2) not in atom_map.values():\n heavy_atom_2_index, hydrogen_2_index = atoms_2.index(heavy_atom_2), atoms_2.index(hydrogen_2)\n angle_2 = calculate_angle(coords=spc_2.get_xyz(),\n atoms=[atom_2_3_index, heavy_atom_2_index, hydrogen_2_index])\n bond_length_2 = calculate_distance(coords=spc_2.get_xyz(),\n atoms=[heavy_atom_2_index, hydrogen_2_index])\n angle_deviations.append(abs(angle_1 - angle_2))\n bond_length_deviations.append(abs(bond_length_1 - bond_length_2))\n hydrogen_indices_2.append(hydrogen_2_index)\n deviations = [bond_length_deviations[i] * hydrogen_indices_2[i] for i in range(len(angle_deviations))]\n atom_map[atoms_1.index(hydrogen_1)] = hydrogen_indices_2[deviations.index(min(deviations))]\n return atom_map",
"def hybrid(self):\n return self._hybrid",
"def _create_hybrid_topology(self):\n\n hybrid_top = app.Topology()\n\n # In the first instance, create a list of necessary atoms from\n # both old & new Topologies\n atom_list = []\n\n for pidx in range(self.hybrid_system.getNumParticles()):\n if pidx in self._hybrid_to_old_map:\n idx = self._hybrid_to_old_map[pidx]\n atom_list.append(list(self._old_topology.atoms())[idx])\n else:\n idx = self._hybrid_to_new_map[pidx]\n atom_list.append(list(self._new_topology.atoms())[idx])\n\n # Now we loop over the atoms and add them in alongside chains & resids\n \n # Non ideal variables to track the previous set of residues & chains\n # without having to constantly search backwards\n prev_res = None\n prev_chain = None\n\n for at in atom_list:\n if at.residue.chain != prev_chain:\n hybrid_chain = hybrid_top.addChain()\n prev_chain = at.residue.chain\n\n if at.residue != prev_res:\n hybrid_residue = hybrid_top.addResidue(\n at.residue.name, hybrid_chain, at.residue.id\n )\n prev_res = at.residue\n\n hybrid_atom = hybrid_top.addAtom(\n at.name, at.element, hybrid_residue, at.id\n )\n\n # Next we deal with bonds\n # First we add in all the old topology bonds\n for bond in self._old_topology.bonds():\n at1 = self.old_to_hybrid_atom_map[bond.atom1.index]\n at2 = self.old_to_hybrid_atom_map[bond.atom2.index]\n\n hybrid_top.addBond(\n list(hybrid_top.atoms())[at1],\n list(hybrid_top.atoms())[at2],\n bond.type, bond.order,\n )\n\n # Finally we add in all the bonds from the unique atoms in the\n # new Topology\n for bond in self._new_topology.bonds():\n at1 = self.new_to_hybrid_atom_map[bond.atom1.index]\n at2 = self.new_to_hybrid_atom_map[bond.atom2.index]\n if ((at1 in self._atom_classes['unique_new_atoms']) or\n (at2 in self._atom_classes['unique_new_atoms'])):\n hybrid_top.addBond(\n list(hybrid_top.atoms())[at1],\n list(hybrid_top.atoms())[at2],\n bond.type, bond.order,\n )\n\n return hybrid_top",
"def get_primals(self):\n pass",
"def infer(self):\n self.algorithm(self.primal_lattice, self.dual_lattice)",
"def compute_bridge(self):\n # G1 is the smallest graph and G2 is the bigger graph\n G1 = self\n G2 = self.LHS\n if G1.vcount() > G2.vcount():\n # Swap\n G1, G2 = G2, G1\n # The bridge\n G = HimesisPreConditionPattern()\n # We don't need to actually solve the largest common subgraph (LCS) problem\n # because we assume that the nodes are labelled uniquely in each graph\n # and that if a label is in G1 and in G2, then it will be in G\n Labels2 = G2.vs[Himesis.Constants.MT_LABEL]\n for label in G1.vs[Himesis.Constants.MT_LABEL]:\n if label in Labels2:\n # Get the corresponding node from G1 \n v1 = G1.vs.select(lambda v : v[Himesis.Constants.MT_LABEL] == label)\n if len(v1) == 1:\n v1 = v1[0]\n elif len(v1) == 0:\n raise Exception('Label does not exist: ' + str(label))\n else:\n raise Exception('Label is not unique: ' + str(label))\n # Get the corresponding node from G2 \n v2 = G2.vs.select(lambda v : v[Himesis.Constants.MT_LABEL] == label)\n if len(v2) == 1:\n v2 = v2[0]\n elif len(v2) == 0:\n raise Exception('Label does not exist: ' + str(label))\n else:\n raise Exception('Label is not unique: ' + str(label))\n new_node = G.add_node()\n # Now do a conjunction of the attributes\n for attr in v1.attribute_names():\n G.vs[new_node][attr] = v1[attr]\n for attr in v2.attribute_names():\n # The attribute is not in v1\n if attr not in G.vs[new_node].attribute_names():\n G.vs[new_node][attr] = v2[attr]\n # Ignore the GUID attribute, it will be automatically set at run-time\n elif attr == Himesis.Constants.GUID:\n continue\n elif Himesis.is_RAM_attribute(attr):\n if not v2[attr]:\n # There is no constraint for this attribute\n continue\n # The attribute constraint code is the conjunction of the LHS constraint\n # with the NAC constraint for this attribute\n s = '''from %s import %s\nfrom %s import %s''' % (G1.name, G1.name, G2.name, G2.name)\n if G1 == self:\n s += ('''\nlhs = %s()''' % G2.name) + '''\nreturn %s(lhs).%s(attr_value, this) and %s().%s(attr_value, this)'''\n else:\n s += ('''\nlhs = %s()''' % G1.name) + '''\nreturn %s().%s(attr_value, this) and %s(lhs).%s(attr_value, this)'''\n G.vs[new_node][attr] = s % (G1.name, G1.get_attr_constraint_name(v1.index, attr),\n G2.name, G2.get_attr_constraint_name(v2.index, attr))\n elif v1[attr] != v2[attr]:\n #TODO: This should be a TransformationLanguageSpecificException\n raise Exception('Unable to conjunct \\'%s\\' while computing the bridge' % attr)\n #else: v1[attr] == v2[attr], so we don't need to do anything more \n # Now add the edges\n # We only need to go through the edges of the smaller graph\n for e in G1.edge_iter():\n src_label = G1.vs[G1.es[e].source][Himesis.Constants.MT_LABEL]\n trg_label = G1.vs[G1.es[e].target][Himesis.Constants.MT_LABEL]\n src = G.vs.select(lambda v : v[Himesis.Constants.MT_LABEL] == src_label)\n trg = G.vs.select(lambda v : v[Himesis.Constants.MT_LABEL] == trg_label)\n if len(src) == len(trg) == 1:\n src = src[0]\n trg = trg[0]\n G.add_edges((src.index, trg.index))\n elif len(src) == 0 :\n# raise Exception('Label does not exist :: '+str(src_label))\n pass\n elif len(trg) == 0 :\n# raise Exception('Label does not exist :: '+str(tgt_label))\n pass\n elif len(src) > 1 :\n raise Exception('Label is not unique: ' + str(src_label))\n elif len(trg) > 1 :\n raise Exception('Label is not unique: ' + str(trg_label)) \n return G",
"def create_hybrid_image(image1, image2, filter,first_weight,filter_type):\n\n assert image1.shape[0] == image2.shape[0]\n assert image1.shape[1] == image2.shape[1]\n assert image1.shape[2] == image2.shape[2]\n\n ############################\n # low pass filter is the normal gaussian distribution\n\n low_frequencies = my_imfilter(image1,filter)\n if filter_type == 1:\n # high pass filter is the negative of the gaussian distribution\n h=-filter\n h[int(filter.shape[0]/2),int(filter.shape[0]/2)] = filter[int(filter.shape[0]/2),int(filter.shape[0]/2)]\n h[int(filter.shape[0]/2),int(filter.shape[0]/2)] = h[int(filter.shape[0]/2),int(filter.shape[0]/2)] -h.sum()\n high_frequencies = my_imfilter(image2,h)\n hybrid_image = first_weight*low_frequencies + (1-first_weight)*high_frequencies\n hybrid_image = np.clip(hybrid_image, 0.0, 1.0)\n elif filter_type == 2:\n high_frequencies = image2 - my_imfilter(image2, filter)\n hybrid_image = first_weight*low_frequencies + (1-first_weight)*high_frequencies\n #print(hybrid_image.max())\n hybrid_image = np.clip(hybrid_image, 0.0, 1.0)\n #print(hybrid_image.max())\n ### END OF STUDENT CODE ####\n ############################\n\n return low_frequencies, high_frequencies, hybrid_image",
"def hybrid_image(self):\n\n src1 = np.copy(self.imagesData[\"2_1\"])\n src2 = np.copy(self.imagesData[\"2_2\"])\n\n # Minimum required shape\n min_shape = (min(src1.shape[0], src2.shape[0]),\n min(src1.shape[1], src2.shape[1]))\n\n # resize images to ensure both have same shapes\n src1_resized = cv2.resize(src1, min_shape, interpolation=cv2.INTER_AREA)\n src2_resized = cv2.resize(src2, min_shape, interpolation=cv2.INTER_AREA)\n\n # Apply filters\n image1_dft = FrequencyFilters.high_pass_filter(source=src1_resized, size=20)\n image2_dft = FrequencyFilters.low_pass_filter(source=src2_resized, size=15)\n\n # Mix 2 images\n hybrid_image = image1_dft + image2_dft\n\n self.display_image(source=hybrid_image, widget=self.img2_output)",
"def create_hybrid_image(image1, image2, filter):\n\n assert image1.shape[0] == image2.shape[0]\n assert image1.shape[1] == image2.shape[1]\n assert image1.shape[2] == image2.shape[2]\n\n ############################\n ### TODO: YOUR CODE HERE ###\n\n low_frequencies = my_imfilter(image1, filter)\n high_frequencies = image2 - my_imfilter(image2, filter)\n\n hybrid_image = low_frequencies + (high_frequencies)\n np.clip(hybrid_image, 0, 1, out=hybrid_image)\n\n ### END OF STUDENT CODE ####\n ############################\n\n return low_frequencies, high_frequencies, hybrid_image",
"def n_primals(self):\n pass",
"def protocol_ridge_step1(self):\n num_dimension = len(self.merged_enc_A)\n\n # sample a random matrix(R) and a random vector(r)\n Range = self.pk.n-1\n MaxInt = self.pk.max_int\n R = [[( random.randrange(Range) - MaxInt ) for _ in range(num_dimension)] for _ in range(num_dimension)]\n \n # check that R is invertible. ([det(A)] is non-zero <=> A is invertible)\n # if R is not invertible, random-sample again until R is invertible.\n det_R = compute_det(R, self.pk.n)\n while(det_R == 0.0):\n R = [[( random.randrange(Range) - MaxInt ) for _ in range(num_dimension)] for _ in range(num_dimension)]\n det_R = compute_det(R, self.pk.n)\n\n r = [(int)( random.randrange(Range) - MaxInt ) for _ in range(num_dimension)]\n\n self.R = R\n self.r = r\n self.det_R = det_R\n\n # Matrix multiplication with multi proccessing.\n splitedAR = []\n R_trans = transpose(self.R)\n for i in range(num_dimension):\n for j in range(num_dimension):\n splitedAR.append([self.merged_enc_A[i], R_trans[j]])\n\n splitedbA = list(zip(self.merged_enc_b, self.merged_enc_A))\n\n with multi.Pool(processes = multi.cpu_count()) as pool: #multi processing\n # masking C = A*R with multi-processing\n enc_C = pool.map(self.compute_enc_C, splitedAR)\n\n # masking d = b + A*r\n enc_d = pool.map(self.compute_enc_d, splitedbA)\n pool.close()\n pool.terminate()\n enc_C = list(zip(*[iter(enc_C)]*num_dimension)) # reshape\n\n '''\n # Matrix multiplication with multi proccessing.\n interval = num_dimension//5\n self.interval = interval\n A_splited = [self.merged_enc_A[i:i + interval] for i in range(0, num_dimension, interval)]\n R_trans = transpose(R)\n R_t_splited = [R_trans[i:i + interval] for i in range(0, num_dimension, interval)]\n splitedAR = []\n for A_i in A_splited:\n for R_t_i in R_t_splited:\n splitedAR.append([A_i, transpose(R_t_i)])\n\n splitedbA = list(zip(self.merged_enc_b, self.merged_enc_A))\n\n with multi.Pool(processes = multi.cpu_count()) as pool: #multi processing\n # masking C = A*R with multi-processing\n temp_enc_C = pool.map(self.compute_enc_C, splitedAR)\n enc_C = []\n for i in range(5): #reshape\n enc_C += [list(chain.from_iterable(items)) for items in zip(*temp_enc_C[i:i+5])]\n\n # masking d = b + A*r\n enc_d = pool.map(self.compute_enc_d, splitedbA)\n pool.close()\n pool.terminate()\n '''\n\n return enc_C, enc_d",
"def hybridJaccardResultFilter(sentence, tagName, phraseFirstTokenIdx, phraseTokenCount):\n if tagName in hybridJaccardProcessors:\n phrase = sentence.getTokens()[phraseFirstTokenIdx:(phraseFirstTokenIdx+phraseTokenCount)]\n hjResult = hybridJaccardProcessors[tagName].findBestMatchWordsCached(phrase)\n if hjResult is None:\n return False\n sentence.setFilteredPhrase(hjResult)\n return True",
"def calculate_phen(a, b, pheno1, pheno2, df, phenos_to_use, phen_corr):\n\n # If in lower triangle, do not compute; symmetric matrix\n if a > b:\n return phen_corr[b, a]\n elif a == b:\n return 1\n else:\n # if this combination of phenos doesn't exist in the map file, then nan\n if (pheno1 in phenos_to_use) and (pheno2 in phenos_to_use):\n phen_beta1, phen_beta2 = get_betas(df, pheno1, pheno2, \"sig\")\n return (\n pearsonr(phen_beta1, phen_beta2)[0]\n if phen_beta1 is not None\n else np.nan\n )\n else:\n return np.nan",
"def _propose(self,suitor):\n\n engaged = False\n count = self.proposals[suitor]\n preferences = self.male_pref[suitor,:]\n \n while not engaged and count < len(preferences):\n \n partner = preferences[count]\n \n if np.isnan(self.husband[partner]):\n self.wife[suitor] = partner\n self.husband[partner] = suitor\n engaged = True\n else:\n fiancee = self.husband[partner]\n \n if self.fem_pref[partner,suitor] < self.fem_pref[partner,fiancee]:\n \n self.bachelors.put(fiancee)\n self.wife[suitor] = partner\n self.husband[partner] = suitor\n engaged = True\n\n count += 1\n \n self.proposals[suitor] = count",
"def _check_priorities(self) -> None:\n\n priority_dict = defaultdict(list)\n for p in self.policies:\n priority_dict[p.priority].append(type(p).__name__)\n\n for k, v in priority_dict.items():\n if len(v) > 1:\n logger.warning(\n (\n \"Found policies {} with same priority {} \"\n \"in PolicyEnsemble. When personalizing \"\n \"priorities, be sure to give all policies \"\n \"different priorities. More information: \"\n \"{}/core/policies/\"\n ).format(v, k, DOCS_BASE_URL)\n )",
"def omm_hybrid_topology(self):\n return self._omm_hybrid_topology",
"def run_pairwise_comp(self, ref_df):\n\n #List of lists\n ref_df_peps = self._get_all_peptides_from_df(ref_df) #Extract high affinity peptides\n score_dict_per_len = self._get_protein_dict_per_len(self.filt_dfs, ref_df_peps) #Create scores dictionary\n\n for prot_name in self.original_proteins:\n\n prot_seq = self.original_proteins_df.ProtSeq[self.original_proteins_df.ID == prot_name].values[0]\n ranges = self.original_proteins_df.Ranges[self.original_proteins_df.ID == prot_name].values[0]\n #Ranges: index data about the location of high affinity peptides in protein being used for comparison\n #Ranges_2: make shallow list from deep list of lists\n ranges_2 = [item for sublist in [i[0] for i in ranges] for item in sublist]\n\n matches_range = []\n\n for list_pep in ref_df_peps:\n for single_pep in list_pep:\n\n high_aa_count = 0\n pep_len = len(single_pep)\n count = prot_seq.count(single_pep) #Number of times a single pep occurs in the entire prot seq\n\n if count > 0: #Find locations where matches occur\n it = re.finditer(single_pep, prot_seq)\n\n for i in it:\n present_range = list(range(i.start(), i.end()))\n if set(present_range).issubset(set(ranges_2)):\n high_aa_count += 1\n matches_range.append(present_range) #Retain match location data\n\n self._update_dict_values_per_len(score_dict_per_len, prot_name, count,\n pep_len, high_aa_count, matches_range)\n\n return score_dict_per_len",
"def Hybrid(SEQ1, SEQ2, TMIN, TMAX, TSTEP = 1, TYPE = 'RNA', SODIUM = 1,\r\n MAGNESIUM = 0, UNAFOLDPATH = 'C:\\\\UNAFold\\\\bin\\\\',\r\n RETURNTYPE = 'tuple'):\r\n\r\n command = UNAFOLDPATH + 'hybrid-min '\r\n command += ' --tmin=' + str(TMIN)\r\n command += ' --tmax=' + str(TMAX)\r\n command += ' --tinc=' + str(TSTEP)\r\n command += ' --sodium=' + str(SODIUM)\r\n command += ' --magnesium=' + str(MAGNESIUM)\r\n command += ' --NA=' + TYPE\r\n command += ' -q '\r\n\r\n command += SEQ1 + ' ' + SEQ2\r\n\r\n# print command\r\n sys_call = subprocess.Popen(command, shell = True, stdout = subprocess.PIPE)\r\n sys_call.wait()\r\n output = sys_call.communicate()[0]\r\n\r\n output_list = re.split('\\n', output)\r\n\r\n if RETURNTYPE == 'tuple':\r\n final_list = []\r\n for row_num in range(len(output_list)-1):\r\n temp = re.split('\\t', output_list[row_num])\r\n print temp\r\n final_list.append((TMIN+row_num*TSTEP, float(temp[0]),\r\n float(temp[1]), float(temp[2])))\r\n\r\n return final_list\r\n elif RETURNTYPE == 'array':\r\n final_array = numpy.zeros((len(output_list)-1, 4))\r\n for row_num in range(len(output_list)-1):\r\n temp = re.split('\\t', output_list[row_num])\r\n final_array[row_num] = [TMIN+row_num*TSTEP, float(temp[0]),\r\n float(temp[1]), float(temp[2])]\r\n\r\n return final_array",
"def _add_particles(self):\n # Begin by copying all particles in the old system\n for particle_idx in range(self._old_system.getNumParticles()):\n mass_old = self._old_system.getParticleMass(particle_idx)\n\n if particle_idx in self._old_to_new_map.keys():\n particle_idx_new_system = self._old_to_new_map[particle_idx]\n mass_new = self._new_system.getParticleMass(\n particle_idx_new_system)\n # Take the average of the masses if the atom is mapped\n particle_mass = (mass_old + mass_new) / 2\n else:\n particle_mass = mass_old\n\n hybrid_idx = self._hybrid_system.addParticle(particle_mass)\n self._old_to_hybrid_map[particle_idx] = hybrid_idx\n\n # If the particle index in question is mapped, make sure to add it\n # to the new to hybrid map as well.\n if particle_idx in self._old_to_new_map.keys():\n self._new_to_hybrid_map[particle_idx_new_system] = hybrid_idx\n\n # Next, add the remaining unique atoms from the new system to the\n # hybrid system and map accordingly.\n for particle_idx in self._unique_new_atoms:\n particle_mass = self._new_system.getParticleMass(particle_idx)\n hybrid_idx = self._hybrid_system.addParticle(particle_mass)\n self._new_to_hybrid_map[particle_idx] = hybrid_idx\n\n # Create the opposite atom maps for later use (nonbonded processing)\n self._hybrid_to_old_map = self._invert_dict(self._old_to_hybrid_map)\n self._hybrid_to_new_map = self._invert_dict(self._new_to_hybrid_map)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the closest fingerprint match for the binary results, based on the fingerprints in cgftypes_file
|
def find_closest_fingerprint(bin_results: list, cgftypes_file: str) -> list:
    with open(cgftypes_file, "r") as fingerprint_track:
        csv_reader = csv.reader(fingerprint_track)
        header = next(csv_reader)
        cgf_type_index = header.index("cgf.type")
        strain_freq_index = header.index("num.strains")
        dict_gene_indices = {gene: header.index(gene) for gene in GENE_LIST}
        found_bin_dict = {gene: bin_results[index] for index, gene in enumerate(GENE_LIST)}
        largest_lo_tup = []
        largest_strain_freq = 0
        for row in csv_reader:
            row_bin_dict = {gene: int(row[index]) for gene, index in dict_gene_indices.items()}
            # genes whose presence/absence call disagrees with the query fingerprint
            dict_differences = {val: found_bin_dict[val] - row_bin_dict[val] for val in found_bin_dict
                                if val in row_bin_dict and found_bin_dict[val] - row_bin_dict[val] != 0}
            if largest_lo_tup == []:
                largest_lo_tup = [(row[cgf_type_index], dict_differences)]
                largest_strain_freq = int(row[strain_freq_index])
            elif len(dict_differences) < len(largest_lo_tup[0][1]):
                largest_lo_tup = [(row[cgf_type_index], dict_differences)]
                # track this match's frequency so later ties compare against it
                largest_strain_freq = int(row[strain_freq_index])
            elif len(dict_differences) == len(largest_lo_tup[0][1]):
                strain_freq = int(row[strain_freq_index])  # compare as numbers, not strings
                if strain_freq > largest_strain_freq:
                    largest_strain_freq = strain_freq
                    largest_lo_tup = [(row[cgf_type_index], dict_differences)] + largest_lo_tup
                else:
                    largest_lo_tup.append((row[cgf_type_index], dict_differences))
    print(largest_lo_tup)
    return largest_lo_tup
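
A stripped-down sketch of the distance measure used above: count the genes on which two binary fingerprints disagree. Gene names and calls are made up:

query_fp = {"cj0181": 1, "cj0298": 0, "cj0307": 1}
type_fp = {"cj0181": 1, "cj0298": 1, "cj0307": 0}

differences = {g: query_fp[g] - type_fp[g] for g in query_fp
               if g in type_fp and query_fp[g] != type_fp[g]}
print(differences)       # {'cj0298': -1, 'cj0307': 1}
print(len(differences))  # 2 genes disagree -> distance of 2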
|
[
"def find_matches(filename, e):\n best_matches = {}\n with open(filename) as f:\n try:\n for record in NCBIXML.parse(f):\n best = {}\n if record.alignments:\n for alignment in record.alignments:\n genome = extract_id(alignment.hit_def)\n locus = extract_locus(alignment.hit_def)\n \n best_value = e\n for hsp in alignment.hsps:\n if hsp.expect < best_value:\n best_value = hsp.expect\n \n if genome not in best:\n best[genome] = []\n \n best[genome].add((locus, best_value))\n\n best_matches[extract_full_id(record.query)] = best\n\n except ValueError as e:\n return None\n\n return best_matches",
"def find_closest_file(multiband_files, hotspot_files):\n\n # Holder for file key/value pair dict.\n file_combo = {}\n\n # The naming convention of the files includes the timestamp in the filenames.\n # There are three timestamps in the file name: The '_c' is the \"center\" between the start and end time.\n # This will return a dict with a KEY: file path in the S3 bucket of the FDCF file and VALUE: the datetime of the scan.\n hotspot_file_times = {fname: datetime.strptime((fname.split(\"_c\")[1]).replace('.nc', \"\"), \"%Y%j%H%M%S%f\")\n for fname in hotspot_files}\n\n for mb in multiband_files:\n # The naming convention of the files includes the timestamp in the filenames.\n # There are three timestamps in the file name: The '_c' is the \"center\" between the start and end time.\n mb_time = datetime.strptime((mb.split(\"_c\")[1]).replace('.nc', \"\"), \"%Y%j%H%M%S%f\")\n\n # Returns the time of the hotspot file that is closest in time to the mb_time.\n closest_hotspot_time = min(hotspot_file_times.values(), key=lambda t: abs(t - mb_time))\n\n # From the list of FDCF filepaths, return the file path for the given time.\n closest_hotspot_file = list(hotspot_file_times.keys())[\n list(hotspot_file_times.values()).index(closest_hotspot_time)]\n\n timedelta = abs(closest_hotspot_time - mb_time)\n\n # Append to dictionary\n file_combo[mb] = closest_hotspot_file\n return file_combo",
"def search_match_1(snip_fgp1):\n conn = psycopg2.connect(host=\"sculptor.stat.cmu.edu\", database=c.DB_USER,\n user=c.DB_USER, password=c.DB_PASSWORD)\n cur = conn.cursor()\n cur.execute(\"SELECT song_id FROM songs\")\n uniq_id = cur.fetchall()\n uniq_id = reduce(np.append, uniq_id)\n \n tolerance = 10**(-3) # this is the default tolerance level, tuned\n \n matching_cnt = []\n window_num = []\n \n for song_id in uniq_id:\n distance = retriv_fgp1(int(song_id), snip_fgp1)\n matching_cnt.append(np.sum(distance<=tolerance))\n window_num.append(len(distance))\n \n # This is the new criterion: must have more than 10% similarity of a song\n # in the database - considered different lengths of songs\n similarity_lst = list(map(lambda i,j: i/j > 0.1, matching_cnt, window_num))\n matched_idx = [i for i,val in enumerate(similarity_lst) if val==True]\n matched_sid = [uniq_id[i] for i in matched_idx]\n \n if matched_sid == []:\n sm_logger.info('Oops, we try hard but find nothing...')\n return None\n else:\n possible_lst = []\n for i in matched_sid:\n possible_lst.append(retriv_name(int(i)))\n sm_logger.info('Found some songs matched the snippet!')\n return possible_lst",
"async def get_best_results(self, query: str) -> List[Tuple[float, str]]:\n results = []\n for genre in self.genres:\n ratios = [difflib.SequenceMatcher(None, query, genre).ratio()]\n for word in REGEX_NON_ALPHABET.split(genre):\n ratios.append(difflib.SequenceMatcher(None, query, word).ratio())\n results.append((round(max(ratios), 2), genre))\n return sorted((item for item in results if item[0] >= 0.60), reverse=True)[:4]",
"def match(): # code\n\n # for testing, match first fingerprint code in creation.utilisation.imp\n #code = \"\"\n\n utilizations = Model.get('creation.utilisation.imp')\n result = utilizations.find(['title', \"=\", \"999,999\"])\n if not result:\n sys.exit()\n #code = result.fingerprint\n\n print result[0].fingerprint",
"def find_matching_file(filepath, name, size, crc):\n # if size and CRC not specified then require strict name matching\n files = os.listdir(filepath)\n candidates=[]\n for f in files:\n if f==name or f.startswith(name+\".\"):\n candidates.append(f)\n if not candidates:\n for f in files:\n if f.lower()==name.lower() or f.lower().startswith(name.lower()+\".\"):\n candidates.append(f)\n if not candidates:\n for f in files:\n if name.lower().startswith(f.lower()):\n candidates.append(f)\n\n print(name, candidates)\n for c in candidates:\n cf = os.path.join(filepath, c)\n print(c)\n fsz, fcrc = sz_crc32(cf)\n print(fsz, fcrc)\n if fsz == size and fcrc==crc:\n print(\"exact match\")\n return cf\n if size is None and fcrc==crc:\n print(\"match bases on CRC (size unknown)\")\n return cf\n\n return None",
"def get_result_file(reference_file):\r\n replacements = { 'reference' : 'result', 'reference.' : ''}\r\n for key, replacment in replacements.items():\r\n basename = os.path.basename(reference_file)\r\n basename = basename.replace(key, replacment)\r\n hit = re.search(r'(_\\d{8}_\\d{6})[^\\d]', basename)\r\n if hit:\r\n candidates = glob(os.path.join(os.path.dirname(reference_file), basename.replace(hit.group(1), '*')))\r\n rgx = re.compile(basename.replace(hit.group(1), r'_\\d{8}_\\d{6}'))\r\n for item in candidates:\r\n if item != reference_file and rgx.search(item):\r\n return item\r\n return None",
"def detect(synonym, file1, file2, num_tuple):\n if not isfile(file1) or not isfile(file2) or not isfile(synonym):\n print(\"Invalid file name, please try again\")\n return\n file1 = open(file1)\n file2 = open(file2)\n file1_list = []\n file2_list = []\n match = 0\n synonyms = {}\n with open(synonym) as synonym:\n for line in synonym:\n syn_line = line.strip().split()\n for words in syn_line:\n synonyms[words] = syn_line\n for line in file1:\n file1_list += set_tuples(line, num_tuple)\n for line in file2:\n file2_list += set_tuples(line, num_tuple)\n for tuple1 in file1_list:\n for tuple2 in file2_list:\n if compare_tuples(tuple1, tuple2, synonyms):\n match += 1\n break\n print(\"{0:.2f}%\".format(match/(max(len(file1_list), len(file2_list))) * 100))",
"def get_closest(data_dir, id, scan_type, ref_date = datetime.now()):\n if not data_dir:\n data_dir = os.getcwd()\n scans = get_scans(data_dir, id, scan_type)\n closest = find_closest(scans, ref_date)\n return closest",
"def retriv_fgp1(i, snip_fgp1):\n \n conn = psycopg2.connect(host=\"sculptor.stat.cmu.edu\", database=c.DB_USER,\n user=c.DB_USER, password=c.DB_PASSWORD)\n cur = conn.cursor()\n sql_command = \"SELECT fingerprint1 FROM fingerprints WHERE song_id = %s\"\n cur.execute(sql_command, [i])\n fgp1 = cur.fetchall()\n conn.close()\n \n fgp1 = reduce(np.append, fgp1)\n for i in range(len(fgp1)):\n fgp1[i] = float(fgp1[i])\n distance1 = abs(snip_fgp1-fgp1)\n sm_logger.info(\"Distance has been calculated\")\n return distance1",
"def compare_file(file1, file2):\n with open(file1) as f1, open(file2) as f2:\n f1_inputs = []\n f1_outputs = []\n f2_inputs = []\n f2_outputs = []\n # get input from f1\n # get output from f1\n # get input from f2\n # get output from f2\n for line in f1:\n t=line.split()[1] # type\n k=line.split()[2].split('=')[0] # key\n v=line.split('=')[1] # value\n if line.startswith('input'):\n f1_inputs.append({'type': t, 'key': k, 'value': v})\n if line.startswith('output'):\n f1_outputs.append({'type': t, 'key': k, 'value': v})\n for line in f2:\n t=line.split()[1] # type\n k=line.split()[2].split('=')[0] # key\n v=line.split('=')[1] # value\n if line.startswith('input'):\n f2_inputs.append({'type': t, 'key': k, 'value': v})\n if line.startswith('output'):\n f2_outputs.append({'type': t, 'key': k, 'value': v})\n # comparing f1 and f2\n # now only compare in alphabetical order\n # i'll do a arbitrary combination later\n if not len(f1_inputs) == len(f2_inputs): return [False, 'length']\n if not len(f1_outputs) == len(f2_outputs): return [False, 'length']\n\n f1i = [f1_inputs[i]['value'] for i in range(len(f1_inputs))]\n f2i = [f2_inputs[i]['value'] for i in range(len(f2_inputs))]\n f1o = [f1_outputs[i]['value'] for i in range(len(f1_outputs))]\n f2o = [f2_outputs[i]['value'] for i in range(len(f2_outputs))]\n\n if not sorted(f1i) == sorted(f2i):\n # return [False, [f1i, f2i, f1o, f2o]]\n return [False, [f1_inputs, f2_inputs]]\n if not sorted(f1o) == sorted(f2o):\n return [False, [f1_outputs, f2_outputs]]\n \n # for i in range(len(f1_inputs)):\n # if not f1_inputs[i]['value'] == f2_inputs[i]['value']: return False\n # for i in range(len(f1_outputs)):\n # if not f1_outputs[i]['value'] == f2_outputs[i]['value']: return False\n return [True, '']",
"def find_best_match(seq, ref):\n\n if not _has_rapidfuzz:\n raise ImportError(\"Function requires the rapidfuzz package.\")\n\n if len(seq) > len(ref):\n raise ValueError(\"`ref` is shorter than `seq`\")\n\n distances = {}\n for i in range(0, len(ref) - len(seq) + 1): # moving window of comparison\n distance = rapidfuzz.levenshtein.distance(seq, ref[i : (i + len(seq))])\n distances[(i, i + len(seq))] = distance\n\n shortest_distance = min(distances.values())\n\n best_matches = {distance: shortest_distance, \"locations\": []}\n for location, distance in distances.items():\n if distance == shortest_distance:\n best_matches[\"locations\"].append(location)\n\n results = {\"distances\": distances, \"best_matches\": best_matches}\n\n return results",
"def phred_autodetect(input_file, USER_PHRED):\n\n if input_file.endswith('.gz'): # Open file\n infile = gzip.open(input_file, 'rt')\n else: \n infile = open(input_file, 'r') \n\n # Phred sets\n phred64_set = set(\"@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefgh\")\n phred33_set = set(\"!\\\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJ\")\n\n quality_string = '' # Initialize variables\n line_count = 0\n is_phred33 = False \n is_phred64 = False\n phred_determined = False\n\n line = infile.readline()[:-1] # Read line by line, until phred type is found\n while phred_determined == False:\n line_count += 1\n\n if line_count == 4: # At this point, we are looking at a quality string\n quality_string = line\n is_phred33 = set(quality_string).issubset(phred33_set)\n is_phred64 = set(quality_string).issubset(phred64_set)\n line_count = 0\n\n if is_phred33 and not is_phred64:\n phred_determined = True\n return \"33\"\n\n elif not is_phred33 and is_phred64:\n phred_determined = True\n return \"64\"\n \n line = infile.readline().strip()\n\n infile.close()\n\n # In case phred can't be determined, use the users input. \n if not phred_determined: \n # If user did not specify phred type \n if USER_PHRED == '':\n print('ERROR: We cannot autodetect the phred encoding type of your file(s). Please specify it in the input.')\n sys.exit(1)\n phred_determined = True\n return USER_PHRED",
"def match_file(ht, filename, density=None, sr=11025, n_fft=512, n_hop=256, window=1, shifts=4, verbose=False):\n hq = audfprint.wavfile2hashes(filename, sr=sr, density=density, \n n_fft=n_fft, n_hop=n_hop, shifts=shifts)\n # Fake durations as largest hash time\n if len(hq) == 0:\n durd = 0.0\n else:\n durd = float(n_hop * hq[-1][0])/sr\n if verbose:\n print \"Analyzed\",filename,\"of\",('%.3f'%durd),\"s to\",len(hq),\"hashes\"\n # Run query\n return match_hashes(ht, hq, window=window), durd, len(hq)",
"def getBestHits(file_):\n\thits=parse(file_)\n\tem={}\n\tfor h in hits:\n\t\tscaffold=h.reference\n\t\tcontig=h.query\n\t\tem[scaffold]=em.get(scaffold,{})\n\t\tprev=em[scaffold].get(contig,None)\n\t\tif prev==None:\n\t\t\tem[scaffold][contig]=h\n\t\telse:\n\t\t\tif float(em[scaffold][contig].covq) < float(h.covq):\n\t\t\t\tem[scaffold][contig]=h\n\tfor s in em:\n\t\tbest_hits=em[s].values()\n\t\tbest_hits.sort(key=lambda x: int(x.rstart))\n\t\tem[s]=best_hits\n\treturn em",
"def map(self, fingerprints_test, fingerprints_train, verbose=False):\n # Get unique fingerprints\n fingerprints_train = np.unique(fingerprints_train)\n fingerprints_test = np.unique(fingerprints_test)\n\n # Create mappings\n mapping_train = dict()\n\n # Loop over fingerprints\n for fp in fingerprints_train:\n # Extract keys\n for key in fp:\n # Add fingerprint to each key\n mapping_train[key] = mapping_train.get(key, set()) | set([fp])\n\n # Refine mapping to fp_test -> set([fps_train labels])\n mapping = dict()\n # Loop over all testing fingerprints\n for i, fp in enumerate(fingerprints_test):\n\n # Print progress if verbose\n if verbose:\n print(\"{}/{}\".format(i+1, fingerprints_test.shape[0]), end='\\r')\n\n # Initialise set\n matches = set()\n\n # Loop over all keys of fingerprint\n for key in fp:\n # Get possible fingerprints\n matches |= mapping_train.get(key, set())\n\n # Initialise highest score\n highest_score = 0\n\n # Loop over all matches\n for match in matches:\n # Get score\n score = fp.compare(match)\n # If larger than highest score, replace match\n if score > highest_score:\n mapping[fp] = match\n highest_score = score\n\n # Return result\n return mapping",
"def match_bf(img1path, img2path, num_keypoints=1000):\n orb = cv2.ORB(num_keypoints, 1.2)\n\n img_from = cv2.imread(img1path)\n img_to = cv2.imread(img2path)\n\n # comparision\n (kp1, des1) = orb.detectAndCompute(img_from, None)\n (kp2, des2) = orb.detectAndCompute(img_to, None)\n\n # matcher\n bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)\n matches = bf.match(des1, des2)\n\n # sort matches\n matches = sorted(matches, key=lambda val: val.distance)\n\n return matches",
"def find_closest_image(self, img, imtype): \n obstime = datetime.datetime.strptime('%s-%s-%s %s' % ('2015', '01', '23', self.obs_dict[img][3]), '%Y-%m-%d %H:%M:%S')\n if obstime.hour < 12: ot = obstime + datetime.timedelta(days=1)\n keys = self.obs_dict.keys()\n keys.sort()\n best_time=1e5\n best_image = None\n for k in keys:\n if self.obs_dict[k][1]==imtype:\n ot = datetime.datetime.strptime('%s-%s-%s %s' % ('2015', '01', '23', self.obs_dict[k][3]), '%Y-%m-%d %H:%M:%S')\n if ot.hour < 12: ot = ot + datetime.timedelta(days=1)\n t = min((obstime-ot).seconds, (ot-obstime).seconds)\n if t < best_time:\n best_time = t\n best_img = k\n return best_img",
"def findMatchesBetweenImages(image_1, image_2, num_matches):\n # matches - type: list of cv2.DMath\n matches = None\n # image_1_kp - type: list of cv2.KeyPoint items.\n image_1_kp = None\n # image_1_desc - type: numpy.ndarray of numpy.uint8 values.\n image_1_desc = None\n # image_2_kp - type: list of cv2.KeyPoint items.\n image_2_kp = None\n # image_2_desc - type: numpy.ndarray of numpy.uint8 values.\n image_2_desc = None\n\n # COPY YOUR CODE FROM A7 HERE.\n\n # sift = SIFT()\n # image_1_kp, image_1_desc = sift.detectAndCompute(image_1, None)\n # image_2_kp, image_2_desc = sift.detectAndCompute(image_2, None)\n # bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)\n # matches = bf.match(image_1_desc,image_2_desc)\n # matches = sorted(matches, key = lambda x:x.distance)\n # matches = matches[:num_matches]\n\n alg = cv2.ORB()\n # alg = cv2.SIFT()\n\n # 1. Compute SIFT keypoints and descriptors for both images\n image_1_kp, image_1_desc = alg.detectAndCompute(image_1,None)\n image_2_kp, image_2_desc = alg.detectAndCompute(image_2,None)\n\n # 2. Create a Brute Force Matcher, using the hamming distance (and set crossCheck to true).\n bf_matcher = cv2.BFMatcher(normType=cv2.NORM_HAMMING,crossCheck=True)\n\n # 3. Compute the matches between both images.\n matches = bf_matcher.match(image_1_desc,image_2_desc)\n\n # 4. Sort the matches based on distance so you get the best matches.\n # 5. ...the top 10 matches in a list.\n matches = sorted(matches, key = lambda x:x.distance)[:num_matches]\n\n\n return image_1_kp, image_2_kp, matches"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Prints the given state of the puzzle
|
def print_puzzle(state):
    print('-' * 13)
    for i in range(4):
        print('|', end="")
        for j in range(3):
            if state[i][j] == 0:
                print("   |", end="")  # 0 marks the blank tile
            else:
                print("", state[i][j], "|", end="")
        print('\n' + '-' * 13)
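
A minimal usage sketch, assuming the state is a 4x3 grid of ints with 0 as the blank; double-digit tiles will widen their cell slightly:

state = [[1, 2, 3],
         [4, 5, 6],
         [7, 8, 9],
         [10, 11, 0]]
print_puzzle(state)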
|
[
"def print_state(self):\n p1_board = self.board[0:6]\n p2_board = self.board[7:13]\n p2_board.reverse()\n p1_purse = self.board[6]\n p2_purse = self.board[13]\n\n print('\\n')\n print(\"Player 1 Score: {}\".format(self.p1_score))\n print(\"Player 2 Score: {}\".format(self.p2_score))\n print('\\n')\n print(\"Active Player: {}\".format(self.active_player))\n print(\"Actions: \", self.get_legal_actions())\n print(\"Game Over: {}\".format(self.is_over))\n print('\\n')\n print('\\t ' + ' '.join(map(str, p2_board)))\n print('\\t' + str(p2_purse) + '\\t\\t' + str(p1_purse))\n print('\\t ' + ' '.join(map(str, p1_board)))\n print('\\n')\n print(\"=\"*50)",
"def printState(self, state, info):\n\n print(\"Current value of state: %s\" % (info))\n for y in range(5):\n line=[]\n for x in range(5):\n line.append(hex(state[x][y]))\n print('\\t%s' % line)",
"def print_puzzle(self):\n self.initial_puzzle.print_puzzle()",
"def show(self, state, stream=sys.stdout):\n \n for i in range(self.n):\n fmtstr = []\n for j in range(self.n-1):\n fmtstr.append( \" %s |\"%TicTacToe.Chrs[state.board[i*self.n+j]])\n fmtstr.append(\" %s \"%TicTacToe.Chrs[state.board[(i+1)*self.n-1]])\n line = \"\".join(fmtstr)\n print(line, file=stream)\n if i < self.n-1:\n print('-'*len(line), file=stream)",
"def _print_state(self):\n\t\t# cross-platform clear screen\n\t\tos.system(['clear', 'cls'][os.name == 'nt'])\n\t\t# print the round\n\t\tprint(\" Round: \" + str(self._round))\n\t\tprint(\"\")\n\t\t# print the grid\n\t\tfor i in xrange(self.height):\n\t\t\tprint(\"\\t\"),\n\t\t\tfor j in xrange(self.width):\n\t\t\t\tprint(\"| \" + str(self.board[i][j])),\n\t\t\tprint(\"|\")\n\t\tprint(\"\\t\"),\n\t\t# print the bottom of the grid with columns index\n\t\tfor k in xrange(self.width):\n\t\t\tprint(\" _\"),\n\t\tprint(\"\")\n\t\tprint(\"\\t\"),\n\t\tfor k in xrange(self.width):\n\t\t\tprint(\" %d\" % (k + 1)),\n\t\tprint(\"\")\n\t\t# print final message when the game is finished\n\t\tif self.finished:\n\t\t\tprint(\"Game Over!\")\n\t\t\tif self.winner != None:\n\t\t\t\tprint(str(self.winner.type) + \" is the winner!\")\n\t\t\telse:\n\t\t\t\tprint(\"Game is a draw\")",
"def display(state):\n divider = \"\\n---+---+---+---+---+---+---\\n\"\n symbol_dict = {1: \"x\", -1: \"o\", 0: \" \"}\n\n output_rows = []\n for state_row in np.array_split(tuple(state), indices_or_sections=6):\n y = \"|\". join([\" {} \".format(symbol_dict[x]) for x in state_row])\n output_rows.append(y)\n\n ascii_grid = divider.join(output_rows)\n print(ascii_grid)",
"def display_revealed_puzzle(self):\n for i in self.revealed_puzzle:\n print(i, end=\" \")\n print(\"\")",
"def print_state(self, player: int):\n state = self.game.get_state(player)\n print_state(state)",
"def show_state(state, ghost_pos=None):\n chars = {'C': 'C',\n '-C': '0',\n 'NV': '.'}\n\n ss = [range(4) for _ in range(4)]\n for s in state:\n c, i, j = s.split('_')\n ss[int(i)][int(j)] = chars[c]\n if ghost_pos:\n ss[ghost_pos[0]][ghost_pos[1]] = 'G'\n\n pprint.pprint(ss)",
"def print_solution(self, goal_node):\n # path is list of nodes from initial state (root of the tree)\n # to the goal_node\n path = goal_node.path()\n # print the solution\n print( \"Solution takes {0} steps from the initial state\\n\".format(len(path)-1) )\n self.print_state(path[0].state)\n print( \"to the goal state\\n\")\n self.print_state(path[-1].state)\n print( \"Below is the sequence of moves\\n\")\n for node in path:\n self.print_node(node)",
"def print_state(self):\n print(self.board.get_other_player_name(self.board.current_player.name) + \n \" player action: \" + self.last_command.strip()) \n print(self.board)\n self.print_metadata()",
"def print_puzzle(puzzle, change_list=None):\n if change_list is None:\n change_list = []\n hr = \"-\" * 11 + \"|\" + \"-\" * 11 + \"|\" + \"-\" * 11\n br = \" \" * 11 + \"|\" + \" \" * 11 + \"|\" + \" \" * 11\n print\n for i in range(0, 9):\n row_str = \"\"\n for j in range(0, 9):\n val = puzzle[i][j]\n print_val = \".\" if val == 0 else str(val)\n if (i, j) in change_list:\n row_str += \"_\" + print_val + \"_\"\n else:\n row_str += \" \" + print_val + \" \"\n if j == 2 or j == 5:\n row_str += \"|\"\n else:\n row_str += \" \"\n print row_str\n if i == 2 or i == 5:\n print hr\n elif i < 8:\n print br\n print",
"def display():\r\n\r\n print(f'\\n{\"State\":<20}{\"Capital\":<20}{\"Population\":<20}{\"Flower\":<20}')\r\n print()\r\n for state in sorted(state_info_dict):\r\n info_list = state_info_dict[state]\r\n capital = info_list[0]\r\n population = f'{info_list[1]:,}'\r\n flower = info_list[2]\r\n print(f'{state:<20}{capital:<20}{population:<20}{flower:<20}')",
"def state_to_string(state):\n return ('i: \\t' + str(state[2][0]) + '\\t' + str(state[2][1]) + '\\n'\n 'v: \\t' + str(state[1][0]) + '\\t'+str(state[1][1]) + '\\n'\n 'o: \\t' + str(state[0][0]) + '\\t'+str(state[0][1]) + '\\n'\n 'h: \\t' + str(state[3][0]) + '\\t'+str(state[3][1]) + '\\n'\n 'p: \\t' + str(state[4][0]) + '\\t'+str(state[4][1]) + '\\n')",
"def print_SAT(self):\n clauses = []\n n = self.total_vertices\n # Each vertex must be assigned to exactly one color.\n for i in range(1, n+1):\n self.exactly_one(i, clauses)\n # Neighbours must be differently colored (P NAND Q)\n for edge in self.edges:\n for j in range(1, self.colors+1):\n clauses.append([-self.var(edge[0],j), -self.var(edge[1],j)])\n # Print number of clauses and number of variables\n print(len(clauses), n*self.colors)\n # Print all clauses\n for clause in clauses:\n clause.append(0)\n print(\" \".join(map(str, clause)))",
"def print_local_state(self, local_state):\n print(local_state, end='')",
"def show_state(self):\n\n pass",
"def stabilizers(self):\n string_map_matrix = {(0, 0): \"I\", (1, 0): \"X\",\n (1, 1): \"Y\", (0, 1): \"Z\"}\n result = \"\"\n for i in range(2 * self.n):\n if i == self.n:\n result += \"-\" * (self.n) + \"\\n\"\n\n for j in range(self.n):\n (x_ij, z_ij) = (self.state[i, j], self.state[i, j + self.n])\n result += string_map_matrix[(x_ij, z_ij)]\n\n result += \"\\n\"\n\n print(result[:-1])",
"def test_state(self):\n\n sv = Statevector.from_label(\"+-rl\")\n output = state_drawer(sv, \"latex_source\")\n expected_output = (\n r\"\\frac{1}{4} |0000\\rangle- \\frac{i}{4} |0001\\rangle+\\frac{i}{4} |0010\\rangle\"\n r\"+\\frac{1}{4} |0011\\rangle- \\frac{1}{4} |0100\\rangle+\\frac{i}{4} |0101\\rangle\"\n r\" + \\ldots +\\frac{1}{4} |1011\\rangle- \\frac{1}{4} |1100\\rangle\"\n r\"+\\frac{i}{4} |1101\\rangle- \\frac{i}{4} |1110\\rangle- \\frac{1}{4} |1111\\rangle\"\n )\n self.assertEqual(output, expected_output)",
"def print_board():\n \n print \"\"\n print \" | | \"\n print \" \" + grid_status[(1,1)] + \" | \" + grid_status[(1,2)] + \" | \" + grid_status[(1,3)]\n print \"___|___|___\"\n print \" | | \"\n print \" \" + grid_status[(2,1)] + \" | \" + grid_status[(2,2)] + \" | \" + grid_status[(2,3)]\n print \"___|___|___\"\n print \" | | \"\n print \" \" + grid_status[(3,1)] + \" | \" + grid_status[(3,2)] + \" | \" + grid_status[(3,3)]\n print \" | | \"\n print \"\""
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a string corresponding to the move between two positions of a tile
|
def get_move(old_i, new_i):
dx = new_i[0] - old_i[0]
dy = new_i[1] - old_i[1]
if dx > 0:
return "left"
elif dx < 0:
return "right"
elif dy > 0:
return "up"
elif dy < 0:
return "down"
else:
return ""
|
[
"def translate_to_tile(self, tile_x, pos_x, tile_y, pos_y):\n x = int(tile_x) * DISPLAY_SIZE['x'] + pos_x\n y = int(tile_y) * DISPLAY_SIZE['y'] + pos_y\n return x, y",
"def render_tile(tile):\n\n # each tile list has the meaning: [Visible (bool), Mine (bool), Adjacent Mines (int)]\n # visible, mine, adjacent_mines = tile\n\n if tile[0]:\n if tile[1]:\n return \"X\"\n elif tile[2]:\n return str(tile[2])\n else:\n return \" \"\n else:\n return \"O\"",
"def move(position, direction):\n return (position[0] + direction[0], position[1] + direction[1])",
"def test_movement_strings(self):\n to_dir, from_dir = self.room.get_movement_strings((1, 0, 0))\n assert to_dir == \"east\"\n assert from_dir == \"the west\"",
"def convert_move_to_message(self, move) -> None:\n piece_letter = self.piece_letter[move['from_piece']]\n eating = 'x' if move['to_piece'] != Pieces.NONE else ''\n target_cell = self.cols[move['to_col']] + self.rows[move['to_row']]\n return piece_letter + eating + target_cell",
"def solve_2x2(self):\r\n # Move zero_tile to (0,0)\r\n move_string = move_and_update(self, 'lu')\r\n # Checks whether puzzle if solved if it isn't then does rotation till it is\r\n while (not self.row0_invariant(0)):\r\n move_string += move_and_update(self, 'rdlu')\r\n return move_string",
"def give_me_next_tile(self, reference_tile, direction):\n x_dir = y_dir = 0\n if direction == TO_THE_TOP_LEFT:\n x_dir = -1\n y_dir = -1\n if direction == TO_THE_TOP:\n y_dir = -1\n if direction == TO_THE_TOP_RIGHT:\n x_dir = 1\n y_dir = -1\n if direction == TO_THE_RIGHT:\n x_dir = 1\n if direction == TO_THE_BOTTOM_RIGHT:\n x_dir = 1\n y_dir = 1\n if direction == TO_THE_BOTTOM:\n y_dir = 1\n if direction == TO_THE_BOTTOM_LEFT:\n x_dir = -1\n y_dir = 1\n if direction == TO_THE_LEFT:\n x_dir = -1\n\n try:\n return (\n self.grid.get_tiles_dict()[\n f\"{reference_tile.get_x_pos() + x_dir}\"\n + f\"x{reference_tile.get_y_pos() + y_dir}\"\n ],\n SUCCESS,\n )\n except KeyError: # That means there is no more tile\n return (None, THERE_IS_NO_TILE)\n except Exception as e:\n debug_print(\"\", text_color_str=\"white\", bg_color_str=\"black\")\n return (None, ERROR + \" \" + str(e))",
"def next_pos(i, j, move):\n return i + directions[move].row, j + directions[move].col",
"def get_move_notation(chess_move):\n\n if chess_move in [\"0-0\", \"0-0-0\"]:\n move_notation = chess_move\n for color in \"w/b\":\n if is_checkmate(color, pieces):\n move_notation += \"#\"\n elif is_check(color, pieces):\n move_notation += \"+\" \n else:\n m_piece = chess_move[0].upper()\n m_init = chess_move[1:3].lower()\n m_ends = chess_move[4:6].lower()\n\n if m_piece == \"P\" and m_ends[-1] in \"1/8\":\n promoted_piece = seek_piece(m_ends, pieces).name\n move_notation = m_piece + m_init + \"-\" + m_ends + \"=\" + promoted_piece\n else:\n move_notation = m_piece + m_init + \"-\" + m_ends\n \n color = \"w\" if seek_piece(m_ends, pieces).color == \"b\" else \"b\"\n if is_checkmate(color, pieces):\n move_notation += \"#\"\n elif is_check(color, pieces):\n move_notation += \"+\"\n \n return move_notation",
"def position_after_move(move):\n center = len(sight()) // 2\n start_pos = (center, center)\n if move not in PROGRESS_MOVES:\n return start_pos\n else:\n move_direction = direction_of_move(move)\n new_pos = get_pos_in_direction(start_pos, move_direction)\n new_cell = get_cell_in_sight(new_pos)\n # If you fall into a pit\n if not new_cell or new_cell.floor == PIT:\n return None\n # If you or something you tried to push bumped into a wall\n elif _wall_bump(start_pos, move_direction):\n return start_pos\n # If you only moved 0 or 1 tile\n elif move != FORWARD_TWO:\n return new_pos\n else:\n two_pos = get_pos_in_direction(new_pos, move_direction)\n new_cell = get_cell_in_sight(two_pos)\n # If your second move dropped you into a pit\n if not new_cell or new_cell.floor == PIT:\n return None\n # If your second move caused a bump into a wall\n elif _wall_bump(start_pos, move_direction, 2):\n return new_pos\n else:\n return two_pos",
"def _parse_sgf_move(node_value):\n\tif node_value == '' or node_value == 'tt':\n\t\treturn go.PASS_MOVE\n\telse:\n\t\trow = string.letters.index(node_value[1])\n\t\tcol = string.letters.index(node_value[0])\n\t\t# GameState expects (x, y) where x is column and y is row\n\t\treturn (col, row)",
"def get_instructions(self) -> str:\n return \"Players take turns claiming cells. When a player captures\" \\\n \" at least half of the cells in a ley-line, then player\" \\\n \" captures the ley-line. Player wins when captures at\" \\\n \" least half of the lay-lines.\"",
"def travel(string, x, y):\n\n # performs translation depending on character name\n for char in string:\n if char == 'N' or char == 'n':\n y = y + 1\n elif char == 'S' or char == 's':\n y = y - 1\n elif char == 'W' or char == 'w':\n x = x - 1\n elif char == 'E' or char == 'e':\n x = x + 1\n return str((x, y))",
"def print_moves(self) -> None:\n\n for i in range(0, len(self.move_stack_left), 2):\n # Print the number of the move.\n self.moves_text.display_message(self.top_x +\n self.left_moves_x_coordinate,\n self.top_y + 60 + 10 * (i + 1),\n str(int(i / 2) + 1))\n # Print white move\n self.moves_text.display_message(self.top_x +\n self.left_moves_x_coordinate + 30,\n self.top_y + 60 + 10 * (i + 1),\n self.move_stack_left[i])\n # Print black move\n if i < len(self.move_stack_left) - 1:\n self.moves_text.display_message(self.top_x +\n self.left_moves_x_coordinate\n + 100,\n self.top_y + 60 + 10 * (i + 1),\n self.move_stack_left[i + 1])\n\n for i in range(0, len(self.move_stack_right), 2):\n # Print the number of the move.\n self.moves_text.display_message(self.top_x +\n self.right_moves_x_coordinate,\n self.top_y + 60 + 10 * (i + 1),\n str(int(i / 2) + 16))\n # Print white move\n self.moves_text.display_message(self.top_x +\n self.right_moves_x_coordinate + 30,\n self.top_y + 60 + 10 * (i + 1),\n self.move_stack_right[i])\n # Print black move\n if i < len(self.move_stack_right) - 1:\n self.moves_text.display_message(self.top_x +\n self.right_moves_x_coordinate +\n 100,\n self.top_y + 60 + 10 * (i + 1),\n self.move_stack_right[i + 1])",
"def get_move(self):\n return self.sdg(self.board, self.falling_piece)",
"def displayMove(x, y, new_x, new_y):\n global move_counter\n if move_counter%2 == 0:\n message_queue(\"Player moved: \")\n move_counter+=1\n else:\n message_queue(\"AI moved: \")\n move_counter+=1\n to_write = str(y+1) + \", \" + str(x+1) + \" to \" + str(new_y+1) + \", \" + str(new_x+1)\n message_queue(to_write)",
"def get_computer_move():\n # TODO\n computer_move = random.randint(0,2)\n if computer_move == 0:\n return \"r\"\n elif computer_move == 1:\n return \"p\"\n elif computer_move == 2:\n return \"s\"",
"def m_move_one(state, b1, b2):\n if state.pos[b1] == 'table':\n return [('stack', b1, b2)]\n elif b2 == 'table':\n return [('unstack', b1, state.pos[b1])]\n else:\n return [('restack', b1, state.pos[b1], b2)]",
"def get_tile_name_xy(x: int, y: int) -> str:\n return \"srtm_{0:02d}_{1:02d}\".format(x, y)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the path obtained through the BBS algorithm and a boolean indicating whether the path terminates at the goal state
|
import heapq

def BBS(initial_state, check_dict):
    print("Implementing BBS...")
    q = []
    # Paths are prioritised by the heuristic value stored at index 2 of the
    # most recent state, so the cheapest-looking path is expanded first.
    heapq.heappush(q, (initial_state[0][2], initial_state))
    accomplished = False
    while len(q) != 0:
        path = heapq.heappop(q)[1]
        if is_goal(path[-1][0]):
            goal = path
            accomplished = True
            break
        state_container = next_possible_states(path, check_dict, False)
        for i in state_container:
            # Skip the immediate predecessor state to avoid trivial
            # back-and-forth moves.
            if len(path) <= 1 or i[0] != path[-2][0]:
                temp = list(path)
                temp.append(i)
                heapq.heappush(q, (i[2], temp))
    if accomplished:
        print("Solved! Number of moves:", len(goal) - 1)
        return goal, True
    else:
        print("Cannot be solved. Number of moves:", len(path) - 1)
        return path, False
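
The helpers is_goal and next_possible_states are defined elsewhere in the source, so the function above is not runnable on its own. As a self-contained sketch of the same priority-queue idea (every name below is hypothetical, not from the original), here is a minimal best-first search over a toy graph:

import heapq

def best_first(start, goal, neighbours, h):
    # Each queue entry is (heuristic of last node, path so far).
    q = [(h(start), [start])]
    while q:
        _, path = heapq.heappop(q)
        if path[-1] == goal:
            return path, True
        for n in neighbours(path[-1]):
            if n not in path:  # avoid cycles
                heapq.heappush(q, (h(n), path + [n]))
    return [start], False

graph = {"A": ["B", "C"], "B": ["D"], "C": ["D"], "D": []}
dist = {"A": 3, "B": 2, "C": 1, "D": 0}  # guessed distance to D
print(best_first("A", "D", graph.__getitem__, dist.__getitem__))
# (['A', 'C', 'D'], True): the lower-heuristic branch is expanded first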
|
[
"def path_walker_backtrack(self, path: list):\n trajectory = -1\n while path:\n current = path.pop()\n if self._maze.get_cell(current.x, current.y).get_flag() == 1:\n current = current.get_parent()\n if current.get_parent() is not None and self.is_end_of_hallway(current):\n current, trajectory_backtrack = self.backtrack(current.get_parent())\n return current, 'blocked', trajectory + trajectory_backtrack\n else:\n return current, 'blocked', trajectory\n trajectory += 1\n\n children = current.get_children()\n blocked_neighbors_count = 0\n for child in children:\n child_cell = self._maze.get_cell(child[0], child[1])\n self._knowledge.update_cell(child_cell, child[0], child[1])\n if child_cell.get_flag() == 1:\n blocked_neighbors_count += 1\n current.update_no_of_neighbors(len(children))\n current.update_no_of_blocked_neighbors(blocked_neighbors_count)\n return None, 'unblocked', trajectory",
"def find_next_path(self, goal):\n box = self.find_closest_box_for_goal(goal)\n agent = self.find_closest_agent_for_box(box)\n # path for agent to box\n agent_to_box, block_info = self.shortest_path_to_box(agent, box)\n # path for box to goal\n box_to_goal, b_info = self.shortest_path_to_goal_with_agent(box, goal,\n agent)\n # update block_info so we have complete picture\n block_info.update(b_info)\n # and combine paths\n original_path = agent_to_box + box_to_goal\n\n block_cell = self.detect_blocking_objects(original_path, block_info,\n agent, box)\n\n if block_cell is not None:\n return self.find_next_resolving_path(block_cell, original_path,\n block_info)\n\n box_to_goal, conflict = self.validate_box_movement(agent_to_box, box_to_goal)\n # if swap was not possible on path to goal, find another spot\n if conflict:\n path = self.find_swapable_position(agent_to_box, box, agent, goal)\n else:\n path = agent_to_box + box_to_goal\n return path",
"def find_path(self):\n\n found_end = 0\n while found_end == 0:\n\n # Check if the open list is empty, if so send code to nofity that the\n # goal was not found\n if len(self.open_list) == 0:\n found_end = 2\n\n else:\n eval_node = heapq.heappop(self.open_list)\n\n # Check if current node is the goal, if so send code to notify that\n # the goal was found\n if self.check_for_goal(eval_node):\n found_end = 1\n self.goal_node = eval_node\n self.closed_list.append(eval_node)\n\n # Otherwise add the current node to the closed list and evaluate\n # its 8 neighbors\n else:\n self.closed_list.append(eval_node)\n self.analyze_neighbors(eval_node)\n\n return found_end",
"def bfs_paths(self, start, goal):\n queue = [(start, [start])]\n while queue:\n (vertex, path) = queue.pop(0)\n for next in self.get_adj(vertex) - set(path):\n if next == goal:\n yield path + [next]\n else:\n queue.append((next, path + [next]))",
"def find_path(self):\n nei = self.check_neighbor_options()\n\n self.check_end() # At the finish line, no more work to be done\n\n # Dead End\n if len(nei) == 0:\n self.crossroads(nei)\n\n # Crossroad\n elif len(nei) > 1:\n self.crossroads(nei)\n\n else:\n while len(nei) == 1:\n # If only one direction to move, move it!\n self.move_bot(nei[0])\n nei = self.check_neighbor_options()",
"def prob_path(self):\n dp = [ [0.0, 0.0, 0.0] for x in range(len(self.outcome)) ] # dynamic programming array [A,B]\n for i in range(len(self.outcome)):\n dp_temp = dp[i-1][:]\n dp[i][0] = self.max_state('A', self.outcome[i], dp_temp)\n dp[i][1] = self.max_state('B', self.outcome[i], dp_temp)\n dp[i][2] = self.max_state('C', self.outcome[i], dp_temp)\n \n dp_max = max(dp[len(self.outcome)-1]) # traceback the path\n if dp_max == dp[len(self.outcome)-1][0]:\n hidden_path = ['A']\n elif dp_max == dp[len(self.outcome)-1][1]:\n hidden_path = ['B']\n else:\n hidden_path = ['C']\n for i in range(len(self.outcome)-1):\n dA = dp_max-self.transition('A', hidden_path[0])-self.emission(hidden_path[0], self.outcome[-i-1])\n dB = dp_max-self.transition('B', hidden_path[0])-self.emission(hidden_path[0], self.outcome[-i-1])\n dC = dp_max-self.transition('C', hidden_path[0])-self.emission(hidden_path[0], self.outcome[-i-1])\n if compare_float(dA, dp[-i-2]): ##set the path\n hidden_path = ['A']+hidden_path\n dp_max = dA\n elif compare_float(dB, dp[-i-2]):\n hidden_path = ['B']+hidden_path\n dp_max = dB\n else:\n hidden_path = ['C']+hidden_path\n dp_max = dC\n \n print \"\".join(x for x in hidden_path)",
"def getPath(self):\n # print(\"I'm serious. You actually did it. Here is your path again so you can see how far you have come.\")\n return self.pathTraveled",
"def take_next_step(self) -> None:\r\n next_path_dic = {} # temporary var used to keep track of the result of the step\r\n paths_to_end = set() # temporary var used to keep track of which paths have met the termination criteria\r\n \r\n for current_path_val in self.path_dic: # loop through each point, or current state of a path\r\n for transition in self.transitions:# loop through each transformation (or card draw)\r\n next_path_val = current_path_val + transition # this is value after a card has been drawn\r\n \r\n if next_path_val >= self.target: # if the path has reached an endpoint, add to a set\r\n # which will be used later to move paths to the endpoint dictionary\r\n paths_to_end.add(next_path_val)\r\n\r\n # doing the transformation\r\n if next_path_val in next_path_dic: #this point has already been found, just need to update its probability\r\n next_path_dic[next_path_val] += self.path_dic[current_path_val] \\\r\n / len(self.transitions)\r\n else: # this point hasn't been found yet, need to create it\r\n next_path_dic[next_path_val] = self.path_dic[current_path_val] / len(self.transitions)\r\n \r\n self.path_dic = next_path_dic # all transformations have been done. The next state is set as the current state\r\n \r\n # now that we've calucated the next steps for all paths, \r\n # loop through paths that met the end condition and move them from\r\n # the path dictionary to the endpoint dictionary\r\n for point in paths_to_end:\r\n if point in self.end_point_dic: # if this endpoint has been reached before, add the\r\n # probability of current path to probablility of endpoint\r\n self.end_point_dic[point] += self.path_dic.pop(point) #pop from the pathDic becuase this path is ended\r\n \r\n else: #havent reached this endpoint before, add it to the dictionary\r\n self.end_point_dic.update({point: self.path_dic.pop(point)})",
"def bfs_shortest_path(graph, start, goal):\n # keep track of all visited nodes\n visited = []\n\n # keep track of the nodes to be explored, starting with the starting node\n # provided\n queue = deque([[start]])\n\n if start == goal:\n return \"The start element is the goal element\"\n\n while queue:\n # get the first element of the current path\n path = queue.popleft()\n\n # get the last node from the path\n node = path[-1]\n\n if node not in visited:\n neighbors = graph[node]\n\n # create a new path for all the neighbors and push it into the queue\n for neighbor in neighbors:\n new_path = list(path)\n new_path.append(neighbor)\n queue.append(new_path)\n\n # return path to goal if neighbor is goal\n if neighbor == goal:\n return new_path\n\n visited.append(node)\n \n return \"There is no path between {} and {}\".format(start, goal)",
"def is_goal(self, state):\n return state == self.goal",
"def is_goal(state):\n return sum(sum(state, [])) == 1",
"def is_done(self):\n return not (self.patrn_bfs_queue and self.sub_bfs_queue)",
"def find_path(self):\n\n # current = (self._start_cell.row, self._start_cell.column)\n # current[0] -= 1\n # if self._valid_move(*current):\n # self._mark_path(*current)\n # current[0] -= 1\n # if self._valid_move(*current):\n # self._mark_path(*current)\n # else:\n # current[0] += 1\n # if self._valid_move(*current):\n # self._mark_path(*current)\n\n current = (self._start_cell.row, self._start_cell.col)\n self._mark_path(*current)\n stack = Stack()\n stack.push(current)\n\n while not stack.is_empty() and not self._exit_found(*stack.peek()):\n current = stack.peek()\n # print(current)\n found = False\n\n for row, col in [(-1, 0), (0, 1), (1, 0), (0, -1)]:\n next_cell = (current[0] + row, current[1] + col)\n if self._valid_move(*next_cell):\n stack.push(next_cell)\n self._mark_path(*next_cell)\n found = True\n break\n\n # for row in [-1, 1]:\n # next = (current[0] + row, current[1])\n # if self._valid_move(*next):\n # stack.push(next)\n # self._mark_path(*next)\n # found = True\n # break\n # if found:\n # continue\n #\n # for col in [-1, 1]:\n # next = (current[0], current[1] + col)\n # if self._valid_move(*next):\n # stack.push(next)\n # self._mark_path(*next)\n # found = True\n # break\n\n if not found:\n self._mark_tried(*current)\n stack.pop()\n\n if stack.is_empty():\n return False\n elif self._exit_found(*stack.peek()):\n return True",
"def bfs(drawer, map, start, goal):\n ### START: 1c\n visited = map.copy()\n visited[start] = 1\n new_paths = [[start]]\n end_reached = False\n while not end_reached:\n paths = new_paths\n new_paths = []\n for path in paths:\n adj_nodes = get_adjacent_nodes(path[-1], map.shape)\n for node in adj_nodes:\n if visited[node]:\n continue\n visited[node] = 1\n new_paths.append(path + [node])\n drawer.draw_path(path + [node])\n if node == goal:\n final_path = new_paths[-1]\n end_reached = True\n break\n return final_path\n ### END: 1c",
"def doPathWalk(self):\r\n self.path_dic = {0: 1} ### first step is the initial state before we've done anything\r\n self.end_point_dic = {} # initializing the dict that keeps track of all endpoints and their probabilities\r\n while len(self.path_dic): # ## the dict is used to keep track of paths in a breadth first search\r\n # as long as there is a path, keep iterating\r\n self.take_next_step() #### state of self is updated \r\n\r\n return self",
"def getTriggerPathFlag(self) -> \"SbBool\":\n return _coin.SoDataSensor_getTriggerPathFlag(self)",
"def _find_branch(pt, targets, branches):\n res = pt.path[0]\n count = 0\n while res not in targets:\n count += 1\n res = pt.path[count]\n count = 0\n cur_branch = branches[0]\n while not res == cur_branch.target:\n count += 1\n cur_branch = branches[count]\n return cur_branch",
"def goal_test(self, current):\n\n if current.state == self.goal_state:\n return True\n else:\n return False",
"def test_find_path_bi():\n n1 = Node({'A': ['B', 'C']})\n n2 = Node({'B': ['C', 'D']})\n n3 = Node({'C': ['D']})\n n4 = Node({'D': ['C']})\n n5 = Node({'E': []})\n node_list = [n1, n2, n3, n4, n5]\n test_g = Graph(node_list)\n\n assert test_g.find_path_bi(n1, n5) == [] # no path\n assert test_g.find_path_bi(n1, n2) != [] # one path\n assert test_g.find_path_bi(n1, n3) != [] # three paths",
"def is_goal(self):\n\n return self.depth == len(self.grid) * len(self.grid[0]) - 1"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Merges several make configuration objects into one in the order they are specified. If several configurations contain attributes with the same name, the value from the configuration provided later to this function is kept.
|
def merge(*args):
    d = {}
    for conf in args:
        # Later configurations overwrite earlier ones on key collisions.
        for key, value in conf.__dict__.items():
            d[key] = value
    return from_dict(d)
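
A usage sketch under the assumption that from_dict rebuilds a configuration object from a plain dict (types.SimpleNamespace stands in for the real config class, which is not shown here):

from types import SimpleNamespace

def from_dict(d):  # assumed helper: dict -> config object
    return SimpleNamespace(**d)

base = SimpleNamespace(cc="gcc", cflags="-O2")
debug = SimpleNamespace(cflags="-O0 -g")
merged = merge(base, debug)
print(merged.cc, merged.cflags)  # gcc -O0 -g  (the later value wins)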
|
[
"def mergeConfig(self, *args, **kwargs):\n other = cherrypy.lib.reprconf.Config(*args, **kwargs)\n # Top-level keys are namespaces to merge, second level should get replaced\n for k, v in other.items():\n mergeFrom = self.get(k, {})\n mergeFrom.update(v)\n self[k] = mergeFrom",
"def merge_with_master_config(self, config, defaults={}, delete_orphan_fields=False) -> dict:\n if isinstance(config, str):\n import json\n config = json.loads(config)\n properties = self.all_properties()\n config['fields'] = config.get('fields', dict())\n fields = config['fields']\n\n d_color = defaults.get('color', 'white')\n d_icon = defaults.get('icon', 'icons:default')\n\n if delete_orphan_fields:\n exist = {p.name() for p in properties}\n unexist = set(fields.keys()) - exist\n for name in unexist:\n del fields[name]\n\n for p in properties:\n field = fields.get(p.name(), {'show_in_search': False,\n 'combine_fields': False,\n 'number_of_rules': 0,\n 'glossaries': [],\n 'use_in_network_search': False,\n 'case_sensitive': False,\n 'show_as_link': 'text',\n 'blacklists': [],\n 'show_in_result': 'no',\n 'rule_extractor_enabled': False,\n 'search_importance': 1,\n 'group_name': '',\n 'show_in_facets': False,\n 'predefined_extractor': 'none',\n 'rule_extraction_target': ''})\n config['fields'][p.name()] = field\n field['screen_label'] = ' '.join(p.label())\n field['description'] = '\\n'.join(p.definition())\n field['name'] = p.name()\n\n # color\n if 'color' not in field:\n color = self.__merge_close_ancestor_color(p, fields, attr='color')\n field['color'] = color if color else d_color\n # icon\n if 'icon' not in field:\n icon = self.__merge_close_ancestor_color(p, fields, attr='icon')\n field['icon'] = icon if icon else d_icon\n # type\n if isinstance(p, OntologyObjectProperty):\n field['type'] = 'kg_id'\n else:\n try:\n field['type'] = self.__merge_xsd_to_type(next(iter(p.included_ranges())))\n except StopIteration:\n field['type'] = None\n return config",
"def _merge_configurations(self):\n m = dict()\n m.update(self._default)\n m.update(self._repo)\n m.update(self._user)\n return m",
"def config_merge(configs, suite_name=None, **kwargs):\n\n new_script = L.eval('new_script')\n yaml_cache = {}\n for desc, paths in configs:\n log.debug(\"merging config %s\", desc)\n\n if suite_name is not None:\n desc = combine_path(suite_name, desc)\n\n yaml_complete_obj = {}\n deep_merge(yaml_complete_obj, TEUTHOLOGY_TEMPLATE)\n for path in paths:\n if path not in yaml_cache:\n with open(path) as f:\n txt = f.read()\n yaml_cache[path] = (txt, yaml.safe_load(txt))\n\n yaml_fragment_txt, yaml_fragment_obj = yaml_cache[path]\n if yaml_fragment_obj is None:\n continue\n yaml_fragment_obj = copy.deepcopy(yaml_fragment_obj)\n premerge = yaml_fragment_obj.get('teuthology', {}).pop('premerge', '')\n if premerge:\n log.debug(\"premerge script running:\\n%s\", premerge)\n env, script = new_script(premerge, log, deep_merge, yaml.safe_load)\n env['base_frag_paths'] = [strip_fragment_path(x) for x in paths]\n env['description'] = desc\n env['frag_paths'] = paths\n env['suite_name'] = suite_name\n env['yaml'] = yaml_complete_obj\n env['yaml_fragment'] = yaml_fragment_obj\n for k,v in kwargs.items():\n env[k] = v\n if not script():\n log.debug(\"skipping merge of fragment %s due to premerge filter\", path)\n yaml_complete_obj['teuthology']['fragments_dropped'].append(path)\n continue\n deep_merge(yaml_complete_obj, yaml_fragment_obj)\n\n postmerge = yaml_complete_obj.get('teuthology', {}).get('postmerge', [])\n postmerge = \"\\n\".join(postmerge)\n log.debug(\"postmerge script running:\\n%s\", postmerge)\n env, script = new_script(postmerge, log, deep_merge, yaml.safe_load)\n env['base_frag_paths'] = [strip_fragment_path(x) for x in paths]\n env['description'] = desc\n env['frag_paths'] = paths\n env['suite_name'] = suite_name\n env['yaml'] = yaml_complete_obj\n for k,v in kwargs.items():\n env[k] = v\n if not script():\n log.debug(\"skipping config %s due to postmerge filter\", desc)\n continue\n yield desc, paths, yaml_complete_obj",
"def combine_configs(paths, updates):\n configs = []\n for path in paths:\n with open(path) as f:\n configs.append(yaml.load(f))\n return reduce(dict_merge, configs + [updates])",
"def setup_builders_from_config_list(builder_specs, helper,\n do_upload_render_results,\n do_upload_bench_results, builder_format):\n for builder_tuple in sorted(builder_specs):\n builder = builder_format(*builder_tuple)\n builder.create(helper, do_upload_render_results, do_upload_bench_results)",
"def merge_config_and_args(config, args):\n\n # find truthy sysconfig specifications\n sysconf = config[\"sysconfig\"]\n new_spec = {name: sysconf[name] for name in sysconf if sysconf[name]}\n\n # find truthy args specifications, overwriting config if present\n cmd = vars(args)\n new_args = {name: cmd[name] for name in cmd if cmd[name]}\n new_spec.update(new_args)\n\n # sysconfig gets updated with all truthy members of the prioritized union\n sysconf.update(new_spec)\n\n # new_args now gets the original namespace and all truthy members of the prioritized\n # union\n cmd.update(new_spec)\n new_args = argparse.Namespace(**cmd)\n\n return config, new_args",
"def merge(self, other: \"Configs\") -> \"Configs\":\n config: Configs = Configs()\n\n config.set_config(\n CONFIG_SECTIONS.ADDOPTS, [*self._addopts, *other.nbqa_addopts]\n )\n config.set_config(\n CONFIG_SECTIONS.PROCESS_CELLS,\n self._process_cells or other.nbqa_process_cells,\n )\n config.set_config(CONFIG_SECTIONS.MUTATE, self._mutate or other.nbqa_mutate)\n config.set_config(CONFIG_SECTIONS.DIFF, self._diff or other.nbqa_diff)\n config.set_config(CONFIG_SECTIONS.FILES, self._files or other.nbqa_files)\n config.set_config(CONFIG_SECTIONS.EXCLUDE, self._exclude or other.nbqa_exclude)\n return config",
"def merge(self, other: \"Configs\") -> \"Configs\":\n config: Configs = Configs()\n\n config.set_config(\n CONFIG_SECTIONS.ADDOPTS, [*self._addopts, *other.nbqa_addopts]\n )\n config.set_config(CONFIG_SECTIONS.CONFIG, self._config or other.nbqa_config)\n config.set_config(\n CONFIG_SECTIONS.IGNORE_CELLS, self._ignore_cells or other.nbqa_ignore_cells\n )\n config.set_config(CONFIG_SECTIONS.MUTATE, self._mutate or other.nbqa_mutate)\n config.set_config(CONFIG_SECTIONS.DIFF, self._diff or other.nbqa_diff)\n config.set_config(CONFIG_SECTIONS.FILES, self._files or other.nbqa_files)\n config.set_config(CONFIG_SECTIONS.EXCLUDE, self._exclude or other.nbqa_exclude)\n return config",
"def _parse_configs(self, **kwargs):\n self.config = {}\n self.input_config = {}\n self.augment_config = False\n\n for k in kwargs:\n if k == \"augment\":\n self.augment_config = kwargs[k]\n elif k in INPUT_PARAMS:\n self.input_config[k] = kwargs[k]\n else:\n self.config[k] = kwargs[k]\n\n config_path = os.path.join(self.logdir, \"config.yml\")\n config_dict = {\"model\":self.config, \"input\":self.input_config,\n \"augment\":self.augment_config}\n yaml.dump(config_dict, open(config_path, \"w\"), default_flow_style=False)",
"def merge_from_list(self, list_args):\n def xs(name, parser_args, list_args):\n \"\"\"build the generator of matching list_args\"\"\"\n for args, kwargs in list_args:\n if len(set(args) & parser_args) > 0:\n yield args, kwargs\n\n else:\n if 'dest' in kwargs:\n if kwargs['dest'] == name:\n yield args, kwargs\n\n for args, kwargs in xs(self.name, self.parser_args, list_args):\n self.merge_args(args)\n self.merge_kwargs(kwargs)",
"def merge_config_files(fnames):\n def _load_yaml(fname):\n with open(fname) as in_handle:\n config = yaml.load(in_handle)\n\n return config\n\n out = _load_yaml(fnames[0])\n for fname in fnames[1:]:\n cur = _load_yaml(fname)\n for k, v in cur.iteritems():\n if k in out and isinstance(out[k], dict):\n out[k].update(v)\n else:\n out[k] = v\n\n return out",
"def composite(configs, method=\"override\"):\n \n if method not in ['override', 'update', 'append']:\n raise ConfigError(\n \"Unrecognized composite method: \" + str(method))\n\n composite_config = Config()\n\n for config in configs:\n\n # make sure we have a config object\n if not isinstance(config, Config):\n path = config\n if not os.path.exists(path):\n continue\n config = Config.read(path)\n\n if not config:\n continue\n\n if method == \"override\":\n composite_config.override(config)\n elif method == \"append\":\n composite_config.append(config)\n else:\n composite_config.update(config)\n\n return composite_config",
"def _organize_configs(self):\n # organize learner configs\n self.learner_cfg.args = self.args\n self.learner_cfg.env_info = self.env_info\n self.learner_cfg.hyper_params = self.hyper_params\n self.learner_cfg.log_cfg = self.log_cfg\n self.learner_cfg.head.configs.state_size = self.env_info.observation_space.shape\n self.learner_cfg.head.configs.output_size = self.env_info.action_space.n\n\n # organize worker configs\n self.worker_cfg.env_info = self.env_info\n self.worker_cfg.hyper_params = self.hyper_params\n self.worker_cfg.backbone = self.learner_cfg.backbone\n self.worker_cfg.head = self.learner_cfg.head\n self.worker_cfg.loss_type = self.learner_cfg.loss_type\n\n # organize logger configs\n self.logger_cfg.args = self.args\n self.logger_cfg.env_info = self.env_info\n self.logger_cfg.log_cfg = self.log_cfg\n self.logger_cfg.comm_cfg = self.comm_cfg\n self.logger_cfg.backbone = self.learner_cfg.backbone\n self.logger_cfg.head = self.learner_cfg.head",
"def build_config(sections):\n cfg = ConfigObj()\n cfg.filename = 'foo'\n def _iter_section(cfg, section_list):\n for section_name, data, subsection_list in section_list:\n cfg[section_name] = data\n _iter_section(cfg[section_name], subsection_list)\n\n _iter_section(cfg, sections)\n return cfg",
"def make_config(self):\n if not self.search_terms:\n self.make_search_terms()\n if not self.stmts:\n self.make_gene_statements()\n config = dict()\n config['name'] = self.name\n config['human_readable_name'] = self.human_readable_name\n config['search_terms'] = [st.to_json() for st in self.search_terms]\n config['assembly'] = {\n 'belief_cutoff': 0.8,\n 'filter_ungrounded': True\n }\n if self.description:\n config['description'] = self.description\n return config",
"def _merged_args(self, auth_args=None):\n #Merge conf out of the default configuration, aliased configurations,\n #and tools configuration.\n base_dict = config.copy()\n if auth_args is None:\n base_dict = cherrypy.Tool._merged_args(self, base_dict)\n else:\n base_dict.update(auth_args)\n\n return base_dict",
"def set_attributes(self, model_1, obj_1, obj_2, overwrite=True):\n for (\n attr\n ) in (\n obj_2.traits()\n ): # Iterate through all attributes in obj_2. These should be the same traits as obj_1 assuming the precondition\n class_name = str(type(obj_2.traits()[attr])).strip(\"<>'\").split(\".\")[-1]\n # TODO: check for reactance tuples: str(obj_2.traits()[attr]._trait.klass).strip(\"<>'\").split('.')[-1] != (Int,Int,Int):\n\n if class_name == \"List\":\n phase_order = {\n \"A\": 0,\n \"B\": 1,\n \"C\": 2,\n \"N\": 3,\n } # Should only have to deal with 3 phases.\n #\n # BUG WARNING: The order of objects in the list is important and is used to determine the changes that are made\n # Try to ensure that phases are specified to avoid this problem\n # If number of elements in obj_1 is 0, all elements of obj_2 are added\n # If number of elements is the same, they are modified with a 1-1 comparison\n # If number of elements in obj_2 is < obj_1, set the first values of obj_1 as obj_2\n # If number of elements in obj_2 is > obj_1, set the all the values in obj_1 in the order they'r in obj_2 and append the extras\n # This will fail if obj_1 is (A, B, C) and obj_2 is (A, C), as it'll assign phase C to phase B.\n # This will also fail if obj_1 is (C) and obj_2 is (A,B,C) as C will have A assigned to it.\n # This will also fail if obj_1 is (A,B) and obj_2 is (A,C) as B will have C assigned to it.\n list_1 = getattr(obj_1, attr)\n list_2 = getattr(obj_2, attr)\n if list_1 is None or len(list_1) == 0:\n result_list = []\n for element in list_2:\n result_list.append(self.copy(model_1, element))\n setattr(obj_1, attr, result_list)\n continue\n elif list_2 is None or len(list_2) == 0:\n continue\n\n # Almost all Lists are of objects which have phases. Exceptions being windings, reactances and positions\n # Require the phases to be specified in both systems to modify based on phase\n has_phases = True\n for i in range(len(list_1)):\n if not (\n hasattr(list_1[0], \"phase\") and list_1[0].phase is not None\n ):\n has_phases = False\n for i in range(len(list_2)):\n if not (\n hasattr(list_2[0], \"phase\") and list_2[0].phase is not None\n ):\n has_phases = False\n if has_phases and len(list_1) > 0 and len(list_2) > 0:\n # Firstly sort the lists so they're in correct order by phase.\n list_1.sort(key=lambda x: phase_order[x.phase])\n list_2.sort(key=lambda x: phase_order[x.phase])\n list_1_phase = phase_order[list_1[0].phase]\n list_2_phase = phase_order[list_2[0].phase]\n list_1_idx = 0\n list_2_idx = 0\n while list_1_idx < len(list_1) and list_2_idx < len(list_2):\n if list_1_idx < len(list_1):\n list_1_phase = phase_order[list_1[list_1_idx].phase]\n else:\n list_1_phase = 1000000\n if list_2_idx < len(list_2):\n list_2_phase = phase_order[list_2[list_2_idx].phase]\n else:\n list_2_phase = 1000001\n\n # i.e. recurse\n if list_1_phase == list_2_phase:\n self.set_attributes(\n model_1,\n list_1[list_1_idx],\n list_2[list_2_idx],\n overwrite,\n )\n list_1_idx = list_1_idx + 1\n list_2_idx = list_2_idx + 1\n elif list_1_phase < list_2_phase:\n list_1_idx = (\n list_1_idx + 1\n ) # e.g. obj_1 = (A, B, C) and obj_2 = (B). We don't update this phase\n\n else:\n getattr(obj_1, attr).append(list_2[list_2_idx])\n list_2_idx = list_2_idx + 1\n\n elif len(list_1) == len(list_2):\n for i in range(len(list_1)):\n self.set_attributes(model_1, list_1[i], list_2[i], overwrite)\n\n elif len(list_1) > len(list_2):\n for i in range(len(list_2)):\n self.set_attributes(model_1, list_1[i], list_2[i], overwrite)\n\n else: # i.e. 
len(list_1) < len(list_2):\n for i in range(len(list_2)):\n if i < len(list_1):\n self.set_attributes(\n model_1, list_1[i], list_2[i], overwrite\n )\n else:\n getattr(obj_1, attr).append(list_2[i])\n\n else:\n value = getattr(obj_2, attr)\n if value is not None:\n if getattr(obj_1, attr) is not None and overwrite == False:\n continue\n setattr(obj_1, attr, value)",
"def do_configs(self, name):\n for _name, item in self._named_items(\".//configuration/property\", name):\n if item.text:\n defs = [\"-D%s\" % define.strip() for define in item.text.split(\" \") if define.strip()]\n fprint(\" \".join(defs))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Parse an srt caption text passed in and return a list of section numbers ordered by speech speed, fastest first
|
from typing import List, NamedTuple

from dateutil.parser import parse  # assumed: timestamps are parsed with dateutil


class Section(NamedTuple):  # assumed record type used by the snippet
    id: str
    duration: float
    text: str


def get_srt_section_ids(text: str) -> List[int]:
    blocks = text.strip().split("\n\n")
    sections = []
    for block in blocks:
        section_id, duration, caption = block.split("\n")
        start_time, end_time = duration.split("-->")
        seconds = (parse(end_time) - parse(start_time)).total_seconds()
        sections.append(Section(id=section_id, duration=seconds, text=caption))
    # Speech speed = caption characters per second; fastest sections first.
    return [
        int(section.id)
        for section in sorted(
            sections,
            key=lambda s: len(s.text) / s.duration,
            reverse=True,
        )
    ]
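
A short usage sketch, assuming (as the snippet itself does) that dateutil's parse accepts SRT-style timestamps:

caption_text = """1
00:00:00,000 --> 00:00:04,000
Short line

2
00:00:04,000 --> 00:00:06,000
Many more characters spoken in less time"""

print(get_srt_section_ids(caption_text))  # [2, 1]: section 2 has more characters per second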
|
[
"def get_srt_section_ids(text: str) -> List[int]:\n sections = []\n\n for section in grouper(text.strip().splitlines(), 4):\n idx, duration, caption = [sec.strip() for sec in section if sec]\n idx = int(idx)\n duration = caption_duration_from_string(duration)\n sections.append(Section(idx, duration, caption))\n\n return [section.idx for section in sorted(sections, key=lambda x: x.speed)]",
"def _parse_subtitle(html_chunk):\n subtitle = html_chunk.match(\n [\"div\", {\"class\": \"comment\"}],\n \"h2\",\n [\"span\", {\"class\": \"gray\"}],\n )\n\n return get_first_content(subtitle)",
"def parse_dialog(raw_text):\n stripped = raw_text.strip()\n raw_split = (line.split(\":\", 1) for line in stripped.split(\"\\n\"))\n return [\n SpokenText(speaker=speaker.strip(), text=text.strip())\n for speaker, text in raw_split\n ]",
"def parse_content(captions_url: str) -> str:\n data = requests.get(f\"{captions_url}&fmt=json3\").json()\n\n # create a string of all the words in utf format.\n events = data['events']\n words = []\n\n for event in events: \n segments = event['segs'] if 'segs' in event.keys() else None\n if segments is not None:\n for segment in segments:\n words.append(segment['utf8'])\n \n \n words = [word for word in words if word != '\\n']\n\n return ''.join(words)",
"def make_subs_fixed(response, bin_size=3000):\n transcriptions = []\n index = 0\n results = response['results']\n\n for result in results:\n words = result['alternatives'][0].get('words', None)\n if words is None:\n continue\n\n # start/end times for result (sorted?)\n bin_start = words[0].get('startTime', 0)\n bin_end = bin_start + bin_size\n\n # index of first word in the current bin\n word_first = 0\n\n # subtitle index\n index += 1\n\n for i, w in enumerate(words):\n word_start = w.get('startTime', 0)\n word_end = w.get('endTime', 0)\n\n if word_end//1000 < bin_end//1000:\n continue\n else:\n # create transcript for bin and append it to output\n transcript = ' '.join(map(lambda w: w['word'], words[word_first:i]))\n sub_start = datetime.timedelta(milliseconds=bin_start)\n sub_end = datetime.timedelta(milliseconds=words[i-1].get('endTime', 0))\n transcriptions.append(srt.Subtitle(index, sub_start, sub_end, transcript))\n\n # increment index and update bin info\n index += 1\n word_first = i\n bin_start = word_start\n bin_end = bin_start + bin_size\n\n # create and append transcript for the last bin\n transcript = ' '.join(map(lambda w: w['word'], words[word_first:]))\n sub_start = datetime.timedelta(milliseconds=bin_start)\n sub_end = datetime.timedelta(milliseconds=words[-1].get('endTime', 0))\n transcriptions.append(srt.Subtitle(index, sub_start, sub_end, transcript))\n index += 1\n\n # turn transcription list into subtitles\n subtitles = srt.compose(transcriptions)\n return subtitles",
"def parse(self, text, fps=None):\n text = text.strip()\n text = text.replace('\\x00','')\n sublist = self._parse(text, fps)\n if len(sublist) <=1:\n raise NoSubtitlesParseError()\n return sublist",
"def extract_silence_times(text):\n text_regex = re.compile(r'silence_start: (-?\\d+\\.?\\d*)|silence_end: (\\d+\\.?\\d*)')\n splits = []\n for start, end in text_regex.findall(text):\n if start:\n if float(start) < 0:\n start = 0\n splits.append(float(start) - PADDING)\n if end:\n splits.append(float(end) + PADDING)\n\n slit_i = iter(splits)\n\n return list(zip(slit_i, slit_i))",
"def scrape_from_html(album_html):\n soup = BeautifulSoup(album_html, 'html.parser')\n header = soup.find('h2').get_text()\n content = soup.find('div', class_='lyrics')\n\n thanks = content.find('div', class_='thanks')\n if thanks:\n thanks.decompose()\n\n note = content.find('div', class_='note')\n if note:\n note.decompose()\n\n #get rid of the 'ARTIST LYRICS' thing\n regex = re.compile(r'[A-Z ]*LYRICS')\n blocks = regex.split(content.get_text())\n\n text = '*'*(len(header)+4)\n text = text + '\\n' + '* '+header+' *'\n text = text + '\\n' + '*'*(len(header)+4)\n\n for block in blocks:\n text = text + '\\n' + block\n\n return text",
"def get_all_captions(f_name: str) -> dict:\n with open(f_name, 'r') as f:\n caption_file = f.read().splitlines()\n\n # Store captions in a dictionary\n all_captions = {}\n\n for line in caption_file:\n name = re.search(\"^.*?jpg\", line).group(0)\n caption = \"<start> \" + re.search(\"\\t(.*)\", line).group(0)[1:] + \" <end>\"\n\n if name in all_captions.keys():\n all_captions[name].append(caption)\n else:\n all_captions[name] = [caption]\n\n return all_captions",
"def subtitles(strict=True):\n # max_value settings are just to avoid overflowing TIMEDELTA_MAX_DAYS by\n # using arbitrary low enough numbers.\n #\n # We also skip subs with start time >= end time, so we split them into two\n # groups to avoid overlap.\n start_timestamp_strategy = timedeltas(min_value=0, max_value=500000)\n end_timestamp_strategy = timedeltas(min_value=500001, max_value=999999)\n\n # \\r is not legal inside Subtitle.content, it should have already been\n # normalised to \\n.\n content_strategy = st.text(min_size=1).filter(lambda x: \"\\r\" not in x)\n proprietary_strategy = st.text().filter(\n lambda x: all(eol not in x for eol in \"\\r\\n\")\n )\n\n if strict:\n content_strategy = content_strategy.filter(is_strictly_legal_content)\n\n subtitle_strategy = st.builds(\n srt.Subtitle,\n index=st.integers(min_value=0),\n start=start_timestamp_strategy,\n end=end_timestamp_strategy,\n proprietary=proprietary_strategy,\n content=content_strategy,\n )\n\n return subtitle_strategy",
"def get_sound_inverse_text_freq(connection, text_id):\n units = []\n unit_proj = {\n '_id': False,\n 'tokens.features.form': True\n }\n unit_proj['tokens.features.sound'] = True\n db_cursor = connection.connection[Unit.collection].find(\n {'text': text_id, 'unit_type': 'line'},\n unit_proj\n )\n for unit in db_cursor:\n for token in unit['tokens']:\n cur_features = token['features']\n # use the sound feature index as an identifier. \n # sound feature does not need to stay connected to its word\n for cur_tindex in cur_features['sound']:\n # continually append units as each line is processed\n units.append(cur_tindex)\n # count number of times each feature member appears in text\n units_count = Counter(units)\n # Frequency is the number of times a word occurs in a text \n # divided by the total number of words in that text\n frequencies = {}\n inv_frequencies = {}\n N_text = len(units)\n for sound in units_count:\n frequencies[sound] = units_count[sound]/N_text\n inv_frequencies[sound] = 1/frequencies[sound]\n return inv_frequencies",
"def findSectionScore(soup):\n\n #wordCount and score are used for each section, totalScore and totalWords are for the entire article\n wordCount = 0\n sectionScores = {}\n currentSection = ''\n score = 0\n totalScore = 0\n totalWords = 0\n\n # we go through all of the text in the article\n for tag in soup.findAll(\"span\"):\n\n #figure out which sub section the words belong to\n newSection = tag.find_previous(text=re.compile(r'^=='))\n\n if newSection == None:\n continue\n\n #update sectionScores and totalScore if we come to the end of a section\n if newSection is not currentSection:\n if wordCount and score:\n score = score/wordCount\n sectionScores[currentSection] = score\n\n currentSection = newSection\n totalWords += wordCount\n totalScore += score *wordCount\n\n score = 0\n wordCount = 0\n\n score, wordCount = findTagScore(tag, wordCount,score)\n\n if totalWords:\n\n totalScore = totalScore/totalWords\n\n\n\n return sectionScores, totalScore",
"def tokenize_captions(captions, lang='en'):\n\n tokenizer = MosesTokenizer(lang=lang)\n regex = re.compile('[^a-zA-Z ]')\n #First parameter is the replacement, second parameter is your input string\n \n return [tokenizer.tokenize(regex.sub('', caption), return_str=True) for caption in captions]",
"def read_rttm(rttm):\n sad = []\n with open(rttm, 'r') as fin:\n speech = fin.readlines()\n for line in speech:\n _, _, _, on, off, _, _, _, _ = line.strip('\\n').split('\\t')\n try:\n sad.append((float(on), float(off)))\n except:\n pass\n return sad",
"def getToc(self):\n toc = []\n toc.extend( str(self.disc).split('\\n') )\n for trk in self._tracks:\n toc.extend( str(trk).split('\\n') )\n # expand tabs to 4 spaces, strip trailing white space on each line\n toc = [line.expandtabs(4).rstrip() for line in toc]\n return toc",
"def sentencePreProcess(path, congruency=None, beat_type=None, extraction=None, check_beat=None):\n output_list = []\n with open(path, 'r') as f: #open stimuli file as object \n rawText = f.readlines()\n\n if beat_type == 'binary' and congruency == 'congruent':\n sent_offset = 7\n elif beat_type == 'binary' and congruency == 'incongruent1':\n sent_offset = 8\n elif beat_type == 'ternary' and congruency == 'congruent':\n sent_offset = 11 \n elif beat_type == 'ternary' and congruency == 'incongruent1':\n sent_offset = 12\n elif beat_type == 'ternary' and congruency == 'incongruent2':\n sent_offset = 10\n elif congruency == 'neutral':\n sent_offset = 8\n else:\n sent_offset = None\n\n # seperate the individual words and then turn underscore into spaces\n for sent_idx, line in enumerate(rawText): # iterate over lines in raw \n sentence = line[:].replace('\\n', '') # getting rid of the line break thing\n sentence = sentence.split(' ') # splitting the sentence up by spaces\n for word_idx, word in enumerate(sentence[:]): # iterate over words\n sentence[word_idx] = word.replace('_', ' ') # cleaning off the underscore and turning it into space\n stim_data = {'sent_stim':sentence, 'beat_type':beat_type, \n 'congruency':congruency, 'extraction': extraction, 'sent_number': sent_idx, \n 'check_beat': check_beat, 'trial_type': 'main', 'sent_offset': sent_offset,}\n output_list.append(stim_data)\n\n return output_list",
"def extract_speech(audio_file, rttm_file, chunks_dir):\n \n wav_list = []\n onsets = []\n offsets = []\n\n try:\n with open(rttm_file, 'r') as rttm:\n i = 0\n for line in rttm:\n # Replace tabulations by spaces\n fields = line.replace('\\t', ' ')\n # Remove several successive spaces\n fields = ' '.join(fields.split())\n fields = fields.split(' ')\n onset, duration, activity = float(fields[3]), float(fields[4]), fields[7]\n if activity == 'speech':\n basename = os.path.basename(audio_file).split('.wav')[0]\n output = os.path.join(chunks_dir, '_'.join([basename, str(i)])+'.wav')\n cmd = ['sox', audio_file, output,\n 'trim', str(onset), str(duration)]\n subprocess.call(cmd)\n wav_list.append(output)\n onsets.append(onset)\n offsets.append(onset+duration)\n i += 1\n except IOError:\n shutil.rmtree(chunks_dir)\n sys.exit(\"Issue when extracting speech segments from wav.\")\n\n onsets = np.array(onsets)\n offsets = np.array(offsets)\n\n return wav_list, onsets, offsets",
"def _interview_text_data(self, caption):\n text_data = []\n for item_list in caption:\n text_data.append(item_list['text'])\n return ' '.join(text_data)",
"def post_process(caption: str) -> str:\n\n end_of_sentence = \"<end>\"\n sentence = caption.split(end_of_sentence)[0]\n\n return sentence"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Triggered when another instrument is selected from the combobox.
|
def on_comboBox_instrument_currentIndexChanged(self, p0):
    self.instrument = self.getInstrumentFromName(p0)
    # Disable the manual temperature controls when the selected
    # instrument provides its own temperature range.
    self.groupBox_temp.setEnabled(not self.instrument.providesTempRange)
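
Note: this slot fires via PyQt's automatic connection mechanism (QMetaObject.connectSlotsByName wires any method named on_<objectName>_<signalName>), so no explicit connect call is needed, assuming the widget is named comboBox_instrument in the Designer form.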
|
[
"def on_pick(self, event):\r\n pass",
"def comboBoxOccasion_SelectionChanged(self, event):\n self.SelectedItem.occasion = event.GetInt()",
"def OnSelectedItemChanged(self):\n pass",
"def onPotencialChanged(self):\n self.potencial = self.potenzialDropDown.currentIndex()",
"def item_selection_changed(self):\n pass",
"def on_bell_select(self):\n print(\"on_bell_select was triggered\")",
"def comboBoxTargetAnimation_SelectionChanged(self, event):\n self.SelectedItem.animation2_id = DM.FixedIndex(event.GetInt())",
"def connect(self, *args, **kwargs):\n self.combobox.connect(*args, **kwargs)",
"def callbackFunc(event): # this function used to get selected item from the combo box and load into oid i/p box\r\n choice = quality_combo.get()\r\n choice = int((choice.strip())[0])\r\n\r\n oid.delete(0,1)\r\n oid.insert(0, choice)",
"def comboBoxUserAnimation_SelectionChanged(self, event):\n self.SelectedItem.animation1_id = DM.FixedIndex(event.GetInt())",
"def OnSelectedItemUpdated(self):\n pass",
"def comboBoxConsumable_SelectionChanged(self, event):\n self.SelectedItem.consumable = (event.GetInt() == 0)",
"def combobox_index_change_callback(self, index):\n\n layer = self.layers[index]\n band_count = layer.bandCount()\n\n # Clear all previous item\n self.dlg.comboBox_2.clear()\n # Add the number as string to the combobox.\n self.dlg.comboBox_2.addItems([str(i) for i in range(1, band_count + 1)])",
"def on_combo_view_changed(self, widget):\n model = widget.get_model()\n value = model[widget.get_active()][1]\n self.emit('view-changed', value)\n self.popdown()",
"def _instrumentRight(self, extra=None):\n self.ui.instrumentNameEntry.setText('instrument')\n self.ui.newProductButton.connect(self.ui.newProductButton,\n QtCore.SIGNAL('clicked()'),\n self._newProduct)",
"def OnSendTestCommand(self, event):\r\n name = self.m_comboBox8.GetValue()\r\n if name == 'Meter':\r\n adress = self.MeterAdress.GetValue()\r\n self.doOnSend(adress)\r\n elif name == 'Reference source (S)' :\r\n adress = self.SAdress.GetValue()\r\n self.doOnSend(adress)\r\n elif name == 'To calibrate (X)':\r\n adress = self.XAdress.GetValue()\r\n self.doOnSend(adress)\r\n else:\r\n self.m_textCtrl23.AppendText('select instrument\\n')",
"def on_vendor_selected(self, index: int):\n self.selected_vendor_index = index",
"def comboBoxParameter_SelectionChanged(self, event):\n self.SelectedItem.parameter_type = event.GetInt()",
"def option_activated(self, *args, **kwargs):\n if isinstance(self.options[self.selected], game_logic.ItemCharges) and\\\n 'stackable' in self.options[self.selected].categories and\\\n self.options[self.selected].charges > 1:\n self.director.push_scene(NumberInputScene(\n num_range=(1, self.options[self.selected].charges),\n num_start=self.options[self.selected].charges,\n title=str(self.options[self.selected]),\n callback=lambda t: self._split_stack_and_drop(t)))\n else:\n self.game.player.perform(actions.act_drop_item, self.game.player, self.options[self.selected])\n super().option_activated(*args, **kwargs)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
L{client._urljoin} preserves the fragment identifier from either the new path or the base URL respectively, as specified in the HTTP 1.1 bis draft.
|
def test_preserveFragments(self):
self.assertEqual(
client._urljoin(b"http://foo.com/bar#frag", b"/quux"),
b"http://foo.com/quux#frag",
)
self.assertEqual(
client._urljoin(b"http://foo.com/bar", b"/quux#frag2"),
b"http://foo.com/quux#frag2",
)
self.assertEqual(
client._urljoin(b"http://foo.com/bar#frag", b"/quux#frag2"),
b"http://foo.com/quux#frag2",
)
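
This matches the rule later standardized in RFC 7231 section 7.1.2: if a redirect's Location has no fragment, the user agent processes it as if it carried the original reference's fragment, while a fragment on the new reference always takes precedence.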
|
[
"def simple_urljoin(base, other):\n return '/'.join([base.rstrip('/'), other.lstrip('/')])",
"def unsafe_join_url_path(base, *args):\n base = \"/\" + base.lstrip(\"/\")\n for path in args:\n base = base.rstrip(\"/\") + \"/\" + path.lstrip(\"/\")\n return base",
"def BaseJoin(base, uriRef):\r\n if IsAbsolute(base):\r\n return Absolutize(uriRef, base)\r\n else:\r\n dummyscheme = 'basejoin'\r\n res = Absolutize(uriRef, '%s:%s' % (dummyscheme, base))\r\n if IsAbsolute(uriRef):\r\n # scheme will be inherited from uriRef\r\n return res\r\n else:\r\n # no scheme in, no scheme out\r\n return res[len(dummyscheme)+1:]",
"def BaseJoin(base, uriRef):\n if IsAbsolute(base):\n return Absolutize(uriRef, base)\n else:\n dummyscheme = 'basejoin'\n res = Absolutize(uriRef, '%s:%s' % (dummyscheme, base))\n if IsAbsolute(uriRef):\n # scheme will be inherited from uriRef\n return res\n else:\n # no scheme in, no scheme out\n return res[len(dummyscheme)+1:]",
"def _urljoin(left, right):\n\n # Handle the tricky case of right being a full URL\n tmp = urlparse.urlparse(right)\n if tmp.scheme or tmp.netloc:\n # Go ahead and use urlparse.urljoin()\n return urlparse.urljoin(left, right)\n\n # Check for slashes\n joincond = (left[-1:], right[:1])\n if joincond == ('/', '/'):\n # Too many, preserve only one\n return left + right[1:]\n elif '/' in joincond:\n # Just one; great!\n return left + right\n else:\n # Not enough; add one\n return left + '/' + right",
"def join(base, *parts):\n path = base\n if not parts:\n path[:0] + SEP\n for part in parts:\n if part.startswith(SEP):\n path = part\n elif not path or path.endswith(SEP):\n path += part\n else:\n path += SEP + part\n return path",
"def url_join(base_url, url, context, *args):\r\n if url_is_absolute(url):\r\n return iri_to_uri(url)\r\n elif base_url:\r\n return iri_to_uri(urljoin(base_url, url))\r\n else:\r\n LOGGER.warning('Relative URI reference without a base URI: ' + context,\r\n *args)\r\n return None",
"def nativejoin(base, path):\n return url2pathname(pathjoin(base, path))",
"def url_join(url, components):\n assert type(components) is list\n assert '\\\\' not in url, \\\n 'URL is not supposed to contain backslashes. Is this windows path? '+url\n return url + '/' + '/'.join(components)",
"def rebase_one(base, url, force_rebase):\n parsed = urlparse.urlparse(url)\n if parsed.scheme == parsed.netloc == '':\n return urlparse.urljoin(base, url)\n elif force_rebase:\n return base + url\n else:\n return url",
"def path_join(fragment, *path):\n result = os.path.join(fragment, *path)\n return os.path.normpath(result)",
"def url_pathcombine(self, href_str):\n split_urls = self.url.split('/') \n if href_str.lower().startswith('http'):\n return href_str\n elif href_str.startswith('#'):\n return ''\n else:\n current_host = ''\n if self.url.lower().__contains__('http'):\n current_host_list = split_urls[0:3]\n current_host = '/'.join(current_host_list)\n else:\n current_host = split_urls[0]\n current_root = split_urls[0:-1]\n (splitchar_num, href_path) = self._parse_href(href_str)\n new_url = self.url + \"/\" + href_path\n if splitchar_num == 0:\n new_url = \"/\".join(current_root) + \"/\" + href_path\n elif splitchar_num == 1:\n new_url = current_host + \"/\" + href_path\n elif splitchar_num == -1:\n if len(split_urls) > 2:\n new_url = \"/\".join(split_urls[0:-2]) + \"/\" + href_path\n else:\n return self.url + \"/\" + href_path\n return new_url",
"def pathjoin(path1, path2):\n u = urllib.parse.urlparse(path1)\n if u.scheme != \"\":\n return urllib.parse.urljoin(path1, path2)\n\n return _os_path_join(path1, path2)",
"def concat_url(endpoint, url):\n return \"%s/%s\" % (endpoint.rstrip(\"/\"), url.strip(\"/\"))",
"def urljoin_bytes(*atoms):\n url = b'/'.join([x for x in atoms if x])\n while b'//' in url:\n url = url.replace(b'//', b'/')\n # Special-case the final url of \"\", and return \"/\" instead.\n return url or b'/'",
"def urljoin(*atoms):\n url = '/'.join([x for x in atoms if x])\n while '//' in url:\n url = url.replace('//', '/')\n # Special-case the final url of \"\", and return \"/\" instead.\n return url or '/'",
"def test_join_url_preserves_original_url(self, r_request):\n url = Url('http://domain.com/')\n r_request = MagicMock(return_value=None)\n\n new_url = url.join('/path')\n new_url.get()\n requests.request.assert_called_with(\n 'GET',\n 'http://domain.com/path'\n )\n\n new_url = url.join('/path/')\n new_url.get()\n requests.request.assert_called_with(\n 'GET',\n 'http://domain.com/path/'\n )",
"def Absolutize(uriRef, baseUri):\r\n # Reasons to avoid using urllib.basejoin() and urlparse.urljoin():\r\n # - Both are partial implementations of long-obsolete specs.\r\n # - Both accept relative URLs as the base, which no spec allows.\r\n # - urllib.basejoin() mishandles the '' and '..' references.\r\n # - If the base URL uses a non-hierarchical or relative path,\r\n # or if the URL scheme is unrecognized, the result is not\r\n # always as expected (partly due to issues in RFC 1808).\r\n # - If the authority component of a 'file' URI is empty,\r\n # the authority component is removed altogether. If it was\r\n # not present, an empty authority component is in the result.\r\n # - '.' and '..' segments are not always collapsed as well as they\r\n # should be (partly due to issues in RFC 1808).\r\n # - Effective Python 2.4, urllib.basejoin() *is* urlparse.urljoin(),\r\n # but urlparse.urljoin() is still based on RFC 1808.\r\n\r\n # This procedure is based on the pseudocode in RFC 3986 sec. 5.2.\r\n #\r\n # ensure base URI is absolute\r\n if not baseUri:\r\n raise ValueError('baseUri is required and must be a non empty string')\r\n if not IsAbsolute(baseUri):\r\n raise ValueError('%r is not an absolute URI' % baseUri)\r\n # shortcut for the simplest same-document reference cases\r\n if uriRef == '' or uriRef[0] == '#':\r\n return baseUri.split('#')[0] + uriRef\r\n # ensure a clean slate\r\n tScheme = tAuth = tPath = tQuery = None\r\n # parse the reference into its components\r\n (rScheme, rAuth, rPath, rQuery, rFrag) = SplitUriRef(uriRef)\r\n # if the reference is absolute, eliminate '.' and '..' path segments\r\n # and skip to the end\r\n if rScheme is not None:\r\n tScheme = rScheme\r\n tAuth = rAuth\r\n tPath = RemoveDotSegments(rPath)\r\n tQuery = rQuery\r\n else:\r\n # the base URI's scheme, and possibly more, will be inherited\r\n (bScheme, bAuth, bPath, bQuery, bFrag) = SplitUriRef(baseUri)\r\n # if the reference is a net-path, just eliminate '.' and '..' path\r\n # segments; no other changes needed.\r\n if rAuth is not None:\r\n tAuth = rAuth\r\n tPath = RemoveDotSegments(rPath)\r\n tQuery = rQuery\r\n # if it's not a net-path, we need to inherit pieces of the base URI\r\n else:\r\n # use base URI's path if the reference's path is empty\r\n if not rPath:\r\n tPath = bPath\r\n # use the reference's query, if any, or else the base URI's,\r\n tQuery = rQuery is not None and rQuery or bQuery\r\n # the reference's path is not empty\r\n else:\r\n # just use the reference's path if it's absolute\r\n if rPath[0] == '/':\r\n tPath = RemoveDotSegments(rPath)\r\n # merge the reference's relative path with the base URI's path\r\n else:\r\n if bAuth is not None and not bPath:\r\n tPath = '/' + rPath\r\n else:\r\n tPath = bPath[:bPath.rfind('/')+1] + rPath\r\n tPath = RemoveDotSegments(tPath)\r\n # use the reference's query\r\n tQuery = rQuery\r\n # since the reference isn't a net-path,\r\n # use the authority from the base URI\r\n tAuth = bAuth\r\n # inherit the scheme from the base URI\r\n tScheme = bScheme\r\n # always use the reference's fragment (but no need to define another var)\r\n #tFrag = rFrag\r\n\r\n # now compose the target URI (RFC 3986 sec. 5.3)\r\n return UnsplitUriRef((tScheme, tAuth, tPath, tQuery, rFrag))",
"def Absolutize(uriRef, baseUri):\n # Reasons to avoid using urllib.basejoin() and urlparse.urljoin():\n # - Both are partial implementations of long-obsolete specs.\n # - Both accept relative URLs as the base, which no spec allows.\n # - urllib.basejoin() mishandles the '' and '..' references.\n # - If the base URL uses a non-hierarchical or relative path,\n # or if the URL scheme is unrecognized, the result is not\n # always as expected (partly due to issues in RFC 1808).\n # - If the authority component of a 'file' URI is empty,\n # the authority component is removed altogether. If it was\n # not present, an empty authority component is in the result.\n # - '.' and '..' segments are not always collapsed as well as they\n # should be (partly due to issues in RFC 1808).\n # - Effective Python 2.4, urllib.basejoin() *is* urlparse.urljoin(),\n # but urlparse.urljoin() is still based on RFC 1808.\n\n # This procedure is based on the pseudocode in RFC 3986 sec. 5.2.\n #\n # ensure base URI is absolute\n if not baseUri:\n raise ValueError('baseUri is required and must be a non empty string')\n if not IsAbsolute(baseUri):\n raise ValueError('%r is not an absolute URI' % baseUri)\n # shortcut for the simplest same-document reference cases\n if uriRef == '' or uriRef[0] == '#':\n return baseUri.split('#')[0] + uriRef\n # ensure a clean slate\n tScheme = tAuth = tPath = tQuery = None\n # parse the reference into its components\n (rScheme, rAuth, rPath, rQuery, rFrag) = SplitUriRef(uriRef)\n # if the reference is absolute, eliminate '.' and '..' path segments\n # and skip to the end\n if rScheme is not None:\n tScheme = rScheme\n tAuth = rAuth\n tPath = RemoveDotSegments(rPath)\n tQuery = rQuery\n else:\n # the base URI's scheme, and possibly more, will be inherited\n (bScheme, bAuth, bPath, bQuery, bFrag) = SplitUriRef(baseUri)\n # if the reference is a net-path, just eliminate '.' and '..' path\n # segments; no other changes needed.\n if rAuth is not None:\n tAuth = rAuth\n tPath = RemoveDotSegments(rPath)\n tQuery = rQuery\n # if it's not a net-path, we need to inherit pieces of the base URI\n else:\n # use base URI's path if the reference's path is empty\n if not rPath:\n tPath = bPath\n # use the reference's query, if any, or else the base URI's,\n tQuery = rQuery is not None and rQuery or bQuery\n # the reference's path is not empty\n else:\n # just use the reference's path if it's absolute\n if rPath[0] == '/':\n tPath = RemoveDotSegments(rPath)\n # merge the reference's relative path with the base URI's path\n else:\n if bAuth is not None and not bPath:\n tPath = '/' + rPath\n else:\n tPath = bPath[:bPath.rfind('/')+1] + rPath\n tPath = RemoveDotSegments(tPath)\n # use the reference's query\n tQuery = rQuery\n # since the reference isn't a net-path,\n # use the authority from the base URI\n tAuth = bAuth\n # inherit the scheme from the base URI\n tScheme = bScheme\n # always use the reference's fragment (but no need to define another var)\n #tFrag = rFrag\n\n # now compose the target URI (RFC 3986 sec. 5.3)\n return UnsplitUriRef((tScheme, tAuth, tPath, tQuery, rFrag))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Replace the string "HOST" in C{template} with this test's host. Byte strings Python between (and including) versions 3.0 and 3.4 cannot be formatted using C{%} or C{format} so this does a simple replace.
|
def makeURIString(self, template):
self.assertIsInstance(self.host, bytes)
self.assertIsInstance(self.uriHost, bytes)
self.assertIsInstance(template, bytes)
self.assertIn(b"HOST", template)
return template.replace(b"HOST", self.uriHost)
|
[
"def test_replace_template(self):\n template_sample = (r'a {{templatename '\n r' | accessdate={{Fecha|1993}} '\n r' |atitle=The [[real title]] }}')\n self.assertEqual(textlib.replaceExcept(template_sample, 'a', 'X',\n ['template'], site=self.site),\n 'X' + template_sample[1:])\n\n template_sample = (r'a {{templatename '\n r' | 1={{a}}2{{a}} '\n r' | 2={{a}}1{{a}} }}')\n self.assertEqual(textlib.replaceExcept(template_sample, 'a', 'X',\n ['template'], site=self.site),\n 'X' + template_sample[1:])\n\n template_sample = (r'a {{templatename '\n r' | 1={{{a}}}2{{{a}}} '\n r' | 2={{{a}}}1{{{a}}} }}')\n self.assertEqual(textlib.replaceExcept(template_sample, 'a', 'X',\n ['template'], site=self.site),\n 'X' + template_sample[1:])\n\n # sf.net bug 1575: unclosed template\n template_sample = template_sample[:-2]\n self.assertEqual(textlib.replaceExcept(template_sample, 'a', 'X',\n ['template'], site=self.site),\n 'X' + template_sample[1:])",
"def expand(template, variables=None):\r\n if variables is None:\r\n variables = {}\r\n return patterns.sub(functools.partial(_replace, variables), template)",
"def test_str_replace_syntax(self):\r\n\r\n snippet = {'str_replace': [{'template': 'Template var1 string var2'},\r\n {'params': {'var1': 'foo', 'var2': 'bar'}}]}\r\n\r\n tmpl = parser.Template(hot_tpl_empty)\r\n\r\n self.assertRaises(TypeError, self.resolve, snippet, tmpl)",
"def tmplt_replacer(template, key, what, condition, else_=\"\"):\n if condition:\n tmp = template.safe_substitute({key : what})\n else:\n tmp = template.safe_substitute({key : else_})\n return string.Template(tmp)",
"def test_str_replace_number(self):\r\n\r\n snippet = {'str_replace': {'template': 'Template number string bar',\r\n 'params': {'number': 1}}}\r\n snippet_resolved = 'Template 1 string bar'\r\n\r\n tmpl = parser.Template(hot_tpl_empty)\r\n\r\n self.assertEqual(snippet_resolved, self.resolve(snippet, tmpl))",
"def test_txt_template(self):\n self._set_zone(\"\"\"\n# default A value and TXT template\n:127.0.0.2:IP address $ is listed\n# 127.0.0.4 will use default A and TXT\n127.0.0.4\n# 127.0.0.5 will use specific A and default TXT\n127.0.0.5 :5\n# 127.0.0.6 will use specific a and no TXT\n127.0.0.6 :6:\n# 127.0.0.7 will use default A and specific TXT\n127.0.0.7 IP address $ running an open relay \n \"\"\")\n self.assertEqual(self.lookup_ip('127.0.0.4'), '127.0.0.2')\n self.assertEqual(self.lookup_ip('127.0.0.4',t='txt'), 'IP address 127.0.0.4 is listed')\n self.assertEqual(self.lookup_ip('127.0.0.5'), '127.0.0.5')\n self.assertEqual(self.lookup_ip('127.0.0.5',t='txt'), 'IP address 127.0.0.5 is listed')\n self.assertEqual(self.lookup_ip('127.0.0.6'), '127.0.0.6')\n self.assertEqual(self.lookup_ip('127.0.0.6',t='txt'), None)\n self.assertEqual(self.lookup_ip('127.0.0.7'), '127.0.0.2')\n self.assertEqual(self.lookup_ip('127.0.0.7',t='txt'), 'IP address 127.0.0.7 running an open relay')",
"def fill_template(template_text,\n context=None,\n retry=10,\n compiler_class=Compiler,\n first_exception=None,\n futurized=False,\n python_template_version='3',\n **kwargs):\n if template_text is None:\n raise TypeError(\"Template text specified as None to fill_template.\")\n if not context:\n context = kwargs\n if isinstance(python_template_version, str):\n python_template_version = packaging.version.parse(python_template_version)\n klass = Template.compile(source=template_text, compilerClass=compiler_class)\n t = klass(searchList=[context])\n try:\n return unicodify(t)\n except NotFound as e:\n if first_exception is None:\n first_exception = e\n if sys.version_info.major > 2 and python_template_version.release[0] < 3 and retry > 0:\n tb = e.__traceback__\n last_stack = traceback.extract_tb(tb)[-1]\n if last_stack.name == '<listcomp>':\n # On python 3 list, dict and set comprehensions as well as generator expressions\n # have their own local scope, which prevents accessing frame variables in cheetah.\n # We can work around this by replacing `$var` with `var`, but we only do this for\n # list comprehensions, as this has never worked for dict or set comprehensions or\n # generator expressions in Cheetah.\n var_not_found = e.args[0].split(\"'\")[1]\n replace_str = 'VFFSL(SL,\"%s\",True)' % var_not_found\n lineno = last_stack.lineno - 1\n module_code = t._CHEETAH_generatedModuleCode.splitlines()\n module_code[lineno] = module_code[lineno].replace(replace_str, var_not_found)\n module_code = \"\\n\".join(module_code)\n compiler_class = create_compiler_class(module_code)\n return fill_template(template_text=template_text,\n context=context,\n retry=retry - 1,\n compiler_class=compiler_class,\n first_exception=first_exception,\n python_template_version=python_template_version,\n )\n raise first_exception or e\n except Exception as e:\n if first_exception is None:\n first_exception = e\n if sys.version_info.major > 2 and python_template_version.release[0] < 3 and not futurized:\n # Possibly an error caused by attempting to run python 2\n # template code on python 3. Run the generated module code\n # through futurize and hope for the best.\n module_code = t._CHEETAH_generatedModuleCode\n module_code = futurize_preprocessor(module_code)\n compiler_class = create_compiler_class(module_code)\n return fill_template(template_text=template_text,\n context=context,\n retry=retry,\n compiler_class=compiler_class,\n first_exception=first_exception,\n futurized=True,\n python_template_version=python_template_version,\n )\n raise first_exception or e",
"def test_host_dot_escape_in_re(self):\n def constructor(proto_host: str) -> bytes:\n return (f'<a href=\"{proto_host}\"></a>'\n f'<img src=\"{proto_host}\"/>'\n f'<link href=\"{proto_host}\"/>').encode()\n\n self.assert_modified_html(\n self.get_source(constructor, ['ya.ru', 'yazru.ru']),\n self.get_source(constructor, ['', 'yazru.ru'])\n )",
"def materialize(template, substitutions, outfile=None):\n materialized_str = template\n for param, val in substitutions.items():\n materialized_str = re.sub(param, val, materialized_str)\n\n if outfile:\n with open(outfile, \"w\") as of:\n of.write(materialized_str)\n\n return materialized_str",
"def template_replace(data, replacement):\n\n # Make the replacement variable visible for the __eval_replace function\n global __item\n __item = replacement\n\n # Clone the data to keep the original untouched\n local_data = copy(data)\n\n # Walk through the data structure and try to replace all special strings\n if isinstance(local_data, list):\n local_data = map(\n lambda x: template_replace(x, replacement), local_data)\n elif isinstance(local_data, dict):\n for key, val in local_data.iteritems():\n local_data[key] = template_replace(val, replacement)\n elif isinstance(local_data, basestring):\n # Replace the special string by it's evaluated value\n p = re.compile(r'\\{\\[\\{\\s*(\\w+)([^}\\s]+|)\\s*\\}\\]\\}')\n local_data = p.sub(__eval_replace, local_data)\n\n return local_data",
"def expand_vdf_template(input_text, variables):\n def expand_and_quote(match):\n try:\n expansion = variables[match.group(1)]\n except KeyError:\n raise ExpansionError(\"unknown variable %s\" % (match.group(0),))\n return vdf_quote(expansion)\n\n return re.sub(r'\\$\\{(.*?)\\}', expand_and_quote, input_text)",
"def set_source_template(template):",
"def substitute_params(cfg, params):\n return Template(cfg).safe_substitute(params)",
"def parameterized_config(template) -> str:\n all_vars = get_all_expansion_variables()\n return template.format(**all_vars)",
"def replace(self, pattern, repl, *args, **kwargs):\n with decoded(self):\n self.body, c = utils.safe_subn(\n pattern, repl, self.body, *args, **kwargs\n )\n c += self.headers.replace(pattern, repl, *args, **kwargs)\n return c",
"def paste_into_template(template, dict_of_values):\n\n for key in dict_of_values.keys():\n template = template.replace(\"$\"+key, str(dict_of_values[key]))\n\n return template",
"def with_host(self, host):\n # N.B. doesn't cleanup query/fragment\n if not isinstance(host, str):\n raise TypeError(\"Invalid host type\")\n if not self.is_absolute():\n raise ValueError(\"host replacement is not allowed for relative URLs\")\n if not host:\n raise ValueError(\"host removing is not allowed\")\n val = self._val\n return URL(\n self._val._replace(\n netloc=self._make_netloc(val.username, val.password, host, val.port)\n ),\n encoded=True,\n )",
"def substitute(s, variables):\n env = variables.copy()\n def repl(m):\n try: return str(eval(m.group(1), env))\n except: return ''\n return re.sub('\\{(.+?)\\}', repl, s)",
"def ReplaceVariables(text, variables):\n for unused_prefix, var_name, value in variables:\n text = text.replace('@%s@' % var_name.upper(), str(value))\n\n return text",
"def substitute(string, substitutions):\n for key, value in substitutions:\n string = re.sub(re.escape(\"{{\" + key + \"}}\"), value, string)\n return string"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
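The helper in the record above relies on bytes.replace() because, before PEP 461 landed in Python 3.5, bytes objects supported neither the C{%} operator nor a C{format} method. A minimal standalone illustration follows; the template and host values are hypothetical, not taken from the test suite:

# On Python 3.0-3.4, b"http://%s/path" % b"example.com" raises TypeError,
# and bytes has no .format() method; .replace() works on every version.
template = b"http://HOST/path"        # hypothetical template
uriHost = b"example.com:8080"         # hypothetical host:port value
uri = template.replace(b"HOST", uriHost)
assert uri == b"http://example.com:8080/path"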
Assert that all of a L{client.URI}'s components match the expected values.
|
def assertURIEquals(
self, uri, scheme, netloc, host, port, path, params=b"", query=b"", fragment=b""
):
self.assertEqual(
(scheme, netloc, host, port, path, params, query, fragment),
(
uri.scheme,
uri.netloc,
uri.host,
uri.port,
uri.path,
uri.params,
uri.query,
uri.fragment,
),
)
|
[
"def test_check_uri(self):\n # OK\n self.assertTrue(SiteService.check_uri(\"localhost:12345\"))\n self.assertTrue(SiteService.check_uri(\"www.google.com:12345\"))\n self.assertTrue(SiteService.check_uri(\"127.0.0.1:12345\"))\n # Missing Port\n self.assertFalse(SiteService.check_uri(\"localhost:\"))\n # Missing seperator\n self.assertFalse(SiteService.check_uri(\"localhost\"))\n self.assertFalse(SiteService.check_uri(\"localhost12345\"))\n self.assertFalse(SiteService.check_uri(\"localhost@12345\"))\n # Starts with invalid char\n self.assertFalse(SiteService.check_uri(\"_localhost:12345\"))\n self.assertFalse(SiteService.check_uri(\".localhost:12345\"))\n # Non-numeric port\n self.assertFalse(SiteService.check_uri(\"localhost:bah\"))",
"def test_client_url_composition(\n opener, path: Optional[str], params: Optional[Dict[str, str]], expected: str\n) -> None:\n client = Client(opener=opener, base_url=\"https://example.com/\")\n assert client._build_request(path=path, params=params).full_url == expected",
"def test_uri(sut: SystemUnderTest, uri, response):\n\n # Test Assertion.PROTO_URI_SAFE_CHARS\n safe = safe_uri(uri)\n result = Result.PASS if safe else Result.FAIL\n msg = 'Test passed' if safe else 'URI contains one or more unsafe chars'\n sut.log(result, response.request.method, response.status_code, uri,\n Assertion.PROTO_URI_SAFE_CHARS, msg)\n\n # Test Assertion.PROTO_URI_NO_ENCODED_CHARS\n encoded = encoded_char_in_uri(uri)\n result = Result.PASS if not encoded else Result.FAIL\n msg = ('Test passed' if not encoded else\n 'URI contains one or more percent-encoded chars')\n sut.log(result, response.request.method, response.status_code, uri,\n Assertion.PROTO_URI_NO_ENCODED_CHARS, msg)\n\n # Test Assertion.PROTO_URI_RELATIVE_REFS\n result, msg = check_relative_ref(uri)\n sut.log(result, response.request.method, response.status_code, uri,\n Assertion.PROTO_URI_RELATIVE_REFS, msg)",
"def test_equality(self):\n self.assertEqual(geo_uri(\"geo:0,0,0\"), geo_uri(\"geo:0,0,0\"))\n self.assertEqual(geo_uri(\"geo:0,0,0;crs=wgs84\"), geo_uri(\"geo:0,0,0\"))\n self.assertEqual(geo_uri(\"geo:0,0,0;crs=wgs84\"), geo_uri(\"geo:0,0,0;crs=wgs84\"))\n \n self.assertEqual(geo_uri(\"geo:90,0,0\"), geo_uri(\"geo:90,0,0\"))\n self.assertEqual(geo_uri(\"geo:90,0,0\"), geo_uri(\"geo:90,-22.43,0;crs=wgs84\"))\n self.assertEqual(geo_uri(\"geo:90,0,0\"), geo_uri(\"geo:90,180,0\"))\n self.assertEqual(geo_uri(\"geo:90,0,0\"), geo_uri(\"geo:90,-180,0\"))\n self.assertEqual(geo_uri(\"geo:0,180,0\"), geo_uri(\"geo:0,-180,0\"))\n self.assertEqual(geo_uri(\"geo:27,180,0\"), geo_uri(\"geo:27,-180,0\"))\n \n self.assertEqual(geo_uri(\"geo:0,0,0;u=30\"), geo_uri(\"geo:0,0,0;u=30\"))\n self.assertEqual(geo_uri(\"geo:0,0,0;u=30\"), geo_uri(\"geo:0,0,0;u=29.9999\"))\n self.assertNotEqual(geo_uri(\"geo:0,0,0;u=30\"), geo_uri(\"geo:0,0,0\"))\n self.assertNotEqual(geo_uri(\"geo:0,0,0;u=30\"), geo_uri(\"geo:0,0;u=30\"))\n \n self.assertNotEqual(geo_uri(\"geo:0,0,0\"), geo_uri(\"geo:0,0\"))\n self.assertNotEqual(geo_uri(\"geo:0,0,0\"), geo_uri(\"geo:1,0,0\"))\n self.assertNotEqual(geo_uri(\"geo:0,0,0\"), geo_uri(\"geo:0,1,0\"))\n self.assertNotEqual(geo_uri(\"geo:0,0,0\"), geo_uri(\"geo:0,0,1\"))\n \n self.assertEqual(geo_uri(\"geo:40.685922,-111.853206,1321\"), geo_uri(\"geo:40.685922,-111.853206,1321\"))\n self.assertEqual(geo_uri(\"geo:40.685922,-111.853206\"), geo_uri(\"geo:40.685922,-111.853206\"))\n self.assertNotEqual(geo_uri(\"geo:40.685922,-111.853206,1321\"), geo_uri(\"geo:40.685922,-111.853206\"))\n \n self.assertEqual(geo_uri(\"geo:40.685,-111.85,1321\"), geo_uri(\"geo:40.685000,-111.8500,1321\"))\n \n self.assertEqual(geo_uri(\"geo:0,0,0;unknown=ab-cd\"), geo_uri(\"geo:0,0,0;unknown=ab%2dcd\"))\n self.assertNotEqual(geo_uri(\"geo:0,0,0;unknown=ab%21cd\"), geo_uri(\"geo:0,0,0\"))\n \n self.assertEqual(geo_uri(\"geo:0,0;a=1;b=2\"), geo_uri(\"geo:0,0;b=2;a=1\"))",
"def test_emptyPath(self):\n uri = self.makeURIString(b\"http://HOST/\")\n self.assertURIEquals(\n client.URI.fromBytes(uri),\n scheme=b\"http\",\n netloc=self.uriHost,\n host=self.host,\n port=80,\n path=b\"/\",\n )",
"def test_originForm(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST/foo\"))\n self.assertEqual(b\"/foo\", uri.originForm)",
"def test_valid_endpoint_uri(test_endpoint):\n\n with pytest.raises(ValueError):\n test_endpoint.uri = False",
"def assertURLEqual(self, first, second, msg=None):\n\n first_parsed = urlparse(first)\n second_parsed = urlparse(second)\n self.assertEqual(first_parsed[:3], second_parsed[:3], msg)\n\n first_qsl = sorted(parse_qsl(first_parsed.query))\n second_qsl = sorted(parse_qsl(second_parsed.query))\n self.assertEqual(first_qsl, second_qsl, msg)",
"def test_standard_uris(sut: SystemUnderTest, uri, response):\n\n if response.request.method == 'GET':\n # Test Assertion.PROTO_STD_URI_SERVICE_ROOT\n if uri == '/redfish/v1/':\n result, msg = response_is_json(uri, response)\n sut.log(result, response.request.method, response.status_code,\n uri, Assertion.PROTO_STD_URI_SERVICE_ROOT, msg)\n\n # Test Assertion.PROTO_STD_URI_VERSION\n if uri == '/redfish':\n result, msg = check_slash_redfish(uri, response)\n sut.log(result, response.request.method, response.status_code,\n uri, Assertion.PROTO_STD_URI_VERSION, msg)\n\n # Test Assertion.PROTO_STD_URIS_SUPPORTED\n if uri in ['/redfish', '/redfish/v1/', '/redfish/v1/odata']:\n result, msg = response_is_json(uri, response)\n sut.log(result, response.request.method, response.status_code,\n uri, Assertion.PROTO_STD_URIS_SUPPORTED, msg)\n if uri == '/redfish/v1/$metadata':\n result, msg = response_is_xml(uri, response)\n sut.log(result, response.request.method, response.status_code,\n uri, Assertion.PROTO_STD_URIS_SUPPORTED, msg)\n\n # Test Assertion.PROTO_STD_URI_SERVICE_ROOT_REDIRECT\n if uri == '/redfish/v1':\n result, msg = response_is_json(uri, response)\n sut.log(result, response.request.method, response.status_code,\n uri, Assertion.PROTO_STD_URI_SERVICE_ROOT_REDIRECT, msg)",
"def test_pathIterable(self):\n url = URL(path=[u'hello', u'world'])\n self.assertEqual(url.path, (u'hello', u'world'))",
"def test_identicalEqual(self):\n u = URL.fromText('http://localhost/')\n self.assertEqual(u, u)",
"def test_build_uri_slashs(self):\n iiq = insightiq_api.InsightiqApi(username='pat', password='a')\n\n value = iiq._build_uri('/someEndpoint')\n expected = 'https://localhost/someEndpoint'\n\n self.assertEqual(value, expected)",
"def test_urls_are_valid():\n for key in eio.DATA_URLS:\n dataset = eio.DATA_URLS[key]\n if not isinstance(dataset, list):\n dataset = [dataset]\n for url, name, kind in dataset:\n r = requests.get(\"http://www.example.com\")\n assert r.status_code == 200",
"def test_build_uri(self):\n iiq = insightiq_api.InsightiqApi(username='pat', password='a')\n\n value = iiq._build_uri('someEndpoint')\n expected = 'https://localhost/someEndpoint'\n\n self.assertEqual(value, expected)",
"def test_originFormNoPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"))\n self.assertEqual(b\"/\", uri.originForm)",
"def test_add_uri_ipv4() -> None: # type: ignore\n api = act.api.Act(\"\", None, \"error\")\n\n uri = \"http://127.0.0.1:8080/home\"\n\n facts = act.api.helpers.uri_facts(api, uri)\n\n assert len(facts) == 5\n assert (\n api.fact(\"componentOf\").source(\"ipv4\", \"127.0.0.1\").destination(\"uri\", uri)\n in facts\n )\n assert (\n api.fact(\"componentOf\").source(\"path\", \"/home\").destination(\"uri\", uri) in facts\n )\n assert api.fact(\"scheme\", \"http\").source(\"uri\", uri) in facts\n assert api.fact(\"basename\", \"home\").source(\"path\", \"/home\") in facts\n assert api.fact(\"port\", \"8080\").source(\"uri\", uri) in facts",
"def test_host_from_uri(self):\n self.assertEqual(host_from_uri(u'http://a.b-c.com:8080'), (u'a.b-c.com', u'8080'))\n self.assertEqual(host_from_uri(u'https://a.b.com:8080'), (u'a.b.com', u'8080'))\n self.assertEqual(host_from_uri(u'http://www.example.com'), (u'www.example.com', u'80'))\n self.assertEqual(host_from_uri(u'https://www.example.com'), (u'www.example.com', u'443'))",
"def test_invalid_uri() -> None:\n _invalid_uri = \"http://example.com/an invalid path\"\n with pytest.raises(InvalidURIError):\n _ = URI(_invalid_uri)",
"def test_similarEqual(self):\n u1 = URL.fromText('http://localhost/')\n u2 = URL.fromText('http://localhost/')\n self.assertEqual(u1, u2)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
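The helper in the record above compares every URI component in a single tuple, so one failed assertion reports all mismatches at once. A standalone sketch of the same idea using the standard library's urlparse (the URI and expected values here are hypothetical, and urlparse is only an approximation of client.URI):

from urllib.parse import urlparse

# Compare all components at once so a failure shows every mismatch.
uri = urlparse(b"http://example.com:8080/foo?bar#baz")
expected = (b"http", b"example.com:8080", b"example.com", 8080,
            b"/foo", b"", b"bar", b"baz")
actual = (uri.scheme, uri.netloc, uri.hostname, uri.port,
          uri.path, uri.params, uri.query, uri.fragment)
assert actual == expected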
L{client.URI.fromBytes} by default assumes port 80 for the I{http} scheme and 443 for the I{https} scheme.
|
def test_parseDefaultPort(self):
uri = client.URI.fromBytes(self.makeURIString(b"http://HOST"))
self.assertEqual(80, uri.port)
# Weird (but commonly accepted) structure uses default port.
uri = client.URI.fromBytes(self.makeURIString(b"http://HOST:"))
self.assertEqual(80, uri.port)
uri = client.URI.fromBytes(self.makeURIString(b"https://HOST"))
self.assertEqual(443, uri.port)
|
[
"def test_parseCustomDefaultPort(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"), defaultPort=5144)\n self.assertEqual(5144, uri.port)\n uri = client.URI.fromBytes(\n self.makeURIString(b\"https://HOST\"), defaultPort=5144\n )\n self.assertEqual(5144, uri.port)",
"def test_externalUnicodeInterference(self):\n goodInput = self.makeURIString(b\"http://HOST/path\")\n badInput = goodInput.decode(\"ascii\")\n urlparse(badInput)\n uri = client.URI.fromBytes(goodInput)\n self.assertIsInstance(uri.scheme, bytes)\n self.assertIsInstance(uri.host, bytes)\n self.assertIsInstance(uri.path, bytes)",
"def test_hostBracketIPv6AddressLiteral(self):\n uri = client.URI.fromBytes(b\"http://[::1]:80/index.html\")\n\n self.assertEqual(uri.host, b\"::1\")\n self.assertEqual(uri.netloc, b\"[::1]:80\")\n self.assertEqual(uri.toBytes(), b\"http://[::1]:80/index.html\")",
"def test_emptyPath(self):\n uri = self.makeURIString(b\"http://HOST/\")\n self.assertURIEquals(\n client.URI.fromBytes(uri),\n scheme=b\"http\",\n netloc=self.uriHost,\n host=self.host,\n port=80,\n path=b\"/\",\n )",
"def test_host_from_uri(self):\n self.assertEqual(host_from_uri(u'http://a.b-c.com:8080'), (u'a.b-c.com', u'8080'))\n self.assertEqual(host_from_uri(u'https://a.b.com:8080'), (u'a.b.com', u'8080'))\n self.assertEqual(host_from_uri(u'http://www.example.com'), (u'www.example.com', u'80'))\n self.assertEqual(host_from_uri(u'https://www.example.com'), (u'www.example.com', u'443'))",
"def test_originFormNoPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"))\n self.assertEqual(b\"/\", uri.originForm)",
"def test_originForm(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST/foo\"))\n self.assertEqual(b\"/foo\", uri.originForm)",
"def get_uri(host, port):\n if port:\n return '%s:%s' % (host, port)\n return host",
"def __init__(self, host, verify_ssl=True):\n self.host = host.rstrip(\"/\")\n if not self.host.startswith(\n \"https://\") and not self.host.startswith(\"http://\"):\n raise GitLabException(\"host should start with https:// or http://\")\n\n self.api_url = self.host + \"/api/v3\"\n self.verify_ssl = verify_ssl",
"def base_url(self: HTTPConnection):\n scheme = 'https' if is_secure(self) else 'http'\n port = '' if self.port == self.default_port else f':{self.port}'\n return f'{scheme}://{self.host}{port}'",
"def test_originFormEmptyPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST/\"))\n self.assertEqual(b\"/\", uri.originForm)",
"def get_uri(self):\n return self.get_protocol() + \"://\" + self.get_host() + \":\" + str(self.get_port())",
"def getUrl(host, port):\n return \"http://\" + host + \":\" + port",
"def __init__ (self, uri_or_host, port=None, path=None):\n\t\tif port is not None:\n\t\t\twarn (\n\t\t\t\t\"Please use the THttpPersist('http://host:port/path') syntax\",\n\t\t\t\tDeprecationWarning,\n\t\t\t\tstacklevel=2)\n\t\t\tself.host = uri_or_host\n\t\t\tself.port = port\n\t\t\tassert path\n\t\t\tself.path = path\n\t\t\tself.scheme = 'http'\n\t\telse:\n\t\t\tparsed = urlparse(uri_or_host)\n\t\t\tself.scheme = parsed.scheme\n\t\t\tassert self.scheme in ('http', 'https')\n\t\t\tif self.scheme == 'http':\n\t\t\t\tself.port = parsed.port or HTTP_PORT\n\t\t\telif self.scheme == 'https':\n\t\t\t\tself.port = parsed.port or HTTPS_PORT\n\t\t\tself.host = parsed.hostname\n\t\t\tself.path = parsed.path\n\t\t\tif parsed.query:\n\t\t\t\tself.path += '?%s' % parsed.query\n\t\tself._bufr = StringIO()\n\t\tself._bufw = StringIO()\n\t\tself._conn = None\n\t\tself._timeout = None\n\t\tself._headers = None",
"def http_uri(self):\n host, port = self.http_address\n return \"http://%s:%s\" % (host, port)",
"def https_uri(self):\n host, port = self.https_address\n return \"https://%s:%s\" % (host, port)",
"def _https(base_uri, *extra):\n\n parts = [str(e) for e in extra]\n str_parts = ''.join(parts)\n str_base = str(base_uri)\n\n if str_base.startswith(\"https://\"):\n return \"{0}{1}\".format(str_base, str_parts)\n\n elif str_base.startswith(\"http://\"):\n return \"{0}{1}\".format(str_base.replace(\"http:\",\"https:\", 1), str_parts)\n\n else:\n return \"https://{0}{1}\".format(str_base, str_parts)",
"def _http(base_uri, *extra):\n\n parts = [str(e) for e in extra]\n str_parts = ''.join(parts)\n str_base = str(base_uri)\n\n if str_base.startswith(\"http://\"):\n return \"{0}{1}\".format(str_base, str_parts)\n\n elif str_base.startswith(\"https://\"):\n return \"{0}{1}\".format(str_base.replace(\"https:\",\"http:\", 1), str_parts)\n\n else:\n return \"http://{0}{1}\".format(str_base, str_parts)",
"def test_portText(self):\n portURL = URL.fromText(u\"http://www.example.com:8080/\")\n self.assertEqual(portURL.port, 8080)\n self.assertEqual(portURL.asText(), u\"http://www.example.com:8080/\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
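The default-port rule asserted in the record above can be sketched with only the standard library; client.URI's actual implementation differs, so treat this as an approximation:

from urllib.parse import urlsplit

DEFAULT_PORTS = {"http": 80, "https": 443}

def effective_port(uri):
    parts = urlsplit(uri)
    # urlsplit reports port=None both when the port is absent and when it
    # is empty ("http://host:"), so fall back to the scheme's default.
    return parts.port if parts.port is not None else DEFAULT_PORTS[parts.scheme]

assert effective_port("http://example.com") == 80
assert effective_port("http://example.com:") == 80
assert effective_port("https://example.com") == 443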
L{client.URI.fromBytes} accepts a C{defaultPort} parameter that overrides the normal default port logic.
|
def test_parseCustomDefaultPort(self):
uri = client.URI.fromBytes(self.makeURIString(b"http://HOST"), defaultPort=5144)
self.assertEqual(5144, uri.port)
uri = client.URI.fromBytes(
self.makeURIString(b"https://HOST"), defaultPort=5144
)
self.assertEqual(5144, uri.port)
|
[
"def test_parseDefaultPort(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"))\n self.assertEqual(80, uri.port)\n # Weird (but commonly accepted) structure uses default port.\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST:\"))\n self.assertEqual(80, uri.port)\n uri = client.URI.fromBytes(self.makeURIString(b\"https://HOST\"))\n self.assertEqual(443, uri.port)",
"def set_uri_port(uri, new_port):\n host, port = HostPortHelper.get_host_port(uri)\n return HostPortHelper.get_uri(host, new_port)",
"def default_port(scheme):\r\n if scheme == \"https\":\r\n return \"443\"\r\n elif scheme == \"http\":\r\n return \"80\"\r\n assert False",
"def _parse_host_and_port(uri, default_port=27017):\n if '://' not in uri:\n return uri, default_port\n\n uri = uri.split('://', 1)[1]\n\n if '/' in uri:\n uri = uri.split('/', 1)[0]\n\n # TODO(pascal): Handle replica sets better. Accessing the secondary hosts\n # should reach the same dataas the primary.\n if ',' in uri:\n uri = uri.split(',', 1)[0]\n\n if ']:' in uri:\n host, uri = uri.split(']:', 1)\n host = host + ']'\n elif ':' in uri and not uri.endswith(']'):\n host, uri = uri.split(':', 1)\n else:\n return uri, default_port\n\n if not uri:\n return uri, default_port\n\n try:\n return host, int(uri)\n except ValueError:\n raise InvalidURI('Invalid URI scheme: could not parse port \"%s\"' % uri)",
"def get_uri(host, port):\n if port:\n return '%s:%s' % (host, port)\n return host",
"def test_emptyPath(self):\n uri = self.makeURIString(b\"http://HOST/\")\n self.assertURIEquals(\n client.URI.fromBytes(uri),\n scheme=b\"http\",\n netloc=self.uriHost,\n host=self.host,\n port=80,\n path=b\"/\",\n )",
"def test_portText(self):\n portURL = URL.fromText(u\"http://www.example.com:8080/\")\n self.assertEqual(portURL.port, 8080)\n self.assertEqual(portURL.asText(), u\"http://www.example.com:8080/\")",
"def test_hostBracketIPv6AddressLiteral(self):\n uri = client.URI.fromBytes(b\"http://[::1]:80/index.html\")\n\n self.assertEqual(uri.host, b\"::1\")\n self.assertEqual(uri.netloc, b\"[::1]:80\")\n self.assertEqual(uri.toBytes(), b\"http://[::1]:80/index.html\")",
"def is_default_port(scheme, port):\r\n return default_port(scheme) == port",
"def set_uri_host(uri, new_host):\n if not new_host:\n return None\n host, port = HostPortHelper.get_host_port(uri)\n return HostPortHelper.get_uri(new_host, port)",
"def test_default_port():\n parser = create_parser()\n parsed_arguments = parser.parse_args([])\n assert parsed_arguments.port == 50000, \"Wrong port\"",
"def test_externalUnicodeInterference(self):\n goodInput = self.makeURIString(b\"http://HOST/path\")\n badInput = goodInput.decode(\"ascii\")\n urlparse(badInput)\n uri = client.URI.fromBytes(goodInput)\n self.assertIsInstance(uri.scheme, bytes)\n self.assertIsInstance(uri.host, bytes)\n self.assertIsInstance(uri.path, bytes)",
"def _make_port(self, port):\n return Port(port)",
"def with_port(self, port):\n # N.B. doesn't cleanup query/fragment\n if port is not None:\n if isinstance(port, bool) or not isinstance(port, int):\n raise TypeError(f\"port should be int or None, got {type(port)}\")\n if port < 0 or port > 65535:\n raise ValueError(f\"port must be between 0 and 65535, got {port}\")\n if not self.is_absolute():\n raise ValueError(\"port replacement is not allowed for relative URLs\")\n val = self._val\n return URL(\n self._val._replace(\n netloc=self._make_netloc(val.username, val.password, val.hostname, port)\n ),\n encoded=True,\n )",
"def create_local_xmlrpc_uri(port):\n # TODO: merge logic in roslib.xmlrpc with this routine\n # in the future we may not want to be locked to http protocol nor root path\n return 'http://%s:%s/' % (get_host_name(), port)",
"def test_host_from_uri(self):\n self.assertEqual(host_from_uri(u'http://a.b-c.com:8080'), (u'a.b-c.com', u'8080'))\n self.assertEqual(host_from_uri(u'https://a.b.com:8080'), (u'a.b.com', u'8080'))\n self.assertEqual(host_from_uri(u'http://www.example.com'), (u'www.example.com', u'80'))\n self.assertEqual(host_from_uri(u'https://www.example.com'), (u'www.example.com', u'443'))",
"def _default_endpoint(self):\n return endpoints.TCP4ServerEndpoint(reactor, self.port,\n interface=self.interface)",
"def _serverFromStringLegacy(reactor, description, default):\n nameOrPlugin, args, kw = _parseServer(description, None, default)\n if type(nameOrPlugin) is not str:\n plugin = nameOrPlugin\n return plugin.parseStreamServer(reactor, *args, **kw)\n else:\n name = nameOrPlugin\n # Chop out the factory.\n args = args[:1] + args[2:]\n return _endpointServerFactories[name](reactor, *args, **kw)",
"def split_host_port(value: str, default_port: Optional[int]) -> Tuple[str, int]:\n t = value.rsplit(':', 1)\n # If *value* contains ``:`` we consider it to be an IPv6 address, so we attempt to remove possible square brackets\n if ':' in t[0]:\n t[0] = ','.join([h.strip().strip('[]') for h in t[0].split(',')])\n t.append(str(default_port))\n return t[0], int(t[1])"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
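Extending the sketch from the previous record: an explicit default takes precedence over the scheme-based fallback, mirroring the C{defaultPort} argument exercised above. Again an approximation, not Twisted's code:

from urllib.parse import urlsplit

DEFAULT_PORTS = {"http": 80, "https": 443}

def effective_port(uri, default_port=None):
    parts = urlsplit(uri)
    if parts.port is not None:
        return parts.port        # an explicit port always wins
    if default_port is not None:
        return default_port      # caller-supplied default, like defaultPort=
    return DEFAULT_PORTS[parts.scheme]

assert effective_port("http://example.com", default_port=5144) == 5144
assert effective_port("https://example.com", default_port=5144) == 5144
assert effective_port("http://example.com:8080", default_port=5144) == 8080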
The path of a I{URI} with an empty path is C{b'/'}.
|
def test_emptyPath(self):
uri = self.makeURIString(b"http://HOST/")
self.assertURIEquals(
client.URI.fromBytes(uri),
scheme=b"http",
netloc=self.uriHost,
host=self.host,
port=80,
path=b"/",
)
|
[
"def test_originFormEmptyPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST/\"))\n self.assertEqual(b\"/\", uri.originForm)",
"def test_originFormNoPath(self):\n uri = client.URI.fromBytes(self.makeURIString(b\"http://HOST\"))\n self.assertEqual(b\"/\", uri.originForm)",
"def test_empty(self):\n self.assertEqual(URL().asText(), u'')",
"def _path_from_uri(uri):\n uri = re.sub('^(\\.\\./)*', '', uri)\n return uri",
"def _stripLeadingSlash(path: str) -> str:\n if len(path) >= 1 and path[0] == \"/\":\n return path[1:]\n else:\n return path",
"def abs_noslash(self):\n p = os.path.abspath(self)\n if p.endswith('/') and p not in ('/', '~/'):\n return p[:-1]\n return p",
"def pathfromuri(uri):\n\n\t\taddress = urlsplit(uri)\n\t\tbase = address.netloc\n\n\t\tif address.path != '':\n\n\t\t\t# remove first slash\n\t\t\tif base == '' and address.path[0:1] == '/':\n\t\t\t\tpath = address.path[1:]\n\t\t\telse:\n\t\t\t\tpath = address.path\n\n\t\t\t# don't underscore a directory type path\n\t\t\tif path[-1] == '/':\n\t\t\t\tpath = re.sub('/', '_', path[:-1])\n\t\t\telse:\n\t\t\t\tpath = re.sub('/', '_', path)\n\n\t\t\tbase += path\n\n\t\tif address.query != '':\n\t\t\tquery = re.sub('&', '-', address.query)\n\t\t\tbase += '+' + query\n\n\t\treturn base",
"def get_uri(self):\n return self.host + '/' + self.get_path().lstrip('/')",
"def _is_absolute_uri(self, uri):\n return uri.startswith(\"/\")",
"def build_url(self):\n url = super().build_url()\n if '/None/' in url:\n return url.replace('/None/', '/')\n else:\n return url",
"def build_url(self):\n url = super().build_url()\n if '/None/' in url:\n return url.replace('/None/', '')\n else:\n return url",
"def clean_uri(uri):\n # type: (str) -> str\n return re.sub(r\"\\\\/\", r\"/\", uri)",
"def test_pathurl_argument_is_skipped(self):\n f = File()\n self.assertEqual('', f.pathurl)",
"def path(self):\n path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '',\n self.charset, self.encoding_errors)\n return path.lstrip('/')",
"def urljoin_bytes(*atoms):\n url = b'/'.join([x for x in atoms if x])\n while b'//' in url:\n url = url.replace(b'//', b'/')\n # Special-case the final url of \"\", and return \"/\" instead.\n return url or b'/'",
"def to_fs_path(uri):\n # scheme://netloc/path;parameters?query#fragment\n scheme, netloc, path, _params, _query, _fragment = urlparse(uri)\n\n if netloc and path and scheme == 'file':\n # unc path: file://shares/c$/far/boo\n value = \"//{}{}\".format(netloc, path)\n\n elif RE_DRIVE_LETTER_PATH.match(path):\n # windows drive letter: file:///C:/far/boo\n value = path[1].lower() + path[2:]\n\n else:\n # Other path\n value = path\n\n if IS_WIN:\n value = value.replace('/', '\\\\')\n\n return value",
"def uri_to_path(uri):\n return path.uri_to_path(uri)",
"def _clean_path(path, path_prefix=\"\"):\n if path != path_prefix + '/' and path.endswith('/'):\n return path[:-1]\n return path",
"def GetBase(self):\n if self.baseURI is None:\n return None\n else:\n return str(self.baseURI)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
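The normalisation asserted in the record above (an absent or empty path becomes C{b'/'}) can be sketched as follows; this is an illustration built on urlsplit, not client.URI's implementation:

from urllib.parse import urlsplit

def request_path(uri: bytes) -> bytes:
    # urlsplit accepts bytes and returns bytes components; an empty path
    # is normalised to b"/", matching what the test asserts.
    return urlsplit(uri).path or b"/"

assert request_path(b"http://example.com") == b"/"
assert request_path(b"http://example.com/") == b"/"
assert request_path(b"http://example.com/foo") == b"/foo"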