input
stringlengths
11
7.65k
target
stringlengths
22
8.26k
def dist(a, b):
    """Return the squared Euclidean distance between vectors *a* and *b*."""
    total = 0
    for x, y in zip(a, b):
        delta = x - y
        total += delta * delta
    return total
def deserialize(self, value):
    """Decode a raw config value into an _ExpandedPath, or None if empty."""
    # Decode raw bytes/str and strip surrounding whitespace first.
    value = decode(value).strip()
    # Expand the path (e.g. "~", variables); may yield None on failure.
    expanded = path.expand_path(value)
    # Required-ness is enforced on both the raw and the expanded form.
    validators.validate_required(value, self._required)
    validators.validate_required(expanded, self._required)
    if not value or expanded is None:
        return None
    return _ExpandedPath(value, expanded)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, msg):
    # Plain message-carrying error; delegate storage to the base class.
    super(TransactionError, self).__init__(msg)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, branch, package):
    # Record which distgit branch/package this instance targets;
    # sp_obj (the parsed spec) is populated later by parse().
    Base.__init__(self)
    self.branch = branch
    self.package = package
    self.sp_obj = None
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, filename, errors):
    """
    :param filename: The name of the transaction file being replayed
    :param errors: a list of error classes or a string with an error description
    """
    # store args in case someone wants to read them from a caught exception
    self.filename = filename
    # Normalize so self.errors is always iterable.
    if isinstance(errors, (list, tuple)):
        self.errors = errors
    else:
        self.errors = [errors]

    if filename:
        # BUG FIX: the template contained a literal "(unknown)" instead of
        # the {filename} placeholder, so .format(filename=...) was a no-op
        # and the file name never appeared in the message.
        msg = _('The following problems occurred while replaying the transaction from file "{filename}":').format(filename=filename)
    else:
        msg = _('The following problems occurred while running a transaction:')
    for error in self.errors:
        msg += "\n " + str(error)
    super(TransactionReplayError, self).__init__(msg)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def parse(self):
    """Download this package's spec file and parse it.

    :returns: True on success; False on parse failure (error text stored
        in self.err).
    """
    f = tempfile.NamedTemporaryFile(delete=True)
    try:
        # NOTE(review): the package name is interpolated into a shell
        # command line; safe only while it comes from trusted config —
        # verify it can never be user-supplied.
        cmd_str = "curl http://pkgs.fedoraproject.org/cgit/rpms/%s.git/plain/%s.spec > %s"
        runCommand(cmd_str % (self.package, self.package, f.name))

        self.sp_obj = SpecParser(f.name)
        if not self.sp_obj.parse():
            self.err = self.sp_obj.getError()
            return False
        return True
    finally:
        # FIX: close exactly once on every path (was duplicated on both
        # the success and failure branches, and leaked on exceptions).
        f.close()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, filename, msg):
    # Same shape as TransactionReplayError: the file plus one message.
    super(IncompatibleTransactionVersionError, self).__init__(filename, msg)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def getProvides(self):
    """Fetch a spec file from pkgdb and get provides from all its [sub]packages"""
    # FIX: identity test with 'is None' (PEP 8) instead of '== None'.
    if self.sp_obj is None:
        return {}
    return self.sp_obj.getProvides()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _check_version(version, filename):
    """Validate a transaction file's "major.minor" version string.

    :raises TransactionReplayError: if either component is not a number
    :raises IncompatibleTransactionVersionError: if the major version is
        not the supported VERSION_MAJOR
    """
    major, minor = version.split('.')

    try:
        major = int(major)
    except ValueError:
        # FIX: dropped the unused 'as e' binding; 'from None' suppresses
        # the redundant implicit context since the message is complete.
        raise TransactionReplayError(
            filename,
            _('Invalid major version "{major}", number expected.').format(major=major)
        ) from None

    try:
        int(minor)  # minor is unused, just check it's a number
    except ValueError:
        raise TransactionReplayError(
            filename,
            _('Invalid minor version "{minor}", number expected.').format(minor=minor)
        ) from None

    if major != VERSION_MAJOR:
        raise IncompatibleTransactionVersionError(
            filename,
            _('Incompatible major version "{major}", supported major version is "{major_supp}".')
            .format(major=major, major_supp=VERSION_MAJOR)
        )
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def getPackageCommits(self):
    """Return the spec file's "commit" macro, or "" if no spec is loaded."""
    # FIX: identity test with 'is None' (PEP 8) instead of '== None'.
    if self.sp_obj is None:
        return ""
    return self.sp_obj.getMacro("commit")
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def serialize_transaction(transaction):
    """
    Serializes a transaction to a data structure that is equivalent to the stored JSON format.

    :param transaction: the transaction to serialize (an instance of dnf.db.history.TransactionWrapper)
    """
    data = {
        "version": VERSION,
    }
    rpms = []
    groups = []
    environments = []

    # A missing transaction serializes to just the version stamp.
    if transaction is None:
        return data

    # Each transaction item is exactly one of: rpm, comps group, comps env.
    for tsi in transaction.packages():
        if tsi.is_package():
            rpms.append({
                "action": tsi.action_name,
                "nevra": tsi.nevra,
                "reason": libdnf.transaction.TransactionItemReasonToString(tsi.reason),
                "repo_id": tsi.from_repo
            })
        elif tsi.is_group():
            group = tsi.get_group()
            group_data = {
                "action": tsi.action_name,
                "id": group.getGroupId(),
                "packages": [],
                "package_types": libdnf.transaction.compsPackageTypeToString(group.getPackageTypes())
            }
            for pkg in group.getPackages():
                group_data["packages"].append({
                    "name": pkg.getName(),
                    "installed": pkg.getInstalled(),
                    "package_type": libdnf.transaction.compsPackageTypeToString(pkg.getPackageType())
                })
            groups.append(group_data)
        elif tsi.is_environment():
            env = tsi.get_environment()
            env_data = {
                "action": tsi.action_name,
                "id": env.getEnvironmentId(),
                "groups": [],
                "package_types": libdnf.transaction.compsPackageTypeToString(env.getPackageTypes())
            }
            for grp in env.getGroups():
                env_data["groups"].append({
                    "id": grp.getGroupId(),
                    "installed": grp.getInstalled(),
                    "group_type": libdnf.transaction.compsPackageTypeToString(grp.getGroupType())
                })
            environments.append(env_data)

    # Only emit non-empty sections to keep the stored JSON minimal.
    if rpms:
        data["rpms"] = rpms
    if groups:
        data["groups"] = groups
    if environments:
        data["environments"] = environments

    return data
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(
    self,
    base,
    filename="",
    data=None,
    ignore_extras=False,
    ignore_installed=False,
    skip_unavailable=False
):
    """
    :param base: the dnf base
    :param filename: the filename to load the transaction from (conflicts with the 'data' argument)
    :param data: the dictionary to load the transaction from (conflicts with the 'filename' argument)
    :param ignore_extras: whether to ignore extra package pulled into the transaction
    :param ignore_installed: whether to ignore installed versions of packages
    :param skip_unavailable: whether to skip transaction packages that aren't available
    """
    self._base = base
    self._filename = filename
    self._ignore_installed = ignore_installed
    self._ignore_extras = ignore_extras
    self._skip_unavailable = skip_unavailable
    # In non-strict mode unavailable packages are always skippable.
    if not self._base.conf.strict:
        self._skip_unavailable = True

    # Replay caches: NEVRAs seen, their reasons, and non-fatal warnings.
    self._nevra_cache = set()
    self._nevra_reason_cache = {}
    self._warnings = []

    if filename and data:
        raise ValueError(_("Conflicting TransactionReplay arguments have been specified: filename, data"))
    elif filename:
        self._load_from_file(filename)
    else:
        # NOTE(review): with neither argument this calls
        # _load_from_data(None) — presumably callers always pass one; verify.
        self._load_from_data(data)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _load_from_file(self, fn):
    """Load the replay transaction from JSON file *fn*."""
    self._filename = fn
    with open(fn, "r") as src:
        try:
            parsed = json.load(src)
        except json.decoder.JSONDecodeError as err:
            raise TransactionReplayError(fn, str(err) + ".")
    try:
        self._load_from_data(parsed)
    except TransactionError as err:
        raise TransactionReplayError(fn, err)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _load_from_data(self, data):
    """Validate *data* and cache its rpms/groups/environments sections."""
    self._replay_data = data
    self._verify_toplevel_json(self._replay_data)

    # Each optional top-level section defaults to an empty list and must
    # be a JSON array.
    for attr, key in (("_rpms", "rpms"),
                      ("_groups", "groups"),
                      ("_environments", "environments")):
        section = self._replay_data.get(key, [])
        self._assert_type(section, list, key, "array")
        setattr(self, attr, section)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _raise_or_warn(self, warn_only, msg):
    """Record *msg* as a warning when *warn_only*, otherwise raise it."""
    if not warn_only:
        raise TransactionError(msg)
    self._warnings.append(msg)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _assert_type(self, value, t, id, expected):
    """Raise TransactionError unless *value* is an instance of *t*."""
    if isinstance(value, t):
        return
    raise TransactionError(_('Unexpected type of "{id}", {exp} expected.').format(id=id, exp=expected))
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _verify_toplevel_json(self, replay_data):
    """Check that *replay_data* carries a valid, supported "version" key."""
    fn = self._filename
    if "version" not in replay_data:
        # BUG FIX: .format() was applied to the msgid inside _(), so the
        # translated catalog template was never used. Format the result
        # of _() instead.
        raise TransactionReplayError(fn, _('Missing key "{key}".').format(key="version"))
    self._assert_type(replay_data["version"], str, "version", "string")
    _check_version(replay_data["version"], fn)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _replay_pkg_action(self, pkg_data):
    """Queue one rpm action described by *pkg_data* onto the base goal.

    :raises TransactionError: on malformed data or unknown action values
    """
    try:
        action = pkg_data["action"]
        nevra = pkg_data["nevra"]
        repo_id = pkg_data["repo_id"]
        reason = libdnf.transaction.StringToTransactionItemReason(pkg_data["reason"])
    except KeyError as e:
        raise TransactionError(
            _('Missing object key "{key}" in an rpm.').format(key=e.args[0])
        )
    except IndexError as e:
        # StringToTransactionItemReason raises IndexError on unknown reasons.
        raise TransactionError(
            _('Unexpected value of package reason "{reason}" for rpm nevra "{nevra}".')
            .format(reason=pkg_data["reason"], nevra=nevra)
        )

    subj = hawkey.Subject(nevra)
    parsed_nevras = subj.get_nevra_possibilities(forms=[hawkey.FORM_NEVRA])

    if len(parsed_nevras) != 1:
        raise TransactionError(_('Cannot parse NEVRA for package "{nevra}".').format(nevra=nevra))

    parsed_nevra = parsed_nevras[0]
    na = "%s.%s" % (parsed_nevra.name, parsed_nevra.arch)

    query_na = self._base.sack.query().filter(name=parsed_nevra.name, arch=parsed_nevra.arch)

    epoch = parsed_nevra.epoch if parsed_nevra.epoch is not None else 0
    query = query_na.filter(epoch=epoch, version=parsed_nevra.version, release=parsed_nevra.release)

    # In case the package is found in the same repo as in the original
    # transaction, limit the query to that plus installed packages. IOW
    # remove packages with the same NEVRA in case they are found in
    # multiple repos and the repo the package came from originally is one
    # of them.
    # This can e.g. make a difference in the system-upgrade plugin, in case
    # the same NEVRA is in two repos, this makes sure the same repo is used
    # for both download and upgrade steps of the plugin.
    if repo_id:
        query_repo = query.filter(reponame=repo_id)
        if query_repo:
            query = query_repo.union(query.installed())

    if not query:
        self._raise_or_warn(self._skip_unavailable, _('Cannot find rpm nevra "{nevra}".').format(nevra=nevra))
        return

    # a cache to check no extra packages were pulled into the transaction
    if action != "Reason Change":
        self._nevra_cache.add(nevra)

    # store reasons for forward actions and "Removed", the rest of the
    # actions reasons should stay as they were determined by the transaction
    if action in ("Install", "Upgrade", "Downgrade", "Reinstall", "Removed"):
        self._nevra_reason_cache[nevra] = reason

    if action in ("Install", "Upgrade", "Downgrade"):
        if action == "Install" and query_na.installed() and not self._base._get_installonly_query(query_na):
            self._raise_or_warn(self._ignore_installed,
                                _('Package "{na}" is already installed for action "{action}".').format(na=na, action=action))

        sltr = dnf.selector.Selector(self._base.sack).set(pkg=query)
        self._base.goal.install(select=sltr, optional=not self._base.conf.strict)
    elif action == "Reinstall":
        query = query.available()

        if not query:
            self._raise_or_warn(self._skip_unavailable,
                                _('Package nevra "{nevra}" not available in repositories for action "{action}".')
                                .format(nevra=nevra, action=action))
            return

        sltr = dnf.selector.Selector(self._base.sack).set(pkg=query)
        self._base.goal.install(select=sltr, optional=not self._base.conf.strict)
    elif action in ("Upgraded", "Downgraded", "Reinstalled", "Removed", "Obsoleted"):
        query = query.installed()

        if not query:
            self._raise_or_warn(self._ignore_installed,
                                _('Package nevra "{nevra}" not installed for action "{action}".').format(nevra=nevra, action=action))
            return

        # erasing the original version (the reverse part of an action like
        # e.g. upgrade) is more robust, but we can't do it if
        # skip_unavailable is True, because if the forward part of the
        # action is skipped, we would simply remove the package here
        if not self._skip_unavailable or action == "Removed":
            for pkg in query:
                self._base.goal.erase(pkg, clean_deps=False)
    elif action == "Reason Change":
        self._base.history.set_reason(query[0], reason)
    else:
        raise TransactionError(
            _('Unexpected value of package action "{action}" for rpm nevra "{nevra}".')
            .format(action=action, nevra=nevra)
        )
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _create_swdb_group(self, group_id, pkg_types, pkgs):
    """Build a swdb group object from replay data.

    :returns: the new swdb group, or None (warn instead of raise) when the
        comps group is not available in the loaded repositories
    :raises TransactionError: on malformed package entries
    """
    comps_group = self._base.comps._group_by_id(group_id)
    if not comps_group:
        self._raise_or_warn(self._skip_unavailable, _("Group id '%s' is not available.") % group_id)
        return None

    swdb_group = self._base.history.group.new(group_id, comps_group.name, comps_group.ui_name, pkg_types)

    try:
        for pkg in pkgs:
            name = pkg["name"]
            self._assert_type(name, str, "groups.packages.name", "string")
            installed = pkg["installed"]
            self._assert_type(installed, bool, "groups.packages.installed", "boolean")
            package_type = pkg["package_type"]
            self._assert_type(package_type, str, "groups.packages.package_type", "string")

            try:
                swdb_group.addPackage(name, installed, libdnf.transaction.stringToCompsPackageType(package_type))
            except libdnf.error.Error as e:
                raise TransactionError(str(e))
    except KeyError as e:
        raise TransactionError(
            _('Missing object key "{key}" in groups.packages.').format(key=e.args[0])
        )

    return swdb_group
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _swdb_group_install(self, group_id, pkg_types, pkgs): swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.install(swdb_group)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _swdb_group_upgrade(self, group_id, pkg_types, pkgs): if not self._base.history.group.get(group_id): self._raise_or_warn( self._ignore_installed, _("Group id '%s' is not installed.") % group_id) return swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.upgrade(swdb_group)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _swdb_group_remove(self, group_id, pkg_types, pkgs): if not self._base.history.group.get(group_id): self._raise_or_warn(self._ignore_installed, _("Group id '%s' is not installed.") % group_id) return swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.remove(swdb_group)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _create_swdb_environment(self, env_id, pkg_types, groups):
    """Build a swdb environment object from replay data.

    :returns: the new swdb environment, or None (warn instead of raise)
        when the comps environment is not available
    :raises TransactionError: on malformed group entries
    """
    comps_env = self._base.comps._environment_by_id(env_id)
    if not comps_env:
        self._raise_or_warn(self._skip_unavailable, _("Environment id '%s' is not available.") % env_id)
        return None

    swdb_env = self._base.history.env.new(env_id, comps_env.name, comps_env.ui_name, pkg_types)

    try:
        for grp in groups:
            # FIX: renamed local 'id' -> 'group_id' to stop shadowing the
            # builtin id().
            group_id = grp["id"]
            self._assert_type(group_id, str, "environments.groups.id", "string")
            installed = grp["installed"]
            self._assert_type(installed, bool, "environments.groups.installed", "boolean")
            group_type = grp["group_type"]
            self._assert_type(group_type, str, "environments.groups.group_type", "string")

            try:
                group_type = libdnf.transaction.stringToCompsPackageType(group_type)
            except libdnf.error.Error as e:
                raise TransactionError(str(e))

            # Only mandatory/optional group types are representable here.
            if group_type not in (
                libdnf.transaction.CompsPackageType_MANDATORY,
                libdnf.transaction.CompsPackageType_OPTIONAL
            ):
                raise TransactionError(
                    _('Invalid value "{group_type}" of environments.groups.group_type, '
                      'only "mandatory" or "optional" is supported.'
                      ).format(group_type=grp["group_type"])
                )

            swdb_env.addGroup(group_id, installed, group_type)
    except KeyError as e:
        raise TransactionError(
            _('Missing object key "{key}" in environments.groups.').format(key=e.args[0])
        )

    return swdb_env
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _swdb_environment_install(self, env_id, pkg_types, groups): swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.install(swdb_env)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _swdb_environment_upgrade(self, env_id, pkg_types, groups): if not self._base.history.env.get(env_id): self._raise_or_warn(self._ignore_installed,_("Environment id '%s' is not installed.") % env_id) return swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.upgrade(swdb_env)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _swdb_environment_remove(self, env_id, pkg_types, groups): if not self._base.history.env.get(env_id): self._raise_or_warn(self._ignore_installed, _("Environment id '%s' is not installed.") % env_id) return swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.remove(swdb_env)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def get_data(self): """ :returns: the loaded data of the transaction """ return self._replay_data
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def get_warnings(self): """ :returns: an array of warnings gathered during the transaction replay """ return self._warnings
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def run(self): """ Replays the transaction. """ fn = self._filename errors = [] for pkg_data in self._rpms: try: self._replay_pkg_action(pkg_data) except TransactionError as e: errors.append(e) for group_data in self._groups: try: action = group_data["action"] group_id = group_data["id"] try: pkg_types = libdnf.transaction.stringToCompsPackageType(group_data["package_types"]) except libdnf.error.Error as e: errors.append(TransactionError(str(e))) continue if action == "Install": self._swdb_group_install(group_id, pkg_types, group_data["packages"]) elif action == "Upgrade": self._swdb_group_upgrade(group_id, pkg_types, group_data["packages"]) elif action == "Removed": self._swdb_group_remove(group_id, pkg_types, group_data["packages"]) else: errors.append(TransactionError( _('Unexpected value of group action "{action}" for group "{group}".') .format(action=action, group=group_id) )) except KeyError as e: errors.append(TransactionError( _('Missing object key "{key}" in a group.').format(key=e.args[0]) )) except TransactionError as e: errors.append(e) for env_data in self._environments: try: action = env_data["action"] env_id = env_data["id"] try: pkg_types = libdnf.transaction.stringToCompsPackageType(env_data["package_types"]) except libdnf.error.Error as e: errors.append(TransactionError(str(e))) continue if action == "Install": self._swdb_environment_install(env_id, pkg_types, env_data["groups"]) elif action == "Upgrade": self._swdb_environment_upgrade(env_id, pkg_types, env_data["groups"]) elif action == "Removed": self._swdb_environment_remove(env_id, pkg_types, env_data["groups"]) else: errors.append(TransactionError( _('Unexpected value of environment action "{action}" for environment "{env}".') .format(action=action, env=env_id) )) except KeyError as e: errors.append(TransactionError( _('Missing object key "{key}" in an environment.').format(key=e.args[0]) )) except TransactionError as e: errors.append(e) if errors: raise TransactionReplayError(fn, errors)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def has_ext_modules(self): return True
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
    """Create a CLI wrapper around an SSH session to *host* as *loginUser*."""
    # Allow injecting a pexpect substitute for testing.
    self.pexpect = pexpect if not pexpectObject else pexpectObject
    self.debug = debug
    self.trace = trace
    self.host = host
    self._port = port
    self._connection = None          # pexpect spawn; set by connectWithSsh()
    self.modeList = []               # stack of entered CLI modes
    self._log = log
    self._bufferedCommands = None    # commands queued by send() until flush()
    self._bufferedMode = None
    self._loginUser = loginUser
    self._resetExpect()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def finalize_options(self): ret = InstallCommandBase.finalize_options(self) self.install_headers = os.path.join(self.install_purelib, 'tensorflow', 'include') return ret
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __del__(self):
    # Best-effort cleanup: tear down the SSH session when the wrapper is
    # garbage collected.
    self.closeCliConnectionTo()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def initialize_options(self):
    """Reset the command's options to their pre-parse defaults."""
    self.outfiles = []
    self.force = 0
    self.install_dir = None
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def showOutputOnScreen(self):
    """Enable verbose console output and rebuild the logger."""
    self.trace = True
    self.debug = True
    self._log = None
    self._setupLog()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def finalize_options(self): self.set_undefined_options('install', ('install_headers', 'install_dir'), ('force', 'force'))
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def connectWithSsh(self): self._debugLog("Establishing connection to " + self.host) self._connection = self.pexpect.spawn( 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' % (self._loginUser.username, self.host, self._port)) if self._connection is None: raise Exception("Unable to connect via SSH perhaps wrong IP!") self._secure = True self._setupLog() self._loginUser.commandLine(self) self.modeList = [self._loginUser]
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def mkdir_and_copy_file(self, header): install_dir = os.path.join(self.install_dir, os.path.dirname(header)) # Get rid of some extra intervening directories so we can have fewer # directories for -I install_dir = re.sub('/google/protobuf_archive/src', '', install_dir) # Copy eigen code into tensorflow/include. # A symlink would do, but the wheel file that gets created ignores # symlink within the directory hierarchy. # NOTE(keveman): Figure out how to customize bdist_wheel package so # we can do the symlink. if 'external/eigen_archive/' in install_dir: extra_dir = install_dir.replace('external/eigen_archive', '') if not os.path.exists(extra_dir): self.mkpath(extra_dir) self.copy_file(header, extra_dir) if not os.path.exists(install_dir): self.mkpath(install_dir) return self.copy_file(header, install_dir)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def resetLoggingTo(self, log): self._connection.logfile = log
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def run(self): hdrs = self.distribution.headers if not hdrs: return self.mkpath(self.install_dir) for header in hdrs: (out, _) = self.mkdir_and_copy_file(header) self.outfiles.append(out)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def write(self, s): sys.stdout.buffer.write(s)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def get_inputs(self): return self.distribution.headers or []
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def flush(self): sys.stdout.flush()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def get_outputs(self): return self.outfiles
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def loginSsh(self): self._setupLog() self._debugLog("Login in as "+self._loginUser.username) try: self._loginUser.sendPassword() return True except Exception as e: self.forceCloseCliConnectionTo() raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def find_files(pattern, root):
    """Return all the files matching pattern below root dir."""
    for dirpath, _dirs, filenames in os.walk(root):
        for name in fnmatch.filter(filenames, pattern):
            yield os.path.join(dirpath, name)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _exit_modes_beyond(self, thisMode): if not self.modeList: return while len(self.modeList) > thisMode + 1: self.modeList.pop().exit()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def exitMode(self, mode):
    """Drop *mode* from the mode stack if it is currently recorded."""
    try:
        self.modeList.remove(mode)
    except ValueError:
        # Mode was not on the stack — nothing to do.
        pass
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def check_prereq(self, prereqMode = 0):
    """Ensure the CLI is currently in (at least) *prereqMode*.

    Exits any modes deeper than the prerequisite, then verifies the
    prerequisite mode itself is still on the mode stack.

    :raises Exception: if the prerequisite mode was never entered
    """
    self._exit_modes_beyond(prereqMode)
    if len(self.modeList) <= prereqMode:
        # FIX: corrected error-message typo "prerequist" -> "prerequisite".
        raise Exception("Attempted to enter menu when prerequisite mode was not entered, expected: %d" % prereqMode)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def execute_as(self, user): self.check_prereq(self.LOGGED_IN) self._exit_modes_beyond(self.LOGGED_IN) user.commandLine(self) user.login() self.modeList.append(user) return user
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def closeCliConnectionTo(self):
    """Exit all CLI modes and close the SSH connection, if one is open."""
    # FIX: identity test with 'is None' (PEP 8) instead of '== None',
    # which could invoke an arbitrary __eq__ on the spawn object.
    if self._connection is None:
        return
    self._exit_modes_beyond(-1)
    self.modeList = []
    self._debugLog("Exited all modes.")
    self.forceCloseCliConnectionTo()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def forceCloseCliConnectionTo(self): self.modeList = None if self._connection: self._debugLog("Closing connection.") self._connection.close() self._connection = None
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _debugLog(self, message): if self.debug: print(message)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _resetExpect(self): self.previousExpectLine = "" if self._connection is not None and isinstance(self._connection.buffer, str): self.previousExpectLine = self._connection.buffer self._connection.buffer = ""
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _lastExpect(self): constructLine = self.previousExpectLine if self._connection is not None and isinstance(self._connection.before, str): constructLine += self._connection.before if self._connection is not None and isinstance(self._connection.after, str): constructLine += self._connection.after return constructLine
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def send(self, command):
    """Queue *command*; buffered commands are joined with newlines."""
    if self._bufferedCommands is None:
        self._bufferedCommands = command
    else:
        self._bufferedCommands = self._bufferedCommands + "\n" + command
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def flush(self):
    """Send queued commands down the connection and clear the buffer."""
    pending = self._bufferedCommands
    if pending is None:
        return
    self._connection.sendline(str(pending))
    self._bufferedCommands = None
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def buffering(self): return self._bufferedMode
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def bufferedMode(self, mode = True): if mode is None: self.flush() self._bufferedMode = mode
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def create_node(self, *args, **kwargs): nbparam = len(args) + len(kwargs) assert nbparam in (0, len(Fields)), \ "Bad argument number for {}: {}, expecting {}".\ format(Name, nbparam, len(Fields)) self._fields = Fields self._attributes = Attributes for argname, argval in zip(self._fields, args): setattr(self, argname, argval) for argname, argval in kwargs.items(): assert argname in Fields, \ "Invalid Keyword argument for {}: {}".format(Name, argname) setattr(self, argname, argval)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def parse(*args, **kwargs): return ast_to_gast(_ast.parse(*args, **kwargs))
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def literal_eval(node_or_string): if isinstance(node_or_string, AST): node_or_string = gast_to_ast(node_or_string) return _ast.literal_eval(node_or_string)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def get_dpo_proto(addr): if ip_address(addr).version == 6: return DpoProto.DPO_PROTO_IP6 else: return DpoProto.DPO_PROTO_IP4
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, addr): self.addr = addr self.ip_addr = ip_address(text_type(self.addr))
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def encode(self):
    """Encode the address as a VPP API union keyed by IP version."""
    key = 'ip6' if self.version == 6 else 'ip4'
    return {key: self.ip_addr}
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def version(self): return self.ip_addr.version
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def address(self): return self.addr
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def length(self): return self.ip_addr.max_prefixlen
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def bytes(self): return self.ip_addr.packed
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __eq__(self, other):
    """Compare with another address union or a vl_api_address_union_t."""
    if isinstance(other, self.__class__):
        return self.ip_addr == other.ip_addr
    elif hasattr(other, "ip4") and hasattr(other, "ip6"):
        # vl_api_address_union_t
        if 4 == self.version:
            return self.ip_addr == other.ip4
        else:
            return self.ip_addr == other.ip6
    else:
        # BUG FIX: the message and args were passed as separate Exception
        # arguments and never %-interpolated; format explicitly.
        raise Exception("Comparing VppIpAddressUnions:%s"
                        " with incomparable type: %s" % (self, other))
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __ne__(self, other): return not (self == other)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __str__(self): return str(self.ip_addr)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, saddr, gaddr, glen): self.saddr = saddr self.gaddr = gaddr self.glen = glen if ip_address(self.saddr).version != \ ip_address(self.gaddr).version: raise ValueError('Source and group addresses must be of the ' 'same address family.')
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def encode(self): return { 'af': ip_address(self.gaddr).vapi_af, 'grp_address': { ip_address(self.gaddr).vapi_af_name: self.gaddr }, 'src_address': { ip_address(self.saddr).vapi_af_name: self.saddr }, 'grp_address_length': self.glen, }
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def length(self):
    """Return the group prefix length."""
    return self.glen
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def version(self): return ip_address(self.gaddr).version
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __str__(self): return "(%s,%s)/%d" % (self.saddr, self.gaddr, self.glen)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __eq__(self, other):
    """Compare with another (S,G) prefix or a vl_api_mprefix_t."""
    if isinstance(other, self.__class__):
        # BUG FIX: the original compared self.saddr against other.gaddr,
        # so the group address was never actually compared and mroutes
        # with different groups could compare equal.
        return (self.glen == other.glen and
                self.gaddr == other.gaddr and
                self.saddr == other.saddr)
    elif (hasattr(other, "grp_address_length") and
          hasattr(other, "grp_address") and
          hasattr(other, "src_address")):
        # vl_api_mprefix_t
        if 4 == self.version:
            return (self.glen == other.grp_address_length and
                    self.gaddr == str(other.grp_address.ip4) and
                    self.saddr == str(other.src_address.ip4))
        else:
            return (self.glen == other.grp_address_length and
                    self.gaddr == str(other.grp_address.ip6) and
                    self.saddr == str(other.src_address.ip6))
    return NotImplemented
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, test, policer_index, is_ip6=False): self._test = test self._policer_index = policer_index self._is_ip6 = is_ip6
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def add_vpp_config(self): self._test.vapi.ip_punt_police(policer_index=self._policer_index, is_ip6=self._is_ip6, is_add=True)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def remove_vpp_config(self): self._test.vapi.ip_punt_police(policer_index=self._policer_index, is_ip6=self._is_ip6, is_add=False)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, test, rx_index, tx_index, nh_addr): self._test = test self._rx_index = rx_index self._tx_index = tx_index self._nh_addr = ip_address(nh_addr)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def encode(self): return {"rx_sw_if_index": self._rx_index, "tx_sw_if_index": self._tx_index, "nh": self._nh_addr}
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def add_vpp_config(self): self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=True) self._test.registry.register(self, self._test.logger)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def remove_vpp_config(self): self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=False)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def get_vpp_config(self): is_ipv6 = True if self._nh_addr.version == 6 else False return self._test.vapi.ip_punt_redirect_dump( sw_if_index=self._rx_index, is_ipv6=is_ipv6)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def query_vpp_config(self): if self.get_vpp_config(): return True return False
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, test, nh, pmtu, table_id=0): self._test = test self.nh = nh self.pmtu = pmtu self.table_id = table_id
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def add_vpp_config(self): self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh, 'table_id': self.table_id, 'path_mtu': self.pmtu}) self._test.registry.register(self, self._test.logger) return self
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def modify(self, pmtu): self.pmtu = pmtu self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh, 'table_id': self.table_id, 'path_mtu': self.pmtu}) return self
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def remove_vpp_config(self): self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh, 'table_id': self.table_id, 'path_mtu': 0})
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def query_vpp_config(self): ds = list(self._test.vapi.vpp.details_iter( self._test.vapi.ip_path_mtu_get)) for d in ds: if self.nh == str(d.pmtu.nh) \ and self.table_id == d.pmtu.table_id \ and self.pmtu == d.pmtu.path_mtu: return True return False
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def object_id(self):
    """Return the unique registry key for this path-MTU entry."""
    return "ip-path-mtu-%d-%s-%d" % (self.table_id, self.nh, self.pmtu)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def gaussian(data, mean, covariance): """! @brief Calculates gaussian for dataset using specified mean (mathematical expectation) and variance or covariance in case multi-dimensional data.
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self, sample, amount): """! @brief Constructs EM initializer.
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def initialize(self, init_type = ema_init_type.KMEANS_INITIALIZATION): """! @brief Calculates initial parameters for EM algorithm: means and covariances using specified strategy.
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __calculate_initial_clusters(self, centers): """! @brief Calculate Euclidean distance to each point from the each cluster. @brief Nearest points are captured by according clusters and as a result clusters are updated.
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __calculate_initial_covariances(self, initial_clusters):
    """Compute one covariance matrix per initial cluster.

    Singleton clusters cannot yield a sample covariance, so they get a
    small random jitter matrix instead.

    :returns: list of covariance matrices, one per cluster
    """
    covariances = []
    for initial_cluster in initial_clusters:
        if len(initial_cluster) > 1:
            cluster_sample = [self.__sample[index_point] for index_point in initial_cluster]
            covariances.append(numpy.cov(cluster_sample, rowvar=False))
        else:
            dimension = len(self.__sample[0])
            covariances.append(numpy.zeros((dimension, dimension)) + random.random() / 10.0)

    # BUG FIX: the computed list was built and then discarded; return it.
    return covariances
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __initialize_random(self): initial_means = []
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __initialize_kmeans(self): initial_centers = kmeans_plusplus_initializer(self.__sample, self.__amount).initialize() kmeans_instance = kmeans(self.__sample, initial_centers, ccore = True) kmeans_instance.process()
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def __init__(self): """! @brief Initializes EM observer.