query: string (lengths 9–3.4k)
document: string (lengths 9–87.4k)
metadata: dict
negatives: sequence (lengths 4–101)
negative_scores: sequence (lengths 4–101)
document_score: string (lengths 3–10)
document_rank: string (102 classes)
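Each row pairs a natural-language query with a positive code document, an objective entry in metadata, a list of hard-negative code snippets, and their retrieval scores. Below is a minimal sketch of loading and inspecting rows with this schema via the Hugging Face datasets library; the repository id is a placeholder, not the actual dataset name.

from datasets import load_dataset

# Placeholder repo id -- substitute the real dataset path.
ds = load_dataset("org/code-retrieval-triplets", split="train")

row = ds[0]
print(row["query"])                 # natural-language description of the code
print(len(row["negatives"]))        # between 4 and 101 hard negatives per row
print(row["document_score"], row["document_rank"])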
The following function is used to format numbers. First, the ordinal suffixes "th", "st", "nd", and "rd" are removed when they follow a number.
def clean_numbers(self, x):
    # remove "th" after a number
    matches = re.findall(r'\b\d+\s*th\b', x)
    if len(matches) != 0:
        x = re.sub(r'\s*th\b', " ", x)
    # remove "rd" after a number
    matches = re.findall(r'\b\d+\s*rd\b', x)
    if len(matches) != 0:
        x = re.sub(r'\s*rd\b', " ", x)
    # remove "st" after a number
    matches = re.findall(r'\b\d+\s*st\b', x)
    if len(matches) != 0:
        x = re.sub(r'\s*st\b', " ", x)
    # remove "nd" after a number
    matches = re.findall(r'\b\d+\s*nd\b', x)
    if len(matches) != 0:
        x = re.sub(r'\s*nd\b', " ", x)
    # replace standalone numbers higher than 10 by #
    # this function does not touch numbers linked to words like "G-20"
    matches = re.findall(r'^\d+\s+|\s+\d+\s+|\s+\d+$', x)
    if len(matches) != 0:
        x = re.sub(r'^[0-9]{5,}\s+|\s+[0-9]{5,}\s+|\s+[0-9]{5,}$', ' ##### ', x)
        x = re.sub(r'^[0-9]{4}\s+|\s+[0-9]{4}\s+|\s+[0-9]{4}$', ' #### ', x)
        x = re.sub(r'^[0-9]{3}\s+|\s+[0-9]{3}\s+|\s+[0-9]{3}$', ' ### ', x)
        x = re.sub(r'^[0-9]{2}\s+|\s+[0-9]{2}\s+|\s+[0-9]{2}$', ' ## ', x)
    # we do include the range from 1 to 10 as all word-vectors include them
    # x = re.sub('[0-9]{1}', '#', x)
    return x
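A quick behaviour check on a made-up sentence (illustrative only; it assumes re is imported and the method above is available at module scope, and since self is unused, None can stand in for it):

import re

sample = "It was the 3rd meeting of the G-20 and drew 25000 people"
print(clean_numbers(None, sample))
# -> "It was the 3  meeting of the G-20 and drew ##### people"
# The "rd" suffix is stripped (leaving a double space), the standalone
# five-digit number is masked as #####, and the digits in "G-20" are untouched.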
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _format_numbers(smth: any) -> any:\n if isinstance(smth, int):\n return float(smth)\n elif smth == 'N.V.':\n return 0.0 # meaning, wine is of type 'non-vintage' and is made of grapes from more than one harvest\n else:\n return smth", "def ordinal_filter(value):\n digit = value % 10\n if 10 < value < 20:\n o = 'th'\n elif digit is 1:\n o = 'st'\n elif digit is 2:\n o = 'nd'\n elif digit is 3:\n o = 'rd'\n else:\n o = 'th'\n return '%d%s' % (value, o)", "def format(number):\n number = compact(number)\n return '-'.join([\n number[:2],\n number[2:6],\n number[6:13],\n number[13:]])", "def format(number):\n number = compact(number)\n return ' '.join((number[:2], number[2:5], number[5:8], number[8:]))", "def transform(s):\r\n return 'digit ' + str(s)", "def numerize():\n pass", "def thou(n):\n if pthou:\n return \"{:,d}\".format(n)\n return \"{:d}\".format(n)", "def format_number(separator, n):\n n_s = str(n)\n if len(n_s) <= 3:\n return n_s\n else:\n upper = n_s[:-3]\n lower = n_s[-3:]\n return format_number(separator, upper) + separator + lower", "def _remove_digit_blocks(self, text: str) -> str:\n return re.sub(r\"\\b\\d+\\b\", \" \", str(text))", "def transform(s):\n return 'digit ' + str(s)", "def filter_format_number(val, places: Optional[int] = None, grouping: bool = True) -> str:\n if not isinstance(val, (int, float)):\n return val\n if places is not None:\n format_str = f'%.{places}f'\n elif isinstance(val, int):\n format_str = '%d'\n else:\n format_str = '%.02f'\n\n locale.setlocale(locale.LC_ALL, '')\n return locale.format_string(format_str, val, grouping)", "def remove_nums(self, text):\r\n return text.translate(None, digits)", "def remove_nums(self, text):\r\n return text.translate(None, digits)", "def remove_nums(self, text):\r\n return text.translate(None, digits)", "def _num2str(self, num):\n q, mod = divmod(num, 10)\n suffix = \"th\" if q == 1 else self.SUFFIX_DICT[mod]\n return f\"{num}{suffix}\"", "def clean_numbers(text):\n return regex.sub(\"\\d+\", ' NUM', text)", "def tweet_clean_numbers(word):\n if not re.search(r'[0-9]+', word):\n return word\n if len(word)==4 and re.search(r'[0-9]{4}', word) and 1900 < int(word) < 2019:\n return word\n word = re.sub(r'^([0-9]|[\\+\\-%/\\*\\.:])+[0-9%/\\+\\*\\.x:]*$', '<number>', word)\n return word", "def replace_numbers(words):\n p = inflect.engine()\n new_words = []\n for word in words:\n if word.isdigit():\n new_word = p.number_to_words(word)\n new_words.append(new_word)\n else:\n new_words.append(word)\n return ' '.join(new_words)", "def replace_numbers(words):\n p = inflect.engine()\n new_words = []\n for word in words:\n if word.isdigit():\n new_word = p.number_to_words(word)\n new_words.append(new_word)\n else:\n new_words.append(word)\n return ' '.join(new_words)", "def textualize(num):\n if isinstance(num, float):\n num = int(num)\n # special case\n if num == 0:\n return 'zero'\n\n # if the number is negative, we put the word\n # 'negative' in front of it.\n is_negative = False\n if num < 0:\n is_negative = True\n num = -1 * num\n\n num = str(num)\n # pad with zeroes\n while len(num) % 3 != 0:\n num = ''.join([ '0', num ])\n\n # as groups are textualized, their strings will be\n # appended to this list\n num_string = []\n group_counter = 0\n while len(num) > 0:\n group = num[-3:]\n num = num[:-3]\n text = _textualize_group(group)\n\n # thousand, million, etc.\n if group_counter > 0 and text:\n group_name = group_names[group_counter]\n text = ' '.join([ text, group_name ])\n\n if text:\n num_string.insert(0, text)\n\n 
group_counter += 1\n\n if is_negative:\n num_string.insert(0, 'negative')\n\n return ' '.join(num_string)", "def suffix(d): \n return \"th\" if 11<=d<=13 else {1:\"st\",2:\"nd\",3:\"rd\"}.get(d%10, \"th\")", "def remove_numbers_fun(self):\n self.doc = re.sub(\"[0-9]\", \"\", self.doc)", "def format_number(num):\n result = \" \" + str(num) + \" \"\n if num < 10:\n result = result + \" \"\n return result", "def strip_numbers(s):\n if s:\n s = u' '.join([x for x in s.split(' ') if not x.isdigit()])\n return s", "def compact(number):\n number = clean(number, ' ').upper().strip()\n if number.startswith('AL'):\n number = number[2:]\n if number.startswith('(AL)'):\n number = number[4:]\n return number", "def compact(number):\n return clean(number, ' -./,').strip()", "def scinotation(self, num):\n num = num.replace(\"D\", \"e\")\n return f\"{decimal.Decimal(num):.9e}\"", "def _remove_digits(self, text: str) -> str:\n return re.sub(r\"\\d+\", \" \", str(text))", "def formatted_number(number):\n try:\n number = int(number)\n if number < 0:\n return '-' + formatted_number(-number)\n result = ''\n while number >= 1000:\n number, number2 = divmod(number, 1000)\n result = \",%03d%s\" % (number2, result)\n return \"%d%s\" % (number, result)\n except Exception:\n return \"\"", "def formatter(t: tuple):\n s = 'The {} numbers are: ' + '{}, '*(len(t)-1) + '{}'\n return s.format(len(t),*t)", "def formatter(text):\n repl_map = {\n \"degC\": \"$^o$C\",\n \"K\": \"$^o$C\",\n \"month-1\": \"month$^{{-1}}$\",\n \"day-1\": \"day$^{{-1}}$\",\n \"d-1\": \"day$^{{-1}}$\",\n \"decade-1\": \"decade$^{{-1}}$\",\n \"year-1\": \"year$^{{-1}}$\",\n \"rcp85\": \"RCP8.5\",\n \"rcp45\": \"RCP4.5\",\n \"rcp26\": \"RCP2.6\",\n \"RCP85\": \"RCP8.5\",\n \"RCP45\": \"RCP4.5\",\n \"RCP26\": \"RCP2.6\",\n \"cmip5-85\": \"RCP8.5\",\n \"cmip5-60\": \"RCP6.0\",\n \"cmip5-45\": \"RCP4.5\",\n \"cmip5-26\": \"RCP2.6\",\n \"ssp585\": \"SSP5-8.5\",\n \"ssp245\": \"SSP2-4.5\",\n \"ssp126\": \"SSP1-2.6\",\n \"SSP585\": \"SSP5-8.5\",\n \"SSP245\": \"SSP2-4.5\",\n \"SSP126\": \"SSP1-2.6\",\n \"cmip6-85\": \"SSP5-8.5\",\n \"cmip6-70\": \"SSP3-7.0\",\n \"cmip6-60\": \"SSP4-6.0\",\n \"cmip6-34\": \"SSP4-3.4\",\n \"cmip6-45\": \"SSP2-4.5\",\n \"cmip6-26\": \"SSP1-2.6\",\n \"cmip6-19\": \"SSP1-1.9\",\n \"1\": \"%\",\n \"era5\": \"ERA5\",\n \"gpcc025x025_v8\": \"GPCC\",\n \"cru\": \"CRU\",\n \"jra55\": \"JRA55\",\n \"HIGHRESMIP\": \"HighResMIP\",\n \" \": \"\",\n }\n for key, val in repl_map.items():\n if key in text:\n text = text.replace(key, val)\n break\n return text", "def compact(number):\n number = clean(number).strip().replace(' ', '-').split('-')\n if len(number) == 4:\n # zero pad the different sections if they are found\n lengths = (2, 4, 7, 3)\n return ''.join(n.zfill(l) for n, l in zip(number, lengths))\n else:\n # otherwise zero pad the account type\n number = ''.join(number)\n return number[:13] + number[13:].zfill(3)", "def remove_ordinal(string):\n o_string = []\n for numWord in string.split():\n regexNum = re.search(r'^[0-9]+', numWord)\n if regexNum:\n num = regexNum.group()\n # ordinal = numWord[regexNum.end():]\n \"\"\"\n django's ordinal method will return\n 23 -> 23rd\n 4 -> th\n 15th - > 15th\n \"\"\"\n if ordinal(num) == numWord:\n numWord = num\n o_string.append(numWord)\n\n o_string = \" \".join(o_string)\n return o_string", "def pretty(self):\n return \"(%s) %s-%s\" %(self.number[:3],\n self.number[3:6],\n self.number[6:])", "def compact(number):\n return clean(number, ' -.').upper().strip()", "def my_formatter(numbers):\n 
my_list_of_numbers = \", \".join(\"{:d}\".format(my_num) for (my_num) in numbers)\n results = f\"My list of numbers is: {my_list_of_numbers}.\"\n return results", "def english(number):\r\n if number == 0:\r\n return 'zero'\r\n word = ''\r\n for step in itertools.count():\r\n number, rest = divmod(number, 1000)\r\n word = format_num(en3(rest), step) + word\r\n if number == 0:\r\n return word.strip()", "def plurals(num):\r\n if num != 1:\r\n return ('s')\r\n return ('')", "def intspace(value):\n orig = force_unicode(value)\n new = re.sub(\"^(-?\\d+)(\\d{3})\", '\\g<1> \\g<2>', orig)\n if orig == new:\n return new\n else:\n return intspace(new)", "def deltastr(num, include_sign=True, currency=False):\n if num == 0:\n return ''\n elif num > 0:\n b4 = Fore.GREEN\n elif num < 0:\n b4 = Fore.RED\n signage = '+' if include_sign else ''\n b4 += '$' if currency else ''\n numfmt = ',.0f' if currency else ''\n return f'{b4}{num:{signage}{numfmt}}{Style.RESET_ALL}'", "def format_num(number) -> str:\n should_be_padded = isinstance(number, (float, str))\n if not isinstance(number, str):\n number = tqdm.format_num(number)\n if should_be_padded and 'e' not in number:\n if '.' not in number and len(number) < 5:\n try:\n _ = float(number)\n except ValueError:\n return number\n number += '.'\n number += \"0\" * (5 - len(number))\n return number", "def spell_number(num):\n tens, units = num / 10, num % 10\n tens_str = NUMBERS_10[tens]\n units_str = NUMBERS_1[units]\n if tens == 1:\n return NUMBERS_TEEN[units]\n elif tens:\n if units:\n return \"{t} {u}\".format(t=tens_str, u=units_str)\n return \"{t}\".format(t=tens_str)\n else:\n return units_str", "def compact(number):\n return clean(number, ' -').strip()", "def translateNumber(n):\r\n if type(n) != str:\r\n return None\r\n else:\r\n translation = \"\"\r\n word = \"\"\r\n for c in n:\r\n if c != ' ':\r\n word += c\r\n elif word in Numbers:\r\n translation += Numbers[word] + \" \"\r\n else:\r\n translation += word + \" \"\r\n return translation", "def format(number, separator=' '):\n number = compact(number)\n return separator.join((number[0:3], number[3:6], number[6:]))", "def commify(num, separator=','):\n\tnum = '%.0f' %(num)\t # just in case we were passed a numeric value\n\tmore_to_do = 1\n\twhile more_to_do:\n\t\t(num, more_to_do) = regex.subn(r'\\1%s\\2' % separator,num)\n\treturn num", "def format_number(number):\n return f'{number:8,}'", "def format_number_list(x):\n return ' '.join([format_number(y) for y in x])", "def format_engineering( number, unit = \"\" ):\n if math.isnan(number):\n return \"nan\"\n if number == 0.0:\n return 0\n\n convert_table = {-18:'a', -15:'f', -12:'p', -9:'n', -6:'u',\n -3:'m', -2:'c', -1:'d', 0:'', 3:'k',\n 6:'M', 9:'G', 12:'T', 15:'P', 18:'E'}\n l10 = math.log10(abs(number))\n ten_exp = int(l10)\n\n sci_places = int(ten_exp / 3) * 3\n sci_signific = (ten_exp % 3)\n\n expo_char = convert_table[sci_places]\n trailing = number / 10.0 ** sci_places\n\n # print z, ten_exp, sci_places, sci_signific\n if trailing >= 10:\n lead = \"{:d}\".format(int(round(trailing)))\n elif trailing >= 1:\n lead = \"{:.1f}\".format(trailing)\n else:\n lead = \"{:.2f}\".format(trailing)\n return lead + \" \" + expo_char + unit", "def collapse_numbers(text: str):\n groups = re.findall(r\"[\\d|\\s]{1,}\", text)\n\n results = list()\n for numbers in groups:\n squashed = squash(numbers)\n if squashed != \"\":\n results.append(squashed)\n\n return results", "def fnum(num, sf = 0):\n\n\ts = []\n\tnf = 0\n\tppos = -1\n\tfor x in 
str(num):\n#\t\tprint((x, s))\n\t\tif x == '.':\n\t\t\tppos = len(s)\n\t\t\tcontinue\n\t\tif nf == 0 and ppos < 0 and x == '0':\n\t\t\tcontinue\n\t\ts.append(x)\n\t\tif x != '-' and (x != '0' or nf > 0):\n\t\t\tnf += 1\n\t\tif ppos >= 0 and sf > 0 and nf > sf:\n\t\t\tif int(s[-1]) >= 5:\n\t\t\t\ts[-2] = str(int(s[-2]) + 1)\n\t\t\ts = s[:-1]\n\t\t\tbreak\n\tif len(s) == 0:\n\t\ts = ['0']\n\tif ppos >= 0:\n\t\ts.insert(ppos, '.')\n\t\tif s[0] == '.':\n\t\t\ts.insert(0, '0')\n\t\treturn(''.join(s).rstrip('0').rstrip('.'))\n\telse:\n\t\treturn(''.join(s))", "def format_number(n):\n # locale.setlocale(locale.LC_ALL, 'en_US') # commented by me\n # return locale.format('%d', n, grouping=True)\n return n", "def removeNumbers(self, words):\n\t\treturn re.sub(r'\\d', '', words)", "def phoneDisplay(number):\n return number[0:3] + \"&nbsp;&middot;&nbsp;\" + number[3:6] + \"&nbsp;&middot;&nbsp;\" + number[6:10]", "def myformat(table):\n m = 0\n table = sorted(table, key=itemgetter(0))\n for t in table:\n t = str(t)\n if len(t[0]) > m:\n m = len(t[0])\n m += 10\n fstr = \"{0:}\" + m*\" \" + \"{1:}\"\n s = \"\"\n for x in table:\n try:\n a = float(x[0])\n b = float(x[1])\n s += \"{0:.5f}{1:{width}}\".format(a, b, width=m) + \"\\n\"\n except IndexError:\n pass\n return s\n \"\"\"\n out = \"\"\n for pair in table:\n out += str(pair[0]) + 5*\" \" + str(pair[1]) + \"\\n\"\n return out\"\"\"", "def format_number(n):\n if int(n) < 0:\n raise ValueError(\"positive integer expected\")\n n = str(n)\n return ','.join([n[::-1][x:x+3]\n for x in range(0,len(n),3)])[::-1]", "def normalize(phone):\n d = re.sub('\\D', '', phone)\n return '+7 (%s) %s-%s-%s' % (d[1:4], d[4:7], d[7:9], d[9:11])", "def _convert_words_to_numbers_nl(text, short_scale=True, ordinals=False):\n text = text.lower()\n tokens = tokenize(text)\n numbers_to_replace = \\\n _extract_numbers_with_text_nl(tokens, short_scale, ordinals)\n numbers_to_replace.sort(key=lambda number: number.start_index)\n\n results = []\n for token in tokens:\n if not numbers_to_replace or \\\n token.index < numbers_to_replace[0].start_index:\n results.append(token.word)\n else:\n if numbers_to_replace and \\\n token.index == numbers_to_replace[0].start_index:\n results.append(str(numbers_to_replace[0].value))\n if numbers_to_replace and \\\n token.index == numbers_to_replace[0].end_index:\n numbers_to_replace.pop(0)\n\n return ' '.join(results)", "def normalize_issn(val):\n val = val.replace(\" \", \"\").replace(\"-\", \"\").strip().upper()\n return \"{0}-{1}\".format(val[:4], val[4:])", "def _fmt(x, pos):\n a, b = '{:.2e}'.format(x).split('e')\n b = int(b)\n return r'${} \\times 10^{{{}}}$'.format(a, b)", "def fixNumber(sval):\n\n r, val = VALID_RE.match(sval.strip()).groups()\n parts = VALPARTS_RE.findall(val)\n dpart = parts.pop(-1)\n if parts:\n return (r or \"\") + \"\".join(parts) + \".\" + dpart\n return (r or \"\") + dpart", "def format_nums(nums, sep):\n #See get_long_len(nums) doc string for info on this\n long_len = get_long_len(nums)\n str_nums = map(str, nums)\n for i in range(0, len(str_nums)):\n #Temporary variable to save line space\n num = str_nums[i]\n #Add the separater to the beginning of the number string\n #for each digit space missing from the longest number string\n if len(num) < long_len:\n str_nums[i] = \"\".join ([sep * (long_len - len(num)), num])\n \n return str_nums", "def remove_numbers(self, doc):\n regex = re.compile('[%s]' % re.escape(self.numbers))\n return regex.sub('', doc)", "def _textualize_group(group):\n # The final string. 
A list is used for performance.\n ret_str = []\n\n ones = int(group[2])\n tens = int(group[1])\n hundreds = int(group[0])\n is_teen = False\n ones_str = ''\n tens_str = ''\n hundreds_str = ''\n\n if hundreds > 0:\n hundreds_str = '{} hundred'.format(ones_place[hundreds])\n\n if tens > 0:\n if tens == 1:\n is_teen = True\n tens_str = teens[ones]\n else:\n tens_str = tens_place[tens]\n if ones > 0 and not is_teen:\n ones_str = ones_place[ones]\n\n # Create the final string\n\n if hundreds_str:\n ret_str.append(hundreds_str)\n # Add a space if there is a tens\n # or ones place digit.\n if tens_str or ones_str:\n ret_str.append(' ')\n\n if tens_str:\n ret_str.append(tens_str)\n # Add a space or hyphen depending\n # on the ones place digit.\n if ones_str:\n if tens > 1:\n ret_str.append('-')\n else:\n ret_str.append(' ')\n\n if ones_str:\n ret_str.append(ones_str)\n return ''.join(ret_str)", "def replace_street(street):\r\n if isinstance(street, str):\r\n for rep in replacements:\r\n street = re.sub(rep, \"\", street)\r\n\r\n streetint = re.findall(r'\\d+', str(street))\r\n if len(streetint) > 0 and int(streetint[0]) < 100:\r\n street = int(streetint[0])\r\n\r\n if street < 10:\r\n street = '0' + str(street) + str(streetnums[str(street)])\r\n elif street < 14:\r\n street = str(street) + 'TH'\r\n else:\r\n street = str(street) + str(streetnums[str(street)[-1]])\r\n\r\n\r\n return street", "def numberFormat(self,num,isImag=False):\n string=str(num)\n if num!=0:\n if num>0:\n string=\"+\" if num==1 else \"+\"+string\n else:\n string=\"-\" if num==-1 else string\n return string+\"i\" if isImag else string\n return \"\"", "def compact_number(value: int) -> str:\n value = float('{:.3g}'.format(value))\n magnitude = 0\n while abs(value) >= 1000:\n magnitude += 1\n value /= 1000.0\n return '{}{}'.format(\n '{:f}'.format(value).rstrip('0').rstrip('.'), ['', 'K', 'M', 'B', 'T'][magnitude]\n )", "def f2s(x, num_digits=6):\n format = '%' + '.%sf' % num_digits\n s = (format % x).rstrip('0')\n return s", "def format_large_numbers(text):\n\n text = re.sub(r\"(?<!\\d)\\$?\\d{1,3}(?=(,\\d{3}|\\s))\", r\" \\g<0> \", text) # pad commas in large numerical values\n return re.sub(r\"(\\d+)?,(\\d+)\", r\"\\1\\2\", text) # remove commas from large numerical values", "def number_formatter(number, pos=None):\n magnitude = 0\n while abs(number) >= 1000:\n magnitude += 1\n number /= 1000.0\n return '%.1f%s' % (number, ['', 'K', 'M', 'B', 'T', 'Q'][magnitude])", "def intword(value, format='%.1f'):\r\n try:\r\n value = int(value)\r\n except (TypeError, ValueError):\r\n return value\r\n\r\n if value < powers[0]:\r\n return str(value)\r\n for ordinal, power in enumerate(powers[1:], 1):\r\n if value < power:\r\n chopped = value / float(powers[ordinal - 1])\r\n return (' '.join([format, _(human_powers[ordinal - 1])])) % chopped\r\n return str(value)", "def int_format(self):\n ...", "def ordinal_conversion(value):\n last_digit = value.group(0)[-1]\n value_map = {'1': 'st', '2':'nd', '3':'rd'}\n if value_map.get(last_digit, False):\n return value.group(0) + value_map[last_digit]\n else:\n return value.group(0) + 'th'", "def printstring(tn,xs): #Printing function\n if (tn > -1) and (tn <= 9):\n for x in range(xs): #Outer loop for line iteration\n print(\"\\n\") #Need this new line to meet the output requirement\n for y in range(xs): #Inner loop for horizontal printing\n print(tn,end=' ') #User defined format, which is a\n #digit followed by a white space\n elif tn == -1:\n for x in range(xs): #Outer loop for line iteration\n 
print(\"\\n\") #Need this new line to meet the output requirement\n for y in range(xs): #Inner loop for horizontal printing\n print(y,end=' ') #User defined format, which is a\n #digit followed by a white space\n else:\n print(\"Must be single digit, please!\\n\")", "def format_value(value):\n if len(value) < 2:\n return '0' + value\n\n else:\n return value", "def _remove_digits(text: str) -> str:\n table = str.maketrans('', '', digits)\n\n return text.translate(table)", "def cleanInteger(number):\n \n number = str(number).replace(' ', '')\n \n test = number\n for i in range(10):\n test = test.replace(str(i), '')\n \n if test:\n return None\n \n return number", "def format(fmt, st):\n ret = \"\"\n if not st: return ret\n if fmt not in valid_combos:\n return st\n cm = charmap[fmt]\n for c in st:\n ret += cm.get(c, c)\n return ret", "def strip_numbers(text):\n if text is np.nan:\n return text\n regex = re.compile(r\"-?\\d+\")\n return re.sub(regex, \"\", text)", "def text_transform(val):\n if CURRENCY == \"USD\":\n return \"$%d\" % val\n if CURRENCY == \"EUR\":\n return \"‎€%d\" % val\n if CURRENCY == \"GBP\":\n return \"£%d\" % val\n return \"%d\" % val", "def reformat(number):\n if number.find('E') == -1:\n exponent = \"-101\"\n mantissa = number.split(exponent)\n return float(mantissa[0])*10**float(exponent)\n else:\n mantissa, exponent = number.split('E')\n\n return float(mantissa)*10**float(exponent)", "def str_fmt(x):\n if isinstance(x, (list, tuple, np.ndarray)):\n return [str_fmt(x) for x in x]\n if x <= 0.1:\n return f'${x:.2f}$'\n return f'${x:.1f}$' if x <= 1 else f'${int(x)}$'", "def prefix(num):\n # determine which range it lies in, r1/r2 means reduction 1 or reduction 2\n divisors = [1e-24 * pow(10, 3 * x) for x in range(17)]\n prefixes = list(reversed(['Yotta (Y)', 'Zetta (Z)', 'Exa (E)', 'Peta (P)', 'Tera (T)', 'Giga (G)', 'Mega (M)',\n 'Kilo (K)', '', 'Milli (m)', 'Micro ($\\mu$)', 'Nano (n)', 'Pico (p)', 'Femto (f)',\n 'Atto (a)', 'Zepto (z)', 'Yocto (y)']))\n exp = np.floor(np.log10(np.abs(num)))\n if exp < 0:\n exp -= 3\n expIndex = int(exp / 3) + 8\n expIndex = 0 if expIndex < 0 else expIndex\n expIndex = len(prefixes)-1 if expIndex >= len(prefixes) else expIndex\n r1 = prefixes[expIndex]\n num1 = num / divisors[expIndex]\n if expIndex != len(prefixes):\n r2 = prefixes[expIndex + 1]\n num2 = num / divisors[expIndex + 1]\n else:\n num2 = None\n retStr = str(num1) + ' ' + r1\n if num2 is not None:\n retStr += '\\nor\\n' + str(num2) + ' ' + r2\n return retStr", "def remove_digit(self, values, box, digit):\n values[box] = values[box].replace(digit, '')\n return values", "def usd(value):\n return f\"${int(value):,}\"", "def number_as_string(x):\n \n numnames = {1 : \"one\", 2 : \"two\", 3 : \"three\", 4 : \"four\", 5 : \"five\", 6 : \"six\", 7 : \"seven\", 8 : \"eight\", 9 : \"nine\",\n 10 : \"ten\", 11 : \"eleven\", 12 : \"twelve\", 13 : \"thirteen\", 14 : \"fourteen\", 15 : \"fifteen\", 16 : \"sixteen\",\n 17 : \"seventeen\", 18 : \"eighteen\", 19 : \"nineteen\", 20 : \"twenty\", 30 : \"thirty\", 40 : \"forty\", 50 : \"fifty\", \n 60 : \"sixty\", 70 : \"seventy\", 80 : \"eighty\", 90 : \"ninety\"}\n \n numparts = []\n needAnd = (x > 100) and (x % 100)\n if x >= 1000:\n numparts.append(numnames[x/1000])\n numparts.append(\"thousand\")\n x %= 1000\n \n if x >= 100:\n numparts.append(numnames[x/100])\n numparts.append(\"hundred\")\n x %= 100\n \n if needAnd:\n numparts.append(\"and\")\n \n if 11 <= x <= 19:\n numparts.append(numnames[x])\n else:\n if x >= 10:\n 
numparts.append(numnames[(x/10)*10])\n x %= 10\n\n if x > 0:\n numparts.append(numnames[x])\n \n return \" \".join(numparts)", "def replace_digits(text):\n text = re.sub(r\"\\d+\", \"number\", text)\n \n return text", "def formatted(s):\n matches = re.findall(_format_re, normalize(s))\n if len(matches) == 1 and matches[0][0] != '':\n return matches[0][0]\n def to_fmt(txt_none, txt_sw, txt_rem, txt_em, txt_a):\n if txt_none != '':\n return FORMAT_NONE, txt_none\n elif txt_sw != '':\n return FORMAT_SW, txt_sw\n elif txt_rem != '':\n return FORMAT_REM, txt_rem\n elif txt_em != '':\n return FORMAT_EM, txt_em\n elif txt_a != '':\n return FORMAT_A, txt_a\n return [to_fmt(*m) for m in matches]", "def number2text(integer):\n\n numbers_1_20_char = [\"one\", \"two\", \"three\", \"four\", \"five\",\n \"six\", \"seven\", \"eight\", \"nine\", \"ten\",\n \"eleven\", \"twelve\", \"thirteen\", \"fourteen\", \"fifteen\",\n \"sixteen\", \"seventeen\", \"eighteen\", \"nineteen\", \"twenty\"]\n\n numbers_21_99_int = list(range(20, 100, 10))\n numbers_21_99_char = [\"twenty\", \"thirty\", \"forty\", \"fifty\",\n \"sixty\", \"seventy\", \"eighty\", \"ninety\"]\n\n numbers_100_999_int = list(range(100,1000,100))\n numbers_100_999_char = [\"one hundred\", \"two hundred\", \"three hundred\", \"four hundred\", \"five hundred\",\n \"six hundred\", \"seven hundred\", \"eight hundred\", \"nine hundred\"]\n\n number_1000_int = 1000\n number_1000_char = \"one thousand\"\n\n if integer <= 0:\n raise ValueError(\"The number must be higher than 0, and smaller than 1001\")\n elif 1 <= integer <= 19:\n word = numbers_1_20_char[integer - 1]\n elif 20 <= integer <= 99:\n if integer in numbers_21_99_int:\n word = numbers_21_99_char[int(integer/10) - 2]\n else:\n inBetween = list(str(integer))\n lastword = numbers_1_20_char[int(inBetween[1]) - 1]\n firstword = numbers_21_99_char[int(int(inBetween[0])) - 2]\n word = \"\".join([firstword, lastword])\n elif 100 <= integer <= 999:\n if integer in numbers_100_999_int:\n word = numbers_100_999_char[int(integer/100) - 1]\n else:\n inBetween = list(str(integer))\n firstword = numbers_100_999_char[int(integer / 100) - 1]\n if int(inBetween[2]) == 0:\n if int(inBetween[1]) == 1:\n word = \"\".join([firstword, \"and\", \"ten\"])\n else:\n secondword = numbers_21_99_char[int(int(inBetween[1])) - 2]\n word = \"\".join([firstword, \"and\", secondword])\n else:\n number = (int(inBetween[1])*10) + int(inBetween[2])\n if 1 <= number <= 20:\n secondword = numbers_1_20_char[number - 1]\n word = \"\".join([firstword, \"and\", secondword])\n else:\n secondword = numbers_21_99_char[int(int(inBetween[1])) - 2]\n thirdword = numbers_1_20_char[int(int(inBetween[2])) - 1]\n word = \"\".join([firstword, \"and\", secondword, thirdword])\n elif integer == number_1000_int:\n word = number_1000_char\n\n return word", "def format_number(num):\n rounded = round(float(num), 2)\n rounded_str = \"{:g}\".format(rounded)\n if rounded_str.startswith('-0.'):\n rounded_str = '-' + rounded_str[2:]\n elif rounded_str.startswith('0.'):\n rounded_str = rounded_str[1:]\n return rounded_str", "def humanize_day(day_num):\n if 11 <= day_num <= 13:\n suffix = 'th'\n else:\n r = day_num % 10\n if r == 1:\n suffix = 'st'\n elif r == 2:\n suffix = 'nd'\n elif r == 3:\n suffix = 'rd'\n else:\n suffix = 'th'\n return str(day_num) + suffix", "def _replace_numbers(words):\n p = inflect.engine()\n new_words = []\n for word in words:\n if word.isdigit():\n try:\n new_word = p.number_to_words(word)\n new_words.append(new_word)\n 
except:\n pass\n else:\n new_words.append(word)\n return new_words", "def __formatDate(self, num):\n if len(num) < 2:\n num = '0'+num\n return num", "def main(num1, num2, text):\n return print(\"%30i\"%num1), print(\"%030i\"%num1), print(\"%.2f\"%num2), print(\"%.12f\"%num2), \\\n print(\"%40s\"%text)", "def clean_num(quote):\n for char in ROMAN:\n quote = quote.replace(*char)\n return quote", "def convert(number):\n out = \"\"\n if number % 3 == 0:\n out = \"Pling\"\n if number % 5 == 0:\n out = out + \"Plang\"\n if number % 7 == 0:\n out = out + \"Plong\"\n if out == \"\":\n out = str(number)\n return out", "def ordinal(n):\n if 11 <= n <= 19:\n return str(n) + \"th\"\n s = str(n)\n last = int(s[-1])\n if 1 <= last <= 3:\n return s + (\"st\", \"nd\", \"rd\")[last-1]\n return s + \"th\"", "def clean_phone(number):\n numberlist = re.findall(\"\\d\",number)\n new_number = \"\".join(numberlist)\n if len(new_number) == 8:\n \tnew_number = \"010\" + new_number\n\tnew_number = new_number[-11:]\n\tif new_number.startswith('1'):\n\t\tnew_number = \"+86-\" + new_number\n\telse:\n\t\tnew_number = \"+86-10-\" + new_number[-8:]\n\treturn new_number", "def intspace(value):\n # http://softwaremaniacs.org/forum/django/19392/\n if value is None:\n return None\n orig = force_str(value)\n new = re.sub(r\"^(-?\\d+)(\\d{3})\", r\"\\g<1> \\g<2>\", orig)\n return new if orig == new else intspace(new)", "def fraction_str(*numbers):\n total = sum(numbers)\n fractions = [number / float(total) for number in numbers]\n return '(' + ', '.join('{:0.03f}'.format(item) for item in fractions) + ')'" ]
[ "0.67999727", "0.62027526", "0.61682063", "0.6136683", "0.6127746", "0.6096326", "0.60839456", "0.6068743", "0.6052151", "0.5988611", "0.5986997", "0.59576005", "0.59576005", "0.59576005", "0.5921066", "0.59175247", "0.59080505", "0.59045625", "0.59045625", "0.58787817", "0.58733374", "0.5869161", "0.58451086", "0.5836542", "0.5835291", "0.5827486", "0.5820193", "0.577612", "0.57738763", "0.5761904", "0.5759452", "0.575383", "0.5742642", "0.5735666", "0.5721164", "0.5706141", "0.57055336", "0.5679153", "0.5677171", "0.5676553", "0.56635505", "0.5657661", "0.5656791", "0.56531394", "0.5652843", "0.5647039", "0.5644065", "0.5616875", "0.5616261", "0.5611479", "0.560364", "0.56020504", "0.5601408", "0.559877", "0.55969673", "0.5586257", "0.55805117", "0.558001", "0.5576955", "0.55751306", "0.5566516", "0.55576956", "0.5557346", "0.5550581", "0.5548465", "0.5540156", "0.5539635", "0.5538764", "0.5538221", "0.553584", "0.55354595", "0.55327487", "0.5527992", "0.5519733", "0.55181354", "0.5517272", "0.5515663", "0.5498118", "0.54950744", "0.5491672", "0.5490615", "0.5488995", "0.54850173", "0.5484259", "0.54790044", "0.5468651", "0.5461075", "0.54514515", "0.54475087", "0.54456216", "0.5441699", "0.54331934", "0.5428406", "0.54279596", "0.54219735", "0.5419447", "0.54179156", "0.54160964", "0.5412888", "0.5411374" ]
0.7203597
0
This function is used to replace "yr"/"yrs" with "year" and "hr"/"hrs" with "hour".
def year_and_hour(self, text):
    # Find matches for "yr", "yrs", "hr", "hrs"
    matches_year = re.findall(r'\b\d+\s*yr\b', text)
    matches_years = re.findall(r'\b\d+\s*yrs\b', text)
    matches_hour = re.findall(r'\b\d+\s*hr\b', text)
    matches_hours = re.findall(r'\b\d+\s*hrs\b', text)
    # replace all matches accordingly
    if len(matches_year) != 0:
        text = re.sub(r'\b\d+\s*yr\b', "year", text)
    if len(matches_years) != 0:
        text = re.sub(r'\b\d+\s*yrs\b', "year", text)
    if len(matches_hour) != 0:
        text = re.sub(r'\b\d+\s*hr\b', "hour", text)
    if len(matches_hours) != 0:
        text = re.sub(r'\b\d+\s*hrs\b', "hour", text)
    return text
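A quick behaviour check on a made-up sentence (illustrative only; re must be imported, and since self is unused, None can stand in for it):

import re

print(year_and_hour(None, "waited 2 hrs after a 3 yr gap"))
# -> "waited hour after a year gap"
# The whole "<number> yr/hrs" span is replaced, so the digit is dropped
# together with the unit; "hrs" is handled by the hrs pattern, not the hr one.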
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def replace_time(text, ori):\n r = ori\n if '**' in text:\n r = 'xxhour'\n else:\n try:\n # handle exceptions with custom rules\n f, s = text.split()\n s = 'am' if s[0] == 'a' else 'pm'\n l, r = f.split(':')\n if l == '' or l == '00':\n if r == '':\n r = str(0).zfill(2)\n l = str(12)\n if int(l) > 12:\n l = str(int(l) % 12)\n f = ':'.join([l, r])\n text = ' '.join([f, s])\n\n d = datetime.strptime(text, '%I:%M %p')\n if d.hour >= 0 and d.hour < 4:\n r = 'xxmidngt'\n elif d.hour >= 4 and d.hour < 8:\n r = 'xxdawn'\n elif d.hour >= 8 and d.hour < 12:\n r = 'xxfore'\n elif d.hour >= 12 and d.hour < 16:\n r = 'xxafter'\n elif d.hour >=16 and d.hour <20:\n r = 'xxdusk'\n else:\n r = 'xxngt'\n except ValueError:\n pass\n return r", "def convert_24hr_12ampm(military_hr): \n\t\n if military_hr == 0:\n hour_ampm_str = \"12am\"\n elif military_hr == 12:\n hour_ampm_str = \"12pm\"\n elif military_hr > 12:\n hour_ampm_str = str(military_hr - 12) + \"pm\"\n else:\n hour_ampm_str = str(military_hr) + \"am\"\n # end of if block\n \n return hour_ampm_str", "def time_input():\n \n year = 2020\n month = 3 # number \n day = 12 # number in month\n hour = 12 # integer between 9 (= 9:00AM) and 17 (= 4:00PM) ## CHECK THIS\n minute = 0 # float between 0 (= 0 min) to 0.983 = 59 min)\n \n date=dt.datetime(year,month,day)\n time = date.timetuple().tm_yday\n time = time + hour/24 + minute/24/60\n \n return year, time", "def normalise_time(time_str):\n\n hour = time_str.split(\":\")[0]\n if int(hour) >= 24:\n normalised_hour = int(hour) % 24\n return time_str.replace(hour, f\"{normalised_hour:02}\")\n\n return time_str", "def normalise_two_digit_year(y):\r\n if y[0] == \"'\":\r\n y = y[1:]\r\n if int(y) < 39:\r\n return '%04d' % (int(y) + 2000)\r\n elif int(y) < 100:\r\n return '%04d' % (int(y) + 1900)\r\n else:\r\n return '%04d' % int(y[:4])", "def test_short_format_contains_year(self):\n locale = {\n 'timeformat': '%H:%M',\n 'dateformat': '%Y-%m-%d',\n 'longdateformat': '%Y-%m-%d',\n 'datetimeformat': '%Y-%m-%d %H:%M',\n 'longdatetimeformat': '%Y-%m-%d %H:%M',\n }\n assert (dt.datetime(2017, 1, 1), dt.datetime(2017, 1, 2), True) == \\\n guessrangefstr('2017-1-1 2017-1-1', locale=locale)", "def do_ry(self, arg):\n self.do_timesheet('report year')", "def readdate(line):\n splitted = line.split('::') \n \n # Convert the date\n date = dt.datetime.strptime(splitted[1].strip(), '%a %b %d %H:%M:%S')\n correctdate = date.replace(year=YEAR)\n return correctdate", "def extract(d):\n \n Y, M, D, W, H = (None for _ in range(5))\n \n def get_hour(groups):\n H, m, s = (int(x) for x in groups[4:7])\n if groups[8] == 'am' and H == 12:\n H = 0\n if groups[8] == 'pm' and 0 < H < 12:\n H += 12\n return H + m/60 + s/3600\n \n if type(d) == str:\n d = d.lower()\n match = re.match(r'^(\\d+)/(\\d+)/(20\\d+)( (\\d+):(\\d+):(\\d+)( (am|pm))?)?', d)\n if match is None:\n match = re.match(r'^(\\d+)-([a-z]+)-(\\d+)( (\\d+):(\\d+):(\\d+)( (am|pm))?)?', d)\n if match is None:\n return\n else:\n month = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']\n D = int(match.group(1))\n M = month.index(match.group(2)) + 1\n Y = 2000 + int(match.group(3))\n W = datetime.date(Y, M, D).timetuple()[6]\n if match.group(4) is None:\n H = -1\n else:\n H = get_hour(match.groups())\n else:\n M, D, Y = (int(x) for x in (match.groups())[:3])\n W = datetime.date(Y, M, D).timetuple()[6]\n if match.group(4) is None:\n H = -1\n else:\n H = get_hour(match.groups())\n return (Y, M, D, W, H)", "def year_expand(s):\n regex = 
r\"^((?:19|20)\\d{2})?(\\s*-\\s*)?((?:19|20)\\d{2})?$\"\n try:\n start, dash, end = re.match(regex, ustr(s)).groups()\n start = start or 1900\n end = end or 2099\n except AttributeError:\n return 1900, 2099\n return (int(start), int(end)) if dash else (int(start), int(start))", "def add_time(data, t):\n data['year'] = t.year\n data['month'] = t.month\n data['day'] = t.day\n data['hour'] = t.hour\n data['minute'] = t.minute\n data['second'] = t.second", "def year(cls, year: typing.Union[int, str])->str:\n yearstr: str\n if isinstance(year, int):\n yearstr = str(year)\n else:\n yearstr = year\n return cls.DATE_AND_TIMES_SIGIL + yearstr + \"-01-01T00:00:00/9\"", "def build_convert_to_hours(time_units):\n if time_units not in VALID_TIME_UNITS:\n raise ValueError('Time units must be one of', VALID_TIME_UNITS)\n \n if time_units == 'min':\n return lambda x: x/60\n elif time_units == 'h':\n return lambda x: x", "def test_evaluate_year_expression(self):\n for f, r in (\n (\"year\", 2013),\n (\"month\", 9),\n (\"day\", 1),\n (\"hour\", 10),\n (\"minute\", 56),\n (\"second\", 0)):\n value = self.evaluate_common(\"%s(datetime'2013-09-01T10:56')\" % f)\n self.assertTrue(\n value.type_code == edm.SimpleType.Int32, \"Expected Int32\")\n self.assertTrue(value.value == r)\n try:\n value = self.evaluate_common(\n \"%s(datetimeoffset'2013-09-01T10:56:12-05:00')\" % f)\n self.fail(\"datetimeoffset %s\" % f)\n except odata.EvaluationError:\n pass\n try:\n value = self.evaluate_common(\n \"%s(datetime'2013-09-01T10:56',\"\n \"datetime'2013-09-01T10:57')\" % f)\n self.fail(\"2 parameters\")\n except odata.EvaluationError:\n pass", "def test_get_date_format_code(self):\n\n test_format = self.test_format\n start, end = custom_date.get_date_code_span(\"Y\", test_format)\n self.assertEqual(start, 5)\n self.assertEqual(end, 9)\n\n start, end = custom_date.get_date_code_span(\"H\", test_format)\n self.assertEqual(start, 15)\n self.assertEqual(end, 17)", "def converttime(time, currentformat, newformat):\n\n # Define conversion dictionary\n conversions = {\n \"milliseconds\": {\n \"milliseconds\": \"time\",\n \"seconds\": \"time / 1000\",\n \"minutes\": \"time / 1000 / 60\",\n \"hours\": \"time / 1000 / 60 / 60\",\n \"days\": \"time / 1000 / 60 / 60 / 24\",\n \"weeks\": \"time / 1000 / 60 / 60 / 24 / 7\",\n \"fortnights\": \"time / 1000 / 60 / 60 / 24 / 14\",\n \"years\": \"time / 1000 / 60 / 60 / 24 / 365\",\n \"decades\": \"time / 1000 / 60 / 60 / 24 / 365 / 10\",\n \"centuries\": \"time / 1000 / 60 / 60 / 24 / 365 / 100\",\n \"millenniums\": \"time / 1000 / 60 / 60 / 24 / 365 / 1000\"\n },\n \"seconds\": {\n \"milliseconds\": \"time * 1000\",\n \"seconds\": \"time\",\n \"minutes\": \"time / 60\",\n \"hours\": \"time / 60 / 60\",\n \"days\": \"time / 60 / 60 / 24\",\n \"weeks\": \"time / 60 / 60 / 24 / 7\",\n \"fortnights\": \"time / 60 / 60 / 24 / 14\",\n \"years\": \"time / 60 / 60 / 24 / 365\",\n \"decades\": \"time / 60 / 60 / 24 / 365 / 10\",\n \"centuries\": \"time / 60 / 60 / 24 / 365 / 100\",\n \"millenniums\": \"time / 60 / 60 / 24 / 365 / 1000\"\n },\n \"minutes\": {\n \"milliseconds\": \"time * 60 * 1000\",\n \"seconds\": \"time * 60\",\n \"minutes\": \"time\",\n \"hours\": \"time / 60\",\n \"days\": \"time / 60 / 24\",\n \"weeks\": \"time / 60 / 24 / 7\",\n \"fortnights\": \"time / 60 / 24 / 14\",\n \"years\": \"time / 60 / 24 / 365\",\n \"decades\": \"time / 60 / 24 / 365 / 10\",\n \"centuries\": \"time / 60 / 24 / 365 / 100\",\n \"millenniums\": \"time / 60 / 24 / 365 / 1000\"\n },\n \"hours\": {\n 
\"milliseconds\": \"time * 60 * 60 * 1000\",\n \"seconds\": \"time * 60 * 60\",\n \"minutes\": \"time * 60\",\n \"hours\": \"time\",\n \"days\": \"time / 24\",\n \"weeks\": \"time / 24 / 7\",\n \"fortnights\": \"time / 24 / 14\",\n \"years\": \"time / 24 / 365\",\n \"decades\": \"time / 24 / 365 / 10\",\n \"centuries\": \"time / 24 / 365 / 100\",\n \"millenniums\": \"time / 24 / 365 / 1000\"\n },\n \"days\": {\n \"milliseconds\": \"time * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 24 * 60 * 60\",\n \"minutes\": \"time * 24 * 60\",\n \"hours\": \"time * 24\",\n \"days\": \"time\",\n \"weeks\": \"time / 7\",\n \"fortnights\": \"time / 14\",\n \"years\": \"time / 365\",\n \"decades\": \"time / 365 / 10\",\n \"centuries\": \"time / 365 / 100\",\n \"millenniums\": \"time / 365 / 1000\"\n },\n \"weeks\": {\n \"milliseconds\": \"time * 7 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 7 * 24 * 60 * 60\",\n \"minutes\": \"time * 7 * 24 * 60\",\n \"hours\": \"time * 7 * 24\",\n \"days\": \"time * 7\",\n \"weeks\": \"time\",\n \"fortnights\": \"time / 2\",\n \"years\": \"time / 52\",\n \"decades\": \"time / 52 / 10\",\n \"centuries\": \"time / 52 / 100\",\n \"millenniums\": \"time / 52 / 1000\"\n },\n \"fortnights\": {\n \"milliseconds\": \"time * 14 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 14 * 24 * 60 * 60\",\n \"minutes\": \"time * 14 * 24 * 60\",\n \"hours\": \"time * 14 * 24\",\n \"days\": \"time * 14\",\n \"weeks\": \"time * 2\",\n \"fortnights\": \"time\",\n \"years\": \"time / 26\",\n \"decades\": \"time / 26 / 10\",\n \"centuries\": \"time / 26 / 100\",\n \"millenniums\": \"time / 26 / 1000\"\n },\n \"years\": {\n \"milliseconds\": \"time * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 256 * 24 * 60\",\n \"hours\": \"time * 256 * 24\",\n \"days\": \"time * 256\",\n \"weeks\": \"time * 52\",\n \"fortnights\": \"time * 26\",\n \"years\": \"time\",\n \"decades\": \"time / 10\",\n \"centuries\": \"time / 100\",\n \"millenniums\": \"time / 1000\"\n },\n \"decades\": {\n \"milliseconds\": \"time * 10 * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 10 * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 10 * 256 * 24 * 60\",\n \"hours\": \"time * 10 * 256 * 24\",\n \"days\": \"time * 10 * 256\",\n \"weeks\": \"time * 10 * 52\",\n \"fortnights\": \"time * 10 * 26\",\n \"years\": \"time * 10\",\n \"decades\": \"time\",\n \"centuries\": \"time / 10\",\n \"millenniums\": \"time / 100\"\n },\n \"centuries\": {\n \"milliseconds\": \"time * 100 * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 100 * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 100 * 256 * 24 * 60\",\n \"hours\": \"time * 100 * 256 * 24\",\n \"days\": \"time * 100 * 256\",\n \"weeks\": \"time * 100 * 52\",\n \"fortnights\": \"time * 100 * 26\",\n \"years\": \"time * 100\",\n \"decades\": \"time * 10\",\n \"centuries\": \"time\",\n \"millenniums\": \"time / 10\"\n },\n \"millenniums\": {\n \"milliseconds\": \"time * 1000 * 256 * 24 * 60 * 60 * 1000\",\n \"seconds\": \"time * 1000 * 256 * 24 * 60 * 60\",\n \"minutes\": \"time * 1000 * 256 * 24 * 60\",\n \"hours\": \"time * 1000 * 256 * 24\",\n \"days\": \"time * 1000 * 256\",\n \"weeks\": \"time * 1000 * 52\",\n \"fortnights\": \"time * 1000 * 26\",\n \"years\": \"time * 1000\",\n \"decades\": \"time * 100\",\n \"centuries\": \"time * 10\",\n \"millenniums\": \"time\"\n }\n }\n\n # Return evaluated value\n return eval(conversions[currentformat][newformat])", "def times_filter(d, times, 
meets_criteria=matches_timestr):\n mapping = map(type, times)\n if [ str, type(None), type(None) ] == mapping and meets_criteria(times[0]):\n d1 = doytimestr_to_datetime('%d:%s:00' % (d[0].year, times[0].replace('/',':')))\n #return '%s' % d1\n return d1, d1, 0\n elif [ str, str, type(None) ] == mapping and meets_criteria(times[0]) and meets_criteria(times[1]):\n d1 = doytimestr_to_datetime('%d:%s:00' % (d[0].year, times[0].replace('/',':')))\n d2 = doytimestr_to_datetime('%d:%s:00' % (d[1].year, times[1].replace('/',':')))\n #return '%s to %s' % (d1, d2)\n return d1, d2, timedelta_hours(d2-d1)\n else:\n #return ''\n return None, None, None", "def test_short_format_contains_year(self):\n locale = {\n 'timeformat': '%H:%M',\n 'dateformat': '%Y-%m-%d',\n 'longdateformat': '%Y-%m-%d',\n 'datetimeformat': '%Y-%m-%d %H:%M',\n 'longdatetimeformat': '%Y-%m-%d %H:%M',\n }\n assert (dt.datetime(2017, 1, 1), True) == guessdatetimefstr(\n '2017-1-1'.split(), locale=locale, default_day=dt.datetime.today())\n assert (dt.datetime(2017, 1, 1, 16, 30), False) == guessdatetimefstr(\n '2017-1-1 16:30'.split(), locale=locale, default_day=dt.datetime.today())", "def sec_to_hm(t):\n t = int(t)\n s = t % 60\n t //= 60\n m = t % 60\n t //= 60\n return t, m, s", "def scaledTime():\n #return (time.gmtime().tm_wday, time.gmtime().tm_hour)\n epoch = time.strptime(\"2013-02-21 11:30:00\", \"%Y-%m-%d %H:%M:%S\")\n timeInSec = time.mktime(time.gmtime()) - time.mktime(epoch)\n hourSince = timeInSec / Predictor.hourScale\n day = int(hourSince / 24 % 7)\n hour = int(hourSince % 24)\n return (day, hour)", "def check_hour_range(self, hour):\n if 0 <= hour <= 5:\n return 'Early Morning'\n if 6 <= hour <= 11:\n return 'Day Time'\n if 12 <= hour <= 17:\n return 'Afternoon'\n if 18 <= hour <= 23:\n return 'Evening'", "def _set_time(line, old_time, swap_time):\n line = re.sub(str(old_time), str(swap_time), line, 2)\n return line", "def convert_times(value):\r\n day_patern = re.compile('\\d{4}-\\d{2}-\\d{2}')\r\n week_pattern = re.compile('\\d{4}-W\\d{2}')\r\n month_pattern = re.compile('\\d{4}-\\d{2}')\r\n year_pattern = re.compile('\\d{4}')\r\n\r\n if re.match(day_patern, value):\r\n date = datetime.strptime(value, '%Y-%m-%d')\r\n end = date + timedelta(days=1)\r\n return date, end\r\n elif re.match(week_pattern, value):\r\n date = datetime.strptime(value + '-1', '%Y-W%W-%w')\r\n end = date + timedelta(days=7)\r\n return date, end\r\n elif re.match(month_pattern, value):\r\n date = datetime.strptime(value, '%Y-%m')\r\n if date.month == 12:\r\n end = date.replace(year=date.year + 1, month=1)\r\n else:\r\n end = date.replace(month=date.month + 1)\r\n return date, end\r\n elif re.match(year_pattern, value):\r\n date = datetime.strptime(value, '%Y')\r\n end = date.replace(year=date.year + 1)\r\n return date, end\r\n else:\r\n raise ValueError('Date not recognised')", "def test_date_by_yr(self):\n spi_search = \"find date 2002\"\n inv_search = \"year:2002\"\n self._compare_searches(inv_search, spi_search)", "def sanitize(time_string): # Fix non-uniformity in the athletes data to enable sorting\n if '-' in time_string:\n splitter = '-'\n (mins, secs) = time_string.split(splitter)\n elif ':' in time_string:\n splitter = ':'\n (mins, secs) = time_string.split(splitter)\n else:\n return time_string\n return '{0}.{1}'.format(mins, secs)", "def fix_time_fields(self):\n time_fields = {\"Time of day\": lambda time: time.hour, \"Time of year (month)\": lambda time: time.month}\n for time_field in time_fields.keys():\n for i in 
range(self.df.shape[0]):\n value = self.df[time_field][i]\n if type(value) is datetime.time or type(value) is datetime.datetime:\n self.df[time_field].loc[i] = time_fields[time_field](value)", "def year_tracker(words):\n new_words = []\n for w in words:\n new_word = re.sub(r\"^[1][789][0-9]{2}$\", \"jahreszahl\", w) # for 1700-1999\n new_word = re.sub(r\"^[2][01][0-9]{2}$\", \"jahreszahl\", new_word) # for 2000-2199\n new_words += [new_word]\n return new_words", "def get_year(parameters_dictionary):\n if \"start-year\" in parameters_dictionary.keys():\n year = int(parameters_dictionary[\"start-year\"])\n return str(year) + str(year + 1)\n elif \"end-year\" in parameters_dictionary.keys():\n year = int(parameters_dictionary[\"end-year\"])\n return str(year - 1) + str(year)\n else:\n return str(THIS_YEAR - 1) + str(THIS_YEAR)", "def parse_time(expr):\n # first deal with hour\n hsp = expr.lower().split('h')\n if len(hsp) > 1: h = int(hsp[0])\n else: h = 0\n # now hour is out of the way\n expr = hsp[-1]\n msp = expr.lower().split('m')\n if len(msp) > 1: m = int(msp[0])\n else: m = 0\n return f\"{h:02d}:{m:02d}:00\"", "def translate_years(val):\n if val.find(\"-\") > 0:\n tokens = re.findall(\"[0-9]+\", val)\n one = int(tokens[0])\n two = int(tokens[1])\n one = (1900 + one) if one > 50 else (2000 + one)\n two = (1900 + two) if two > 50 else (2000 + two)\n return range(one, two + 1)\n tokens = re.findall(\"[0-9]+\", val)\n return [int(f\"{'19' if int(t) > 50 else '20'}{t}\") for t in tokens]", "def adjust_date(cmd_args):\n valid_times_6 = [0, 6, 12, 18]\n valid_times_24 = [0, 12]\n\n date_in = datetime.strptime(cmd_args.date, '%Y-%m-%d-%H')\n\n if (cmd_args.period == 99):\n \"\"\"\n Seasonal accumulation\n Set the ending hour to 12z and decrement the day, if necessary\n \"\"\"\n if (date_in.hour < 12):\n date_in = date_in.replace(day = date_in.day - 1)\n date_in = date_in.replace(hour = 12)\n elif (cmd_args.period == 6):\n \"\"\"\n 6-hr accumulation\n Set the hour to the previous synoptic time if necessary\n \"\"\"\n if (date_in.hour not in valid_times_6):\n new_hr = max([i for i in valid_times_6 if date_in.hour > i])\n date_in = date_in.replace(hour = new_hr)\n else:\n if (date_in.hour not in valid_times_24):\n new_hr = max([i for i in valid_times_24 if date_in.hour > i])\n date_in = date_in.replace(hour = new_hr)\n\n return date_in", "def make_release_time(date_time, hour, release):\n release_h = int(release[:2])\n release_m = int(release[2:4])\n \n if release_h == 99:\n return 0 #largest integer number int 64 \n \n else:\n if release_m == 99:\n release_m = 0\n release_date_time = date_time.replace(hour= release_h, minute= release_m) \n \n \"\"\" Here, I have to subtract one day to the release time stamp if the hour of the time stamp is in th evening,\n but the nominal time is reported at midnight hence in the following day. 
For example 2019 02 20 00 2349 from file VMM00048820 \"\"\"\n if hour == '00':\n if release_h > 20:\n release_date_time = release_date_time - timedelta(days=1)\n else:\n pass\n \n return release_date_time", "def convert_time(time):\n\n s = time.split()[0]\n s_h = int(s.split(':')[0])\n\n am_pm = s.split(':')[1][-2:]\n if s_h == 12:\n s_h = s_h - 12\n if am_pm == 'PM':\n s_h = s_h + 12\n s_h = s_h + 1\n\n e = time.split()[2]\n e_h = int(e.split(':')[0])\n\n am_pm = e.split(':')[1][-2:]\n if e_h == 12:\n e_h = e_h - 12\n if am_pm == 'PM':\n e_h = e_h + 12\n e_h = e_h + 1\n\n hour_list = range(s_h, e_h + 1)\n return hour_list", "def yt_datetime(yt_date_time):\n time_obj = time.strptime(yt_date_time, \"%Y-%m-%dT%H:%M:%S.%fZ\")\n locale_date = time.strftime(\"%x\", time_obj)\n # strip first two digits of four digit year\n short_date = re.sub(r\"(\\d\\d\\D\\d\\d\\D)20(\\d\\d)$\", r\"\\1\\2\", locale_date)\n return time_obj, short_date", "def generate_hour_header():\n cf = config.Config()\n outstr = \"total_rotation,total_acceleration,total_distance,number_missing,\"\n outstr += \"oc1_time,oc2_time,oc3_time,oc4_time,oc5_time,oc6_time,oc7_time,\"\n outstr += \"oc8_time,oc9_time,oc10_time,oc11_time,oc12_time,oc13_time,\"\n outstr += \"oc14_time,oc15_time,oc16_time,oc17_time,oc18_time,oc19_time,\"\n outstr += \"oc20_time,oc21_time,oc22_time,oc23_time,oc24_time,oc25_time,\"\n outstr += \"oc26_time,oc27_time,oc28_time,oc29_time,oc30_time,oc31_time,\"\n outstr += \"oc32_time,oc33_time,\"\n anames = cf.activity_list\n for i in range(len(anames)):\n outstr += anames[i] + \"_time,\"\n outstr += \"attraction_time,house_time,\"\n outstr += \"restaurant_time,road_time,service_time,store_time,work_time,\"\n outstr += \"other_time\"\n return outstr", "def _hour_to_time(num: int):\n return datetime.datetime.now().replace(hour=num).strftime(\"%-I %p\")", "def _date_match_to_int_or_tuple(self, m):\n\n years = set()\n try:\n if m[0] != '':\n # yyyy-dd-mm\n years = int(m[0][0:4])\n elif m[1] != '':\n # year-range derived from a century\n century = int(re.match(re.compile('\\\\d+'), m[1]).group(0))\n\n match = re.compile(self.regexes['capture']['century-plus-suffix']).match(m[1])\n suffix = match.group(2)\n if suffix:\n if re.compile(self.regexes['match']['suffix-bce']).match(suffix) is not None:\n years = (100 * -century, 100 * -century + 99)\n else:\n years = (100 * (century - 1), 100 * (century - 1) + 99)\n else:\n years = (100 * (century - 1), 100 * (century - 1) + 99)\n elif m[2] != '':\n # explicit year-range\n\n # FIXME: spaghetti code, but it works!\n range_of_stuff = []\n i = 0\n first_none = None\n for y in re.sub(self.regexes['substitution']['year-year-splitter'], r'\\1>|<\\2', m[2]).split('>|<'):\n # get rid of whitespace\n y = y.strip()\n match = re.compile(self.regexes['capture']['year']).match(y)\n\n # if there is a suffix, one of these will not be None\n suffix = match.group(2) or match.group(4)\n if suffix:\n if i == 0:\n first_none = False\n\n if re.compile(self.regexes['match']['suffix-bce']).match(suffix) is not None:\n range_of_stuff.append(-1 * int(match.group(1) or match.group(3)))\n else:\n range_of_stuff.append(int(match.group(1) or match.group(3)))\n else:\n if i == 0:\n first_none = True\n range_of_stuff.append(int(match.group(1) or match.group(3)))\n\n i += 1\n if first_none:\n if range_of_stuff[1] <= 0:\n range_of_stuff[0] = -1 * range_of_stuff[0]\n\n years = (range_of_stuff[0], range_of_stuff[1])\n elif m[3] != '':\n # extract single year\n prep = 
re.sub(self.regexes['substitution']['dd-mon-year-time'], r'\\1', m[3]).strip()\n years = int(prep)\n elif m[4] != '':\n # year with unknown ones\n y = m[4].strip()\n match = re.compile(r'[1-9]\\d{3}').match(y)\n if match is None:\n years = int(self._resolve_unknown_ones(y))\n\n else:\n years = int(match.group(0))\n\n elif m[5] != '':\n # plain old year\n match = re.compile(self.regexes['capture']['year']).match(m[5])\n suffix = match.group(2) or match.group(4)\n if suffix:\n if re.compile(self.regexes['match']['suffix-bce']).match(suffix) is not None:\n years = -1 * int(match.group(1) or match.group(3))\n else:\n years = int(match.group(1) or match.group(3))\n else:\n years = int(match.group(1) or match.group(3))\n else:\n raise Error\n\n except ValueError as e:\n #logger.error('An error occurred while trying to match \"{}\": {}'.format(m, e))\n pass\n\n #logger.debug('Mapping match to years: {} -> {}'.format(m, years))\n return years", "def interpret_time( text ):\n app.logger.debug(\"Decoding time '{}'\".format(text))\n time_formats = [\"ha\", \"h:mma\", \"h:mm a\", \"H:mm\"]\n try: \n as_arrow = arrow.get(text, time_formats).replace(tzinfo=tz.tzlocal())\n as_arrow = as_arrow.replace(year=2016) #HACK see below\n app.logger.debug(\"Succeeded interpreting time\")\n except:\n app.logger.debug(\"Failed to interpret time\")\n flask.flash(\"Time '{}' didn't match accepted formats 13:30 or 1:30pm\"\n .format(text))\n raise\n return as_arrow.isoformat()\n #HACK #Workaround\n # isoformat() on raspberry Pi does not work for some dates\n # far from now. It will fail with an overflow from time stamp out\n # of range while checking for daylight savings time. Workaround is\n # to force the date-time combination into the year 2016, which seems to\n # get the timestamp into a reasonable range. 
This workaround should be\n # removed when Arrow or Dateutil.tz is fixed.\n # FIXME: Remove the workaround when arrow is fixed (but only after testing\n # on raspberry Pi --- failure is likely due to 32-bit integers on that platform)", "def find_year(self,datelimits): \t \t \n year = \"\"\n\n\t \tmatch = re.search(r\"[I1][\\dG]{3}\",self.string)\n\n\t \tif match: \t \t \t \n\t \t\tif re.search(r\"(\\d{4})\",match.group()):\n\t \t\t\tyear = match.group()\n elif re.search(r\"I\\d{3}\",match.group()):\n\t \t\t\tmatch = re.sub(r\"I(\\d{3})\",r\"1\\1\",match.group())\n\t \t\t\tyear = match\n\t \t\telif re.search(r\"(\\d[G\\d]{3})\",match.group()):\n\t \t\t\tmatch = re.sub(r\"G\",r\"6\",match.group())\n\t \t\t\tyear = match\n\n \n if year == \"\" or int(year) < datelimits[0] or int(year) > datelimits[1]:\n year = \"\"\n \n\n\t \treturn year", "def _add_time_field(self) -> None:\n self.data[\"time\"] = [datetime(int(yyyy), int(mm), int(dd)) + timedelta(hours=hh) for yyyy, mm, dd, hh in zip(self.data[\"year\"], self.data[\"month\"], self.data[\"day\"], self.data[\"hour\"])]\n for key in [\"year\", \"doy\", \"month\", \"day\", \"hour\"]:\n del self.data[key]", "def format_time(t):\n m, s = divmod(t, 60)\n h, m = divmod(m, 60)\n if h:\n return f\"{h:2.0f}hr {m:2.0f}min {s:4.1f}s\"\n elif m:\n return f\"{m:2.0f}min {s:4.1f}s\"\n else:\n return f\"{s:4.1f}s\"", "def format_time(self, time):\n hh = time[0:2]\n mm = time[2:4]\n ss = time[4:]\n return \"%s:%s:%s UTC\" % (hh,mm,ss)", "def test_date_by_yr_mo(self):\n spi_search = \"find date 1976-04\"\n inv_search = 'year:1976-04'\n self._compare_searches(inv_search, spi_search)", "def finish_hour(self):\n\t\tassert len(self.values) >= 4, 'A fully formed update date is needed.'\n\t\tself.values = self.values[:4]", "def fix_years(self, row):\n raise NotImplementedError", "def set_ymd_hms(self):\n file_name = os.path.basename(self.in_file)\n pat = u'\\w+.A(\\d{4})(\\d{3}).(\\d{4}).\\d{3}.\\d+.hdf$'\n g = re.match(pat, file_name)\n if g:\n tt = JDay2Datetime(g.group(1), g.group(2), g.group(3) + '00')\n self.ymd = tt.strftime('%Y%m%d')\n self.hms = g.group(3) + '00'\n else:\n raise ValueError('Cant get the ymdhms from file name.')", "def times_to_axis(times):\r\n #Hour mode if there are 24 times and no date information\r\n hour_mode = len(times) == 24 and np.all(times[:, 0:3] == 0)\r\n \r\n #If hour mode, convert to 24-hour strings\r\n if hour_mode:\r\n axis_times = [hour_format.format(*time[3:5])\r\n for time in times]\r\n #Else, convert times to \"datetime\" objects\r\n else:\r\n axis_times = [datetime.strptime(date_format.format(*time), \r\n \"%Y-%m-%dT%H:%M:%S\")\r\n for time in times]\r\n\r\n #Return converted times and whether times are in hour mode\r\n return (axis_times, hour_mode)", "def conv_time(l, h):\n\t# Function modified from post on ActiveState by John Nielsen\n\n\t#converts 64-bit integer specifying the number of 100-nanosecond\n\t#intervals which have passed since January 1, 1601.\n\t#This 64-bit value is split into the\n\t#two 32 bits stored in the structure.\n\td = 116444736000000000L \n\n\t# Some LNK files do not have time field populated \n\tif l + h != 0:\n\t\tnewTime = (((long(h) << 32) + long(l)) - d)/10000000 \n\telse:\n\t\tnewTime = 0\n\n\treturn time.strftime(\"%Y/%m/%d %H:%M:%S %a\", time.localtime(newTime))", "def ftest_datetime_replace(self,formatresource):\n formatter = formatresource.formatter\n result_type = formatresource.result_type\n\n if not issubclass(result_type,datetime.date):\n return\n\n for i, (data, expected) in 
enumerate(formatresource.data_result_list):\n if expected is None:\n continue\n\n result = formatter.format(data)\n new = result.replace(year=result.year)\n\n if type(formatter) is Formatters.LooseDatetimeFormatter:\n assert(new.format_str)\n else:\n assert(new.format_str == result.format_str)", "def interpret_time(text):\n app.logger.debug(\"Decoding time '{}'\".format(text))\n time_formats = [\"ha\", \"h:mma\", \"h:mm a\", \"H:mm\"]\n try:\n as_arrow = arrow.get(text, time_formats).replace(tzinfo=tz.tzlocal())\n as_arrow = as_arrow.replace(year=2016) # HACK see below\n app.logger.debug(\"Succeeded interpreting time\")\n except:\n app.logger.debug(\"Failed to interpret time\")\n flask.flash(\"Time '{}' didn't match accepted formats 13:30 or 1:30pm\"\n .format(text))\n raise\n return as_arrow.isoformat()\n # HACK Workaround\n # isoformat() on raspberry Pi does not work for some dates\n # far from now. It will fail with an overflow from time stamp out\n # of range while checking for daylight savings time. Workaround is\n # to force the date-time combination into the year 2016, which seems to\n # get the timestamp into a reasonable range. This workaround should be\n # removed when Arrow or Dateutil.tz is fixed.\n # FIXME: Remove the workaround when arrow is fixed (but only after testing\n # on rasp Pi failure is likely due to 32-bit integers on that platform)", "def _unit_yr(self):\n return ((self.time_base * 60.0) * 24.0) * 365.0", "def replace(self, year=0, month=0, day=0, hour=0, minute=0, second=0, microsecond=0, tzinfo=0):\n return self", "def time_extractor(time_div):\n now = datetime.now(timezone)\n if \"giờ\" in time_div:\n delta = int(re.search('\\d{1,2}').group())\n return int(datetime.timestamp(now - timedelta(hours=delta)))\n if \"lúc\" in time_div:\n time = re.search(\"(?<= )\\d{1,2}:\\d{1,2}\", time_div).group()\n if \"Hôm qua\" in time_div:\n delta = (datetime.strptime(\n f'{now.hour}:{now.minute}', '%H:%M') - datetime.strptime(time, '%H:%M')).total_seconds()\n return int(datetime.timestamp(now - timedelta(days=1))-delta)\n date = re.findall(\"\\d{1,4}\", time_div)\n print(date)\n if len(date) < 5:\n date[2] = now.year\n return int(datetime.timestamp(datetime.strptime(f\"{date[0]} {date[1]} {date[2]} {time}\", \"%d %m %Y %H:%M\")))\n date = re.findall(\"\\d{1,4}\", time_div)\n if len(date) < 3:\n date.append(now.year)\n return int(datetime.timestamp(datetime.strptime(f\"{date[0]} {date[1]} {date[2]}\", \"%d %m %Y\")))", "def set_ymd_hms(self):\n file_name = os.path.basename(self.in_file)\n pat = u'\\w+.A(\\d{4})(\\d{3}).(\\d{4}).*'\n# MOD021KM.A2014181.0035.005.Ref_Tbb.44Ch.Hdr\n g = re.match(pat, file_name)\n if g:\n tt = JDay2Datetime(g.group(1), g.group(2), g.group(3) + '00')\n self.ymd = tt.strftime('%Y%m%d')\n self.hms = g.group(3) + '00'\n else:\n raise ValueError('Cant get the ymdhms from file name.')", "def str2date(str_in, format_in=\"yyyy-mm-dd\", typeout=None):\n if type(str_in) is not str:\n raise TypeError(\"str_in must be a str\")\n if type(format_in) is not str:\n raise TypeError(\"format_in must be a str\")\n if typeout is not dt.date and typeout is not dt.datetime and typeout is not None:\n raise TypeError(\"typeout must be datetime.date, datetime.datetime, or None\")\n\n # Parse the parts of the date (year, month, day, hour, minute, second)\n # Three letter month (Jan, Feb, etc) and two number years need special\n # handling\n\n # First check if the string AM or PM is present in the input date time string,\n # this will indicate that the hour needs to be 
adjusted\n ampm_match = re.search(\"[aApP][mM]\", str_in)\n if ampm_match is None:\n hr24_bool = True\n else:\n hr24_bool = False\n ampm_str = ampm_match.group(0).lower()\n\n\n # Try finding four number year first (because 'yy' will match 'yyyy' too)\n # If there isn't even a two letter year, set the year to the current one\n yr = __substr_to_int(str_in, format_in, \"yyyy\")\n if yr == 0:\n yr = __substr_to_int(str_in, format_in, \"yy\", def_val=None) # must use None because 00 is a legitimate year\n if yr is None:\n yr = dt.date.today().year\n else:\n curr_yr_tmp = dt.date.today().year\n curr_yr = curr_yr_tmp % 100\n curr_century = curr_yr_tmp - curr_yr\n if yr <= curr_yr:\n yr = yr + curr_century\n else:\n yr = yr + curr_century - 100\n\n # Similar approach for months, except that for three letter months, we\n # need to convert from name to number. Default to Jan if no month given\n mn = __substr_to_month(str_in, format_in) # always searches for \"mmm\"\n if mn is None:\n mn = __substr_to_int(str_in, format_in, \"mm\")\n if mn == 0:\n mn = 1\n\n # Similar again for day, except that it can only ever be specified as \"dd\"\n dy = __substr_to_int(str_in, format_in, \"dd\")\n if dy == 0:\n dy = 1\n\n # Hour, minute, and second are easier because they can be 0...\n hour = __substr_to_int(str_in, format_in, \"HH\")\n # ... but hour needs to handle AM/PM. Afternoon needs 12 hours added\n # (1:00 PM = 1300 hr, 8:00 = 2000 hr, etc) but noon should stay 12 and\n # midnight (12 AM) should become hour = 0\n if not hr24_bool:\n if hour < 1 or hour > 12:\n raise ValueError(\"If using AM/PM format, hour must be between 1 and 12\")\n elif hour != 12 and ampm_str == \"pm\":\n hour += 12\n elif hour == 12 and ampm_str == \"am\":\n hour = 0\n minute = __substr_to_int(str_in, format_in, \"MM\")\n second = __substr_to_int(str_in, format_in, \"SS\")\n\n # If no type out specified, it will be date only if hour, minute, and second\n # are all 0\n if typeout is None:\n if hour == 0 and minute == 0 and second == 0:\n typeout = dt.date\n else:\n typeout = dt.datetime\n\n if typeout is dt.date:\n return dt.date(yr, mn, dy)\n elif typeout is dt.datetime:\n return dt.datetime(yr, mn, dy, hour, minute, second)\n else:\n raise RuntimeError(\"Not implemented: typeout other than datetime.date or datetime.datetime\")", "def from_minutes_and_halves(s):\n s = s.strip()\n half = s.endswith(\"H\")\n s = s.strip(\"H \")\n \n return (int(s) * 60 if s else 0) + (30 if half else 0)", "def replace_timestr(t):\n if isinstance(t, float):\n return None\n if '-' == t:\n return None\n return t", "def format_12hr(minutes):\n hours = (minutes // 60) % 24\n minutes %= 60\n #am_pm = 'AM' if hours < 12 else 'PM'\n #hours = (hours - 1) % 12 + 1\n return '{}:{:02}'.format(hours, minutes)\n #return '{}:{:02} {}'.format(hours, minutes, am_pm)", "def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None):", "def _year_range(m):\n return (m.group(1), m.group(2))", "def parse_year(txt):\n\n txt = txt.strip()\n if \"-\" in txt:\n res = re.sub('[^0-9]', '', txt)\n return [res[0:4], res[4:8]]\n else:\n return [txt, txt]", "def date_to_term(date):\n term = \"H\"\n if date.month < 6:\n term = \"V\"\n elif date.month < 8 or date.month == 8 and date.day < 20:\n term = \"S\"\n return f\"{term}{date.year % 100}\"", "def dechours(self, yr, mn, d, t):\n dt = datetime(int(yr), int(mn), int(d))\n if self.epochhours == 0:\n self.epochhours = self.datehours(dt)\n\n e = self.datehours(dt)\n result=-1.0\n if len(t) > 0:\n (h, m) = 
t.split(':')\n fh = float(h)\n fm = float(m)\n r = fh + fm/60.0\n result = e + round(r, 2)\n\n # print \"dechours: \" + str(result) + \" yr=\" + str(yr) + \" mn=\" + str(mn) + \" d=\" + str(d) + \" \" + t\n return result", "def replace(\n self,\n hour=None,\n minute=None,\n second=None,\n microsecond=None,\n tzinfo=True,\n *,\n fold=None,\n ):\n if hour is None:\n hour = self.hour\n if minute is None:\n minute = self.minute\n if second is None:\n second = self.second\n if microsecond is None:\n microsecond = self.microsecond\n if tzinfo is True:\n tzinfo = self.tzinfo\n if fold is None:\n fold = self._fold\n return time(hour, minute, second, microsecond, tzinfo, fold=fold)", "def change_time_units(var):\n century18 = dt.datetime(1800,1,1,0)\n #for i,j in enumerate(var[:]):\n # date = dt.datetime.utcfromtimestamp(j)\n # seconds = (date - century18).total_seconds()\n # hours = int( seconds / 60 / 60 )\n # var[i] = hours\n def change_unit(date):\n date = dt.datetime.utcfromtimestamp(date)\n seconds = (date - century18).total_seconds()\n hours = int( seconds / 60 / 60 )\n return hours\n\n vfunc = np.vectorize(change_unit)\n new_data = vfunc(var[:])\n var[:] = new_data\n setattr(var, 'standard_name', \"time\")\n setattr(var, 'long_name', \"time\")\n setattr(var, \"units\",\"hours since 1800-01-01 00:00:00.0\")\n setattr(var, \"calendar\", \"proleptic_gregorian\")\n return var", "def test_interval_to_seconds_with_years(self):\n self.assert_interval_to_seconds(0, \"0y\", \"0year\", \"0years\")\n self.assert_interval_to_seconds(31536000, \"1y\", \"1year\", \"1years\")\n self.assert_interval_to_seconds(5 * 31536000, \"5y\", \"5year\", \"5years\")\n self.assert_interval_to_seconds(\n 123 * 31536000, \"123y\", \"123year\", \"123years\")\n self.assert_interval_to_seconds(\n 2 * 31536000, \"02y\", \"02year\", \"02years\")", "def hour(n=0):\r\n now = datetime.datetime.now()\r\n h = now + datetime.timedelta(hours=n)\r\n return h.strftime('%Y%m%d'), h.strftime('%Y%m%d%H')", "def sectoFracYear(stime):\n\n ltime = convertCtimeToYdate(stime)\n atemp = re.split(':', ltime)\n year= int(atemp[0])\n ydate = int(atemp[1])\n hours = int(atemp[2])\n minutes = int(atemp[3])\n seconds = int(atemp[4])\n \n chk = 4.0 * int(0.25 * year)\n if chk == year:\n base = 366\n else:\n base = 365\n \n day = ydate + hours / 24.0 + minutes / 1440.0 + seconds / 86400.0\n \n return year + day / base", "def getISOTimeElts(dtStr):\n\n match = ISO_DT_RE.match(dtStr)\n if match: year, month, day, hour, minute, second = map(int,match.groups())\n else:\n match = ISO_DT_ONLY_RE.match(dtStr)\n if match:\n year, month, day = map(int,match.groups())\n hour, minute, second = 0,0,0\n else: raise RuntimeError(\"Failed to recognize date format: %s\" % dtStr)\n return year, month, day, hour, minute, second", "def format_time(t):\r\n # again, workaround dateformat input requirement\r\n dt = aware_datetime(2000, 1, 1, t.hour, t.minute, t.second)\r\n return dateformat.format(dt, 'H:i:s O')", "def hmstora(rah,ram,ras):\n\thrs = (float(rah)+(float(ram)/60)+(float(ras)/3600.0)) % 24\n\n\treturn 15*hrs", "def formatSRTTime(self, secTime):\n sec, micro = str(secTime).split('.')\n m, s = divmod(int(sec), 60)\n h, m = divmod(m, 60)\n return \"{:02}:{:02}:{:02},{}\".format(h,m,s,micro)", "def search_year(self,strz):\t\n\t\tyr_pattern = compile(\"(19[56789]\\d|20[01]\\d)\")\n\t\tyr = yr_pattern.search(strz)\t\t\n\t\tif yr is None:\n\t\t\treturn strz\t#not find\n\t\telse:\n\t\t\tyr= yr.group(1)\n\t\t\tself.release_year=yr\n\t\t\treturn strz.replace(yr,\"\")", 
"def _adjustHour(hour: int, meridiem: str) -> int:\r\n if(meridiem == \"PM\" and hour < 12):\r\n hour += 12\r\n elif(meridiem == \"AM\" and hour == 12):\r\n hour = 0\r\n return hour", "def Data_formatting(dz, y, timestep, TZ):\r\n \r\n #reindex data by datetime\r\n dz.index = pd.to_datetime(dz['DATE'])\r\n \r\n #Isolate temperature data\r\n dz = dz[['TMP']]\r\n \r\n #Delete data mistake\r\n dz = dz[dz['TMP'] != \"+9999,9\"]\r\n \r\n #Format data\r\n dz['TMP'] = dz['TMP'].str.replace(',', '.')\r\n dz['TMP'] = pd.to_numeric(dz['TMP'], errors='coerce')\r\n \r\n #Delete NaN data\r\n dz = dz.dropna()\r\n \r\n #Convert temperature\r\n dz['TMP'] = dz['TMP'] / 10\r\n dz['TMP'] = dz['TMP'] * (9/5) + 32\r\n \r\n #Convert datetime index utc to specified timezone\r\n dz.index = dz.index.tz_localize(pytz.utc).tz_convert(pytz.timezone(str(TZ))).strftime(\"%Y-%m-%d %H:%M:%S\")\r\n dz.index = pd.to_datetime(dz.index)\r\n \r\n #Resample data by average on timestep\r\n dz = dz.resample(rule = str(timestep)).mean()\r\n \r\n #Define the first date of the instance year\r\n fdy = dt.datetime.strptime(\"01/01/\"+str(y)+\" 00:00\", '%m/%d/%Y %H:%M')\r\n #Convert first date of the year to timezone\r\n fdy = Date_calibration(fdy, 0, TZ)\r\n \r\n #If we collect the date from the current year we limit the collect from 2days before now\r\n \r\n #Define the datetime 2 days before now\r\n dbeyest = dt.datetime.now(tz=pytz.timezone(str(TZ))) - dt.timedelta(days=2)\r\n \r\n #If the instance year is the current year\r\n if(y == dbeyest.year):\r\n #We limit the collect 2 days before now\r\n ldy = dt.datetime.strptime(str(dbeyest.month)+\"/\"+str(dbeyest.day)+\"/\"+str(y)+\" 23:59\", '%m/%d/%Y %H:%M')\r\n else:\r\n #Else, we collect the full year\r\n ldy = dt.datetime.strptime(\"12/31/\"+str(y)+\" 23:59\", '%m/%d/%Y %H:%M')\r\n \r\n #Convert the last date of the year to specified timezone\r\n ldy = Date_calibration(ldy, 0, TZ)\r\n \r\n #Set up dataframe for the specified datetime index and timestep\r\n ph = pd.DataFrame(index=pd.DatetimeIndex(start=fdy, end=ldy, freq=str(timestep)))\r\n \r\n #Past original data temperature in the time fitted dataframe (with the datetimeindex position)\r\n ph['TMP'] = dz['TMP']\r\n \r\n #Calculate the quality of the instance data\r\n nb_nan = ph['TMP'].isnull().sum()\r\n qual = (1 - (nb_nan) / len(ph)) * 100\r\n \r\n return dz, qual", "def format_time(self, time):\n hours = time // 3600\n time = time - hours*3600\n minutes = time // 60\n seconds = time - minutes*60\n return ('%d:%d:%d' %(hours, minutes, seconds))", "def _external_time_format(int_time):\n simple_iso_time = True\n if simple_iso_time:\n ext_time = int_time.replace(tzinfo=SimpleUtc()).isoformat()\n else:\n ext_time = int_time.isoformat() + \"Z\"\n return ext_time", "def format_time(time: int) -> str:\n minute = time % 100\n hour = int(time / 100)\n\n if hour >= 12:\n if hour > 12:\n hour = hour -12\n response = f'{hour} {minute} pm'\n else:\n response = f'{hour} {minute} am'\n\n return response", "def preprocess_dates(args):\n if 'date' in args:\n if args.get('period') == 'range' and 'end_date' in args:\n args['date'] = '{},{}'.format(args['date'],\n args['end_date'])\n return args", "def format_date(txt):\n pattern1 = r\"(?P<day>0?[1-9]|[12][0-9]|3[01])([/.-]|\\s|\\s/\\s|\\.\\s)\" \\\n r\"(?P<month>0[1-9]|1[012])([/.-]|\\s?|\\s/\\s|\\.\\s)\" \\\n r\"(?P<y1>19|20)(?P<y2>[0-9][0-9])\"\n txt = re.sub(pattern1, r\"\\g<y1>\\g<y2>\\g<month>\\g<day>\", txt)\n\n pattern_month = 
\"(?P<month>janvier|janv.|févr.|février|mars|avr.|avril|mai|juin|juill.|juillet|août|sept.|septembre|oct.|octobre|nov.|novembre|déc.|décembre)\"\n pattern2 = r\"(?P<day>0?[1-9]|[12][0-9]|3[01]|(1er)|(1°)) %s (?P<y1>19|20)(?P<y2>[0-9][0-9])\" % pattern_month\n txt = re.sub(pattern2, convert_month, txt)\n\n pattern3 = r\"(?P<day>0?[1-9]|[12][0-9]|3[01])([/.-]|\\s|(\\s/\\s)|(.\\s))\" \\\n r\"(?P<month>0[1-9]|1[012])([/.-]|\\s|(\\s/\\s)|(.\\s))\" \\\n r\"(?P<y2>[0-9][0-9])\"\n txt = re.sub(pattern3, convert_date_year_2_digits, txt)\n return txt", "def test_wk2yr(self):\n result = TimeUnit(-34, 'wk', 'yr')\n self.assertRaises(ValueError, lambda: result.doconvert())", "def convert_to_24_hours(time, ap):\r\n if ap.lower() == 'p' and time <= 12:\r\n time += 12\r\n\r\n return time", "def time_trans(datetime_str):\n\t\tif re.compile(\"(\\d+)-(\\d+)-(\\d+) (\\d+):(\\d+):(\\d+)\").match(datetime_str):\n\t\t\treturn datetime.strptime(datetime_str, \"%Y-%m-%d %H:%M:%S\")", "def format_time(self, data):\r\n if self.datetime_formatting == 'rfc-2822':\r\n return format_time(data)\r\n\r\n return data.isoformat()", "def format_datetimes(self, datetimes, format=\"%B %d %Y %I:%M %p\"):\n date, times, space_character = datetimes.split(\", \")\n start_time, end_time = times.split(\" - \")\n year = datetime.now().strftime(\"%Y\")\n return (\n datetime.strptime(\n date + \" \" + year + \" \" + start_time.replace(\".\", \"\"), format\n ),\n datetime.strptime(\n date + \" \" + year + \" \" + end_time.replace(\".\", \"\"), format\n ),\n )", "def tidy_time_string(time):\n\n # TODO - :return date_range: Where date_status is \"centred\", date_range is a tuple (`first_date`, `last_date`) of\n # `datetime64[D]` objects. Otherwise will return a tuple of Not a Time objects.\n # TODO - warnings/logging\n # TODO - change date offsets to rounding using MonthEnd/MonthBegin\n # https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html\n # TODO - allow mulitple `date_status`es (circa and centred).\n\n date_status = 'not_converted'\n date = pd.NaT\n original_time_string = str(time)\n\n # IS THE STRING ALREADY PARSABLE AS AN EXACT TIME:\n if '-' not in time: # to avoid accidentally parsing ranges as exact times. e.g. \"25-27 june\".\n\n try:\n date = pd.to_datetime(time)\n date_status = 'exact'\n return date, date_status\n except:\n pass\n\n # IS THE STRING \"CIRCA\" SOMETHING:\n if (('c' in time) or (('[' in time) or (']' in time))):\n if 'c' in time: # contains 'c' (not in a month, e.g. Dec), so \" c. \", \"c \", t\n time = re.sub(r'(?<!\\w)(c[.]?\\s?)', '', time)\n\n if ('[' in time) and (']' in time): # contains square brackets\n\n # We don't attempt to fix multiple pairs of brackets with one missing bracket\n num_sq_brackets = time.count(']') + time.count(']')\n if num_sq_brackets >= 3 and (num_sq_brackets % 2) != 0:\n logging.info(\"Cannot fix multiple pairs of brackets with one missing bracket.\")\n return date, date_status\n\n reg2 = re.findall(r'\\[(.*?)\\]', time)\n if reg2 is not None:\n # remove square brackets\n for in_brackets in reg2:\n time = time.replace(f\"[{in_brackets}]\", in_brackets)\n elif '[' in time:\n time = time.replace('[', '')\n elif ']' in time:\n time = time.replace(']', '')\n\n time = time.strip()\n\n try:\n date = pd.to_datetime(time)\n date_status = 'circa'\n return date, date_status\n except:\n pass\n\n # IS THE STRING A RANGE OF DATES? WHICH WE CAN AVERAGE OR CENTRE:\n # We are assuming an '[1,2]\\d{2}0)s' pattern (e.g. 
1970s, 1980s, 1730s, 1900s) implies a decade.\n if ('s' in time) or ('-') in time:\n if ('s' in time) and ('-' not in time):\n reg3 = re.findall(r'([1,2]\\d{2}0)s', time)\n for reg in reg3:\n time = time.replace(f\"{reg}s\", str(int(reg) + 5)) # centre is 5 years later\n date = pd.to_datetime(time, format='%Y')\n date_status = 'centred'\n\n elif ('-' in time):\n if time.count('-') > 1:\n print('many hyphens', original_time_string)\n # Not attempting to deal with multiple hyphens at the moment.\n pass\n else:\n time = re.sub(r'\\s?-\\s?', '-', time)\n reg4 = re.match(r'(.*?)-(.*)$', time)\n\n first = time.replace(reg4.group(0), reg4.group(1))\n last = time.replace(reg4.group(0), reg4.group(2))\n\n if 's' in first:\n reg5 = re.findall(r'([1,2]\\d{2}0)s', time)\n for reg in reg5:\n first = first.replace(f\"{reg}s\", reg)\n\n if not re.search(r'[1,2]\\d{3}', first): # no year:\n if not re.search(r'\\d+', first): # no days in `first` => varying month:\n # Take the year from last and add it on\n reg5 = re.findall(r'[1,2]\\d{3}', last)\n first = f\"{first} {reg5[0]}\"\n else: # days in `first` => varying days:\n # Take the month and year from last and add it on.\n reg6 = re.findall(r'\\w+ [1,2]\\d{3}', last)\n if len(reg6) > 0:\n first = f\"{first} {reg6[0]}\"\n\n if 's' in last:\n reg7 = re.findall(r'([1,2]\\d{2}0)s', time)\n for reg in reg7:\n last = last.replace(f\"{reg}s\", str(int(reg) + 10)) # end is 10 years later.\n\n if re.match(r'\\w+\\s\\d+', last): # assuming month and year\n time_delta = pd.tseries.offsets.DateOffset(months=1)\n elif re.match(r'[a-zA-Z]', last): # assuming it's a month\n time_delta = pd.tseries.offsets.DateOffset(months=1)\n elif re.match(r'[1,2]\\d{3}', last): # assuming it's a year\n time_delta = pd.tseries.offsets.DateOffset(months=12)\n elif re.match(r'\\d+', last).span()[1] - re.match(r'\\d+', last).span()[0] <= 2: # assuming it's a day:\n time_delta = pd.tseries.offsets.DateOffset(months=0)\n else:\n logging.info(f\"Can't guess format of {last} from {original_time_string}\")\n return date, date_status\n\n try:\n last = pd.to_datetime(last)\n except:\n logging.info(f\"Could not parse `last` ({last}) into `datetime` format.\")\n\n return date, date_status\n\n last = last + time_delta\n\n try:\n first = pd.to_datetime(first)\n except:\n logging.info(f\"Could not parse `first` ({first}) into `datetime` format.\")\n\n return date, date_status\n\n centre_date = first + (last - first) / 2\n date_status = 'centred'\n return centre_date, date_status\n\n return date, date_status", "def new_years_eve(year):\n return (year, DEC, 31)", "def replace_misc(text): \n # replace different types of \"year old\" with \n # matches: y.o., y/o, years old. year old, yearold\n text = re.sub(r'-?\\byears? 
?-?old\\b|\\by(?:o|r)*[ ./-]*o(?:ld)?\\b', ' yo', text, flags=re.IGNORECASE)\n\n # Does the same thing as above but copied from https://arxiv.org/abs/1808.02622v1\n text = re.sub(r'(\\d+)\\s*(year\\s*old|y.\\s*o.|yo|year\\s*old|year-old|-year-old|-year old)', r'\\1 yo', text, flags=re.IGNORECASE)\n \n # replaces yr, yr's, yrs with years\n text = re.sub(r'\\byr[\\'s]*\\b', 'years', text, re.IGNORECASE)\n \n # replace Pt and pt with patient, and IN/OUT/OT PT with patient\n # Note: PT also refers to physical therapy and physical therapist\n text = re.sub(r'\\b[P|p]t.?|\\b(IN|OU?T) PT\\b', 'patient ', text)\n\n # replace sex with consistant token\n text = re.sub(r'\\b(gentlman|male|man|m|M)(?!\\S)\\b', 'male', text)\n text = re.sub(r'\\b(female|woman|f|F)(?!\\S)\\b', 'female', text)\n \n # replace time types\n text = re.sub(r'\\d{0,2}:\\d{0,2} \\b[A|P]\\.?M\\.?\\b', replace_time, text, flags=re.IGNORECASE)\n text = re.sub(r'\\[\\*\\*(\\d{2})\\*\\*\\] \\b[a|p].?m.?\\b', replace_time, text, flags=re.IGNORECASE)\n \n # finally remove leftover redacted stuff (mostly empty)\n text = re.sub(r'\\[\\*\\*(.*?)\\*\\*\\]', '', text, flags=re.IGNORECASE)\n\n return text", "def fix_time_units( timeunits ):\n imon = timeunits.find(\"months since \")\n if imon==0:\n since=\"months since \"\n else:\n iday = timeunits.find(\"days since \")\n if iday==0:\n since=\"days since \"\n else:\n ihour = timeunits.find(\"hours since \")\n if ihour==0:\n since=\"hours since \"\n else:\n return timeunits\n date = timeunits[len(since):]\n date_is_bc = False\n if date.find('B.C.')>0: # I've seen one example like this!\n # B.C. fixup isn't tested!\n date_is_bc = True\n # e.g. \"January 1, 4713 B.C.\" Note exactly one space before B. And not BC etc.\n matchobject = re.search( r\"\\d+\\sB\\.C\\.\" ) # not tested\n if matchobject is None:\n return timeunits\n pre_yr = matchobject.start()\n pre_bc = matchobject.end() - 5 #2 spaces before B.C. would need -6 or another re\n yr_bc = date[pre_yr:pre_bc]\n yr_ad = str(1 - int(yr))\n # The parser won't understand negative years, but cdtime will. 
So don't\n # fix the date quite yet...\n date = date[0:pre_bc]\n new_date = str( dateutil.parser.parse( date, default=datetime(1850,1,1,0,0)) )\n if date_is_bc:\n pre_yr = new_date.find(yr_bc)\n new_date = new_date[0:pre_yr]+yr_ad+new_date[pre_yr+len(yr_bc)]\n return since+new_date", "def get_year(string): \n return int(string[11:15])", "def printTime(t):\n if t < 2 * MINUTE:\n return \"%d seconds\" % (t / SECOND)\n if t < 5 * HOUR:\n return \"%d minutes\" % (t / MINUTE)\n if t < 3 * DAY:\n return \"%d hours\" % (t / HOUR)\n if t < YEAR:\n return \"%d days\" % (t / DAY)\n if (t % YEAR) == 0:\n return \"%d years\" % (t / YEAR)\n else:\n return \"%5.1f years\" % (t / YEAR)", "def main():\n date_time_conversion('2018-12-30T09:37:56.000001Z', '2020-07-12T07:56:43.000001Z', 0, 0, 0, 0)", "def setHour(self, *args):\n return _libsbml.Date_setHour(self, *args)", "def _change_time_format(time_string):\n datetime_object = parser.isoparse(time_string)\n return datetime_object", "def get_hour(hour):\n if int(hour) == 0:\n return 12\n elif int(hour) > 12:\n return int(hour) - 12\n else:\n return hour", "def date_calculator(years, days, hours, minutes):\n now = datetime.datetime.now()\n\n modified_dt = datetime.datetime(now.year + years, now.month, now.day,\n now.hour, now.minute)\n delta = datetime.timedelta(days=days, hours=hours, minutes=minutes)\n modified_dt += delta\n\n print(format_date(modified_dt))", "def unit_yr(self):\n return ((self.time_base * 60.0) * 24.0) * 365.0", "def timeConversion(s):\n\n if s[-2] == 'P' and int(s[:2]) < 12:\n hour = int(s[:2]) + 12\n s = str(hour) + s[2:-2]\n\n elif s[:2] == \"12\" and s[-2] == 'A':\n s = \"00\" + s[2:-2]\n\n else:\n s = s[:-2]\n\n return s", "def preprocess_date_and_time(params: Dict) -> None:\n start_date = date.fromisoformat(params[\"start_date\"])\n end_date = date.fromisoformat(params[\"end_date\"])\n\n if end_date < start_date:\n raise Exception(f\"End date is earlier than start date.\")\n \n start_time = time.fromisoformat(params[\"start_time\"])\n end_time = time.fromisoformat(params[\"end_time\"])\n\n if end_time != time.min and end_time <= start_time:\n raise Exception(\"End time is earlier or equal than start time\")\n \n actual_start = time(start_time.hour + 1 if start_time.minute + start_time.second + start_time.microsecond > 0 \n else start_time.hour)\n actual_end = time(end_time.hour)\n\n if actual_end == time.min and end_time != time.min:\n raise Exception(\"Non available blocks to use\")\n \n params.update({\n \"start_date\": start_date,\n \"end_date\": end_date,\n \"start_time\": actual_start,\n \"end_time\": actual_end\n })", "def _convert_runtime(runtime):\n regex = re.compile(\n r\"[-]?((?P<hours>\\d+?)\\s?hr)?\\s?[-]?((?P<minutes>\\d+?)\\s?min)?\"\n )\n parts = regex.match(runtime)\n if not parts:\n return runtime\n parts = parts.groupdict()\n time_params = {}\n for (name, param) in parts.items():\n if param:\n time_params[name] = int(param)\n return int(datetime.timedelta(**time_params).total_seconds() / 60)" ]
[ "0.61535865", "0.54928744", "0.54547983", "0.5405917", "0.5310719", "0.5267757", "0.52620614", "0.5259947", "0.51755303", "0.5144724", "0.51272285", "0.5085674", "0.50707275", "0.50623596", "0.5054484", "0.505168", "0.5038544", "0.50103486", "0.500148", "0.498647", "0.49656528", "0.49499217", "0.49470493", "0.49417716", "0.49321747", "0.49294227", "0.49093542", "0.48932773", "0.489069", "0.48901463", "0.48853242", "0.48801956", "0.4849207", "0.4846597", "0.48309457", "0.48308727", "0.4829211", "0.4826466", "0.48145527", "0.4802445", "0.47926947", "0.4779194", "0.4777553", "0.47753137", "0.47722077", "0.47683135", "0.47675383", "0.4767449", "0.47639978", "0.47620878", "0.47586504", "0.4754825", "0.4754617", "0.47520712", "0.47320333", "0.47300577", "0.47286543", "0.47217894", "0.4710068", "0.47087997", "0.47071648", "0.4702615", "0.4699544", "0.46989092", "0.46977505", "0.46970788", "0.46962014", "0.46923858", "0.46891153", "0.4682292", "0.46787816", "0.46760845", "0.46732652", "0.46715766", "0.46643862", "0.46627587", "0.46560702", "0.46534538", "0.4652484", "0.46445775", "0.46396926", "0.46349198", "0.46291083", "0.46267927", "0.46237725", "0.4623501", "0.46157637", "0.4611956", "0.46070153", "0.46057242", "0.4603877", "0.46018347", "0.45967042", "0.45938268", "0.4593203", "0.4591698", "0.4590212", "0.45897245", "0.45694426", "0.45564935" ]
0.7474295
0
Loads a vocabulary file into a dictionary.
def load_vocab(vocab_file):
    vocab = collections.OrderedDict()
    index = 0
    with open(vocab_file, "r", encoding="utf-8") as reader:
        while True:
            token = reader.readline()
            if not token:
                break
            token = token.strip()
            vocab[token] = index
            index += 1
    return vocab
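A minimal usage sketch for the function above (assumptions: the standard-library collections module is imported, and a plain-text vocabulary file named vocab.txt exists with one token per line; the file name is only illustrative):

    import collections

    vocab = load_vocab("vocab.txt")                        # token -> integer id, in file order
    inv_vocab = {idx: tok for tok, idx in vocab.items()}   # optional reverse mapping, id -> token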
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_vocabulary():\n global vocabulary_list, vocabulary_dict\n vocabulary_list = []\n vocabulary_dict = {}\n\n with open(_VOCABULARY_PATH, 'r') as f:\n for index, line in enumerate(f):\n line = line.strip()\n vocabulary_dict[line] = index\n vocabulary_list.append(line)", "def load_vocab(fn):\n return corpora.Dictionary.load(fn)", "def load_vocab(filename):\n try:\n d = dict()\n with open(filename, encoding='utf-8') as f:\n for idx, word in enumerate(f):\n word = word.strip()\n d[word] = idx\n\n except IOError:\n raise MyIOError(filename)\n return d", "def load_vocab(vocab_file):\r\n vocab = {}\r\n index = 0\r\n with open(vocab_file, \"r\", encoding=\"utf-8\") as reader:\r\n while True:\r\n token = reader.readline()\r\n if not token:\r\n break\r\n token = token.strip()\r\n vocab[token] = index\r\n index += 1\r\n return vocab", "def loadVoc(vocabFName):\n f = open(vocabFName, 'r')\n lines = f.readlines()\n f.close()\n result = dict()\n for idx, val in enumerate(lines):\n word = val.strip('\\n')\n result[word] = idx\n return result", "def load_vocab(vocab_file):\n index = 0\n itos = {}\n stoi = {}\n with open(vocab_file, \"r\") as reader:\n while True:\n token = reader.readline()\n if not token:\n break\n token = token.strip()\n itos[index] = token\n stoi[token] = index\n index += 1\n itos[index] = 'style_options'\n stoi['style_options'] = index\n itos[index+1] = 'ambience'\n stoi['ambience'] = index + 1\n return {'itos': itos, 'stoi': stoi, 'len': len(itos)}", "def load_vocab(vocab_file):\n vocab = collections.OrderedDict()\n index = 0\n with open(vocab_file, 'r', encoding='utf-8') as reader:\n while True:\n token = reader.readline()\n if not token:\n break\n token = token.strip()\n vocab[token] = index\n index += 1\n return vocab", "def load_vocab(vocab_file):\n vocab = collections.OrderedDict()\n index = 0\n with tf.gfile.GFile(vocab_file, \"r\") as reader:\n while True:\n token = convert_to_unicode(reader.readline())\n if not token:\n break\n token = token.strip()\n vocab[token] = index\n index += 1\n return vocab", "def load_vocab(vocab_file):\n vocab = collections.OrderedDict()\n with open(vocab_file, \"r\", encoding=\"utf-8\") as reader:\n tokens = reader.readlines()\n for index, token in enumerate(tokens):\n token = token.rstrip('\\n')\n vocab[token] = index\n return vocab", "def load_vocab(vocab_file):\n vocab = collections.OrderedDict()\n index = 0\n path = keras.utils.get_file(\"bert_vocab.txt\", vocab_file)\n with tf.io.gfile.GFile(path, \"r\") as reader:\n while True:\n token = convert_to_unicode(reader.readline())\n if not token:\n break\n token = token.strip()\n vocab[token] = index\n index += 1\n return vocab", "def load_vocabulary(self):\n vocab_file = open(vocabulary_path, \"r\")\n self.vocab_list = vocab_file.read().split(\"\\n\")\n vocab_file.close()\n print(\"[INFO] Reading vocabulary...\")\n print(self.vocab_list[0:15])", "def initialize_vocabulary(vocabulary_path):\n if os.path.exists(vocabulary_path):\n rev_vocab = []\n with codecs_open(vocabulary_path, \"rb\", encoding=\"utf-8\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [line.strip() for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def load_vocab(vocab_file, encoding='utf8'):\n vocab = OrderedDict()\n index = 0\n with open(vocab_file, encoding=encoding) as reader:\n while True:\n token = convert_to_unicode(reader.readline())\n if not token:\n break\n token = 
token.strip()\n vocab[token] = index\n index += 1\n return vocab", "def hload_vocab(vocab_path):\n vocab = collections.OrderedDict()\n index = 0\n with open(vocab_path, \"r\", encoding=\"utf-8\") as reader:\n while True:\n token = reader.readline()\n if not token:\n break\n token = token.strip()\n vocab[token] = index\n index += 1\n return vocab", "def load_tags_vocab(load_path: str) -> dict:\n tags_vocab = dict()\n with open(load_path, 'r') as f:\n lines = f.readlines()\n f.close()\n\n for line in lines:\n key, val = line[:-1].split(' ') # \"\\t\"\n tags_vocab[key] = val\n\n return tags_vocab", "def load(self, path='dict.pic'):\n with open(path, 'rb') as f:\n self.__dictionary = pickle.load(f)\n self.__vocab_size = len(self.__dictionary)\n\n if self.__verbose:\n print('Loading Tokenizer, vocab size:', self.vocab_size())", "def initialize_vocabulary(vocabulary_path):\n if gfile.Exists(vocabulary_path):\n rev_vocab = []\n with gfile.GFile(vocabulary_path, mode=\"rb\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [line.strip() for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def load_vocab(self, fn):\n vocab = load_vocab(fn)\n self.vocab = vocab\n self.has_vocab = True", "def initialize_vocabulary(vocabulary_path):\n if gfile.Exists(vocabulary_path):\n rev_vocab = []\n with gfile.GFile(vocabulary_path, mode=\"r\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [line.strip() for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def _load_vocab(vocab_file_name, language):\n vocab = []\n vocab_size = 0\n #with codecs.getreader(\"utf-8\")(tf.gfile.GFile(vocab_file_name), \"rb\") as f:\n with tf.gfile.GFile(vocab_file_name) as f:\n for word in f:\n vocab.append(word.strip())\n vocab_size += 1\n\n if not EOS in vocab:\n vocab = [EOS] + vocab\n if not SOS in vocab:\n vocab = [SOS] + vocab\n if not UNK in vocab:\n vocab = [UNK] + vocab\n\n reverse_dictionary = {}\n new_vocab_file_name = vocab_file_name + \".new\"\n with tf.gfile.GFile(new_vocab_file_name, \"wb\") as f:\n reverse_dictionary = {}\n i = 0\n for word in vocab:\n f.write(\"%s\\n\" % word)\n reverse_dictionary.update({i : word})\n i+=1\n\n vocab_table = tf.contrib.lookup.index_table_from_file(new_vocab_file_name, default_value = 0)\n\n eos_id_tensor = tf.cast(vocab_table.lookup(tf.constant(EOS)), tf.int32)\n sos_id_tensor = tf.cast(vocab_table.lookup(tf.constant(SOS)), tf.int32)\n\n return Vocab(lang=language,\n table=vocab_table,\n size=vocab_size,\n reverse_dict=reverse_dictionary,\n sos_id_tensor=sos_id_tensor,\n eos_id_tensor=eos_id_tensor)", "def initialize_vocabulary(vocabulary_path):\n if gfile.Exists(vocabulary_path):\n rev_vocab = []\n with gfile.GFile(vocabulary_path, mode=\"rb\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [line.strip() for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def initialize_vocabulary(vocabulary_path):\n if gfile.Exists(vocabulary_path):\n rev_vocab = []\n with gfile.GFile(vocabulary_path, mode=\"rb\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [line.strip() for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return 
vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def load_vocab(self):\n keys = []\n values = []\n with open(self.embed_file, 'r') as f:\n lines = f.readlines()\n\n for line in lines:\n key = line.split(\" \")[0]\n value = line.split(\" \")[1:]\n keys.append(key)\n values.append(value)\n # form <dict>\n # vocab = dict(zip(keys, values))\n return keys, values", "def initialize_vocabulary(vocabulary_path):\n if gfile.Exists(vocabulary_path):\n rev_vocab = []\n with gfile.GFile(vocabulary_path, mode=\"rb\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [tf.compat.as_bytes(line.strip()) for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def vocab_from_pickle(path: str) -> Dict:\n with open(path, \"rb\") as inp:\n vocab = pickle.load(inp)\n logger.info('Vocabulary (%d words) loaded from \"%s\"', len(vocab), path)\n return vocab", "def initialize_vocabulary(vocabulary_path):\n if gfile.Exists(vocabulary_path):\n rev_vocab = []\n with gfile.GFile(vocabulary_path, mode=\"r\") as f:\n rev_vocab.extend(f.readlines())\n rev_vocab = [line.strip() for line in rev_vocab]\n vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])\n return vocab, rev_vocab\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def read_dictionary():\n with open(FILE, 'r') as f:\n for vocabulary in f:\n if vocabulary[0].strip() not in dict_txt:\n dict_txt[vocabulary[0].strip()] = [vocabulary.strip()]\n else:\n dict_txt[vocabulary[0].strip()].append(vocabulary.strip())", "def _load_vocab(self, path):\n self._token2id_feq = self._init_dict()\n N = len(self._token2id_feq)\n\n if path.endswith(\".json\"):\n\n with open(path, encoding='utf-8') as f:\n _dict = json.load(f)\n # Word to word index and word frequence.\n for ww, vv in _dict.items():\n if isinstance(vv, int):\n self._token2id_feq[ww] = (vv + N, 0)\n else:\n self._token2id_feq[ww] = (vv[0] + N, vv[1])\n else:\n with open(path) as f:\n for i, line in enumerate(f):\n ww = line.strip().split()[0]\n self._token2id_feq[ww] = (i + N, 0)", "def load_vocab(path, encoding=\"UTF-9\"):\n vocab = []\n\n if not os.path.exists(path):\n return vocab\n\n with open(path, encoding=encoding) as fin:\n for line in fin.readlines():\n line = line.strip()\n word, freq = line.split(\"\\t\")\n vocab.append((word,int(freq)))\n\n return vocab", "def load_embedding_file(self):\n if self.language == 'en':\n embed_file_dir = self.embedding_path\n wv = KeyedVectors.load_word2vec_format(embed_file_dir, binary=True)\n self.pretrained_embedding = {}\n for word in wv.vocab.keys():\n normalized_word = normalization.process(self.language.upper(), word, letters_to_keep='', letters_to_remove='',\n lowercase=True, remove_repetitions_count=-1, remove_punct=True,\n remove_digits=True, remove_vowels=False, remove_diacritics=True,\n remove_spaces=False, remove_apostrophe=True, copy_through=False,\n keep_romanized_text=False)\n self.pretrained_embedding[normalized_word] = wv[word]\n self.embed_dim = 300\n\n else:\n embed_file_dir = self.embedding_path\n fin = open(embed_file_dir, 'r', encoding='utf-8', newline='\\n', errors='ignore')\n data = {}\n for line in fin:\n if len(line.split()) == 2: # header\n continue\n tokens = line.rstrip().split(' ')\n word = tokens[0]\n normalized_word = normalization.process(self.language.upper(), word, letters_to_keep='', letters_to_remove='',\n 
lowercase=True, remove_repetitions_count=-1, remove_punct=True,\n remove_digits=True, remove_vowels=False, remove_diacritics=True,\n remove_spaces=False, remove_apostrophe=True, copy_through=False,\n keep_romanized_text=False)\n data[normalized_word] = np.array(tokens[1:])\n self.pretrained_embedding = data\n self.embed_dim = 300", "def load_vocab(vocab):\r\n\tvocab = [line.split()[0] for line in open(\r\n\t\t'{}{}'.format(pm.vocab_path, vocab), 'r', encoding='utf-8').read().splitlines()\r\n\t\t\t if int(line.split()[1]) >= pm.word_limit_size]\r\n\tword2idx_dic = {word: idx for idx, word in enumerate(vocab)}\r\n\tidx2word_dic = {idx: word for idx, word in enumerate(vocab)}\r\n\treturn word2idx_dic, idx2word_dic", "def load_vocab(self):\n\n if self.vocabulary_path: \n # For now, the file format is derived from the file extension.\n if self.vocabulary_path.endswith('csv'):\n self.logger.info(\"Filter spymaster vocabulary by csv-file: {}\".format(self.vocabulary_path))\n with open(self.vocabulary_path, 'r') as fin:\n reader = csv.reader(fin)\n header = next(reader)\n for row in reader:\n word = row[1].lower()\n self.update_vocab(word) \n elif self.vocabulary_path.endswith('txt'):\n self.logger.info(\"Filter spymaster vocabulary by txt-file: {}\".format(self.vocabulary_path))\n with open(self.vocabulary_path, 'r') as fin:\n for line in fin:\n word = line.strip()\n self.update_vocab(word)\n else:\n raise ValueError(\"Unknown file format for filter spymaster vocabulary.\") \n else:\n self.logger.info(\"Load spymaster vocabulary from gensim.models.KeyedVectors.\")\n self.vocab = self.model.vocab\n self.vocab_size = len(self.vocab)\n\n self.logger.info(\"Spymaster vocabulary size is {}\".format(self.vocab_size))", "def load_vector_dictionary():\n return read_word2vecs_from_file(VECTOR_FILE)", "def load(filename):\n\n print \"Loading dictionary...\"\n dictionary = Dictionary()\n print \" Loading file...\"\n whole_file = file(filename).read().upper()\n print \" Splitting file...\"\n words = whole_file.split()\n print \" Removing unsuitable words...\"\n words = dictionary.remove_unsuitable_words(words)\n print \" Building data structures...\"\n dictionary.set_words(words)\n\n print \" Loaded %d words\" % len(dictionary.words)\n print \" Unique letter size:\"\n print \" No blanks: %d\" % len(dictionary.letters_map)\n print \" One blank: %d\" % len(dictionary.letters_map_one_blank)\n print \" Two blanks: %d\" % len(dictionary.letters_map_two_blanks)\n\n return dictionary", "def load_vocab(path: str) -> Vocab:\n return torch.load(path, map_location=lambda storage, loc: storage)['args'].vocab", "def load(cls, filepath) -> 'Word2VecEmbedding':\n with open(filepath, 'rb') as f:\n embedding = pickle.load(f)\n embedding.word2idx = {spell: idx for idx, spell in enumerate(embedding.vocab.idx2word)}\n return embedding", "def load_target_vocab(self):\n vocab = [line.split()[0] for line in open(os.path.join('preprocessed', 'all_vocab.txt'), 'r').read().splitlines()]\n self.word2idx = {word: idx for idx, word in enumerate(vocab)}\n self.idx2word = {idx: word for idx, word in enumerate(vocab)}\n self.vocab_size = len(self.word2idx)", "def load_embeddings(filepath, vocabulary, retain):\n \n word2index = dict()\n word_vectors = list()\n\n def add_entry(word, vector):\n word2index[word] = len(word2index)\n word_vectors.append(vector)\n\n model = gensim.models.KeyedVectors.load(filepath)\n\n # adding special tokens <FIL>, <UNK> and <NUM>\n dim = model.vector_size\n add_entry('<fil>', np.zeros((dim,)))\n for special in 
['<unk>', '<num>']:\n vector = np.random.uniform(-0.025, 0.025, (dim,))\n add_entry(special, vector)\n\n if retain:\n for word, _ in model.vocab.items():\n add_entry(word, model[word])\n else:\n for word in vocabulary:\n if word in model:\n add_entry(word, model[word])\n\n vocabulary = vocabulary.intersection(word2index.keys())\n return word2index, np.asarray(word_vectors)", "def initialize_vocabulary(self,vocabulary_path):\n if tf.gfile.Exists(vocabulary_path):\n vocab = corpora.Dictionary.load(vocabulary_path)\n print(\"vocab length: \",len(vocab.token2id))\n\n return vocab.token2id, vocab.token2id.keys()\n else:\n raise ValueError(\"Vocabulary file %s not found.\", vocabulary_path)", "def load_dict(path_to_vec):\n emb = {}\n with open(path_to_vec, 'r', errors='ignore', encoding='utf8') as f:\n for line in f:\n values = line.split()\n word = values[0]\n vector = np.asarray(values[1:], \"float32\")\n emb[word] = vector\n return emb", "def load_vocab(vocab_files, preserve_token=None):\n if preserve_token is None:\n preserve_token = []\n vocab = collections.OrderedDict()\n index = 0\n if preserve_token is not None:\n for token in preserve_token:\n vocab[token] = index\n index += 1\n vocab_files = vocab_files.split(\",\")\n for vocab_file in vocab_files:\n with tf.gfile.GFile(vocab_file, \"r\") as reader:\n while True:\n token = utils.convert_to_unicode(reader.readline())\n if not token:\n break\n token = token.strip()\n if token not in vocab:\n vocab[token] = index\n index += 1\n return vocab", "def get_vocab(self, filename):\n return read_file(filename) #TODO(tilo): the-FAQ!", "def load_words_from_file(path, voc_path=None):\n label_to_idx = {}\n dict_size = 0\n label_ids = []\n with open(path, \"r\") as fin:\n for label in fin:\n if label not in label_to_idx:\n label_to_idx[label] = dict_size\n dict_size += 1\n label_ids.append(label_to_idx[label])\n if voc_path:\n with open(voc_path, \"w+\") as fout:\n json.dump(label_to_idx, fout)\n return torch.tensor(label_ids)", "def vocab_from_json(path: str) -> Dict:\n with open(path, encoding=VOCAB_ENCODING) as inp:\n vocab = json.load(inp)\n logger.info('Vocabulary (%d words) loaded from \"%s\"', len(vocab), path)\n return vocab", "def __init__(self, file):\n with open(file, 'r') as f:\n self.vocab = json.loads(f.read())", "def _load_dict(self, dict_name=None):\n if dict_name is None:\n for name in self.dict_names:\n self._load_dict(name)\n else:\n dict_idx = self.dict_names.index(dict_name)\n if not os.path.exists(self.dict_files[dict_idx]):\n self.logger.warn(\"Not exists %s for %s\" % (\n self.dict_files[dict_idx], dict_name))\n else:\n dict_map = self.dicts[dict_idx]\n id_to_vocab_dict_map = self.id_to_vocab_dict_list[dict_idx]\n if dict_name != self.DOC_LABEL:\n dict_map[self.VOCAB_PADDING] = 0\n dict_map[self.VOCAB_UNKNOWN] = 1\n dict_map[self.VOCAB_PADDING_LEARNABLE] = 2\n id_to_vocab_dict_map[0] = self.VOCAB_PADDING\n id_to_vocab_dict_map[1] = self.VOCAB_UNKNOWN\n id_to_vocab_dict_map[2] = self.VOCAB_PADDING_LEARNABLE\n\n for line in open(self.dict_files[dict_idx], \"r\"):\n vocab = line.strip(\"\\n\").split(\"\\t\")\n dict_idx = len(dict_map)\n dict_map[vocab[0]] = dict_idx\n id_to_vocab_dict_map[dict_idx] = vocab[0]", "def load_embedding(fpath, VOCAB):\n print(\"Loading embeddings...\")\n emb = dict()\n wv_from_bin = KeyedVectors.load_word2vec_format(fpath, limit=VOCAB)\n for word, vector in tqdm(zip(wv_from_bin.vocab, wv_from_bin.vectors)):\n coefs = np.asarray(vector, dtype='float32')\n if word not in emb:\n emb[word] = coefs\n return 
emb", "def load_terms_dict():\n \tfnm = \"../datasets/bbc/bbc.terms\"\n \tterm_dict = {}\n \twith open(fnm, \"r\") as f:\n \t\tfor wordid, line in enumerate(f.readlines()):\n \t\t\tword = line.strip()\n \t\t\tterm_dict[wordid] = word\n \treturn term_dict", "def create_vocabulary(sentences, path):\n print('creating vocab..')\n\n word_dict = dict(); vocabulary = dict()\n for sentence in sentences:\n for word in nltk.word_tokenize(sentence):\n if word not in word_dict:\n word_dict[word] = ''\n word_dict['<s>'] = ''\n word_dict['</s>'] = ''\n\n with open(path, encoding=\"utf8\") as f:\n for line in f:\n word, vec = line.split(' ', 1)\n if word in word_dict:\n vocabulary[word] = np.fromstring(vec, sep=' ')\n\n print('vocabulary was created successfully!')\n return vocabulary", "def load_preprocessed(self):\n with open(self.words_vocab_file, 'rb') as f:\n self.word_to_id, self.unk_word_list = pickle.load(f)\n self.word_vocab_size = len(self.word_to_id)\n\n if self.unit != \"word\":\n with open(self.sub_vocab_file, 'rb') as f:\n if self.unit == \"char\":\n self.max_word_len = self.get_max_word_length(self.word_to_id) + 2\n self.char_to_id, self.unk_char_list, self.max_word_len = pickle.load(f)\n self.subword_vocab_size = len(self.char_to_id)\n elif self.unit == \"char-ngram\":\n self.ngram_to_id, self.unk_char_list, self.unk_ngram_list, \\\n self.max_ngram_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.ngram_to_id)\n elif self.unit == \"morpheme\":\n self.morpheme_to_id, self.unk_char_list, self.unk_morph_list, \\\n self.max_morph_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.morpheme_to_id)\n elif self.unit == \"oracle\":\n self.morpheme_to_id, self.max_morph_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.morpheme_to_id)\n else:\n sys.exit(\"Unknown unit\")", "def load_glove_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n for i,line in enumerate(f):\n L = line.split()\n word = L[0].lower()\n if word in vocab:\n word_vecs[word] = np.array(L[1:], dtype='float32')\n return word_vecs", "def load_vectors(path: str, vocabulary: set) -> (Optional[str], dict):\n print(f\"Started loading vectors from {path} @ {datetime.now()}\")\n print(f\"No. of words in vocabulary: {len(vocabulary)}\")\n words = dict()\n try:\n with open(file=path, mode=\"r\", encoding=\"utf-8\") as source_file:\n # Get the first line. Check if there's only 2 space-separated strings (hints a dimension)\n dimensions = str(next(source_file))\n if len(dimensions.split(\" \")) == 2:\n # We have a dimensions line. 
Keep it in the variable, continue with the next lines\n pass\n else:\n # We do not have a dimensions line\n line = dimensions.split(' ', 1)\n key = line[0]\n if key in vocabulary:\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n dimensions = None\n for line in source_file:\n line = line.split(' ', 1)\n key = line[0]\n if key in vocabulary:\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n except:\n print(\"Unable to read word vectors, aborting.\")\n return None\n print(f\"Finished loading a total of {len(words)} vectors @ {datetime.now()}\")\n return dimensions, normalise(words)", "def load(cls, path: str) -> 'Vocab':\n with open(path, 'r', encoding='utf-8') as f:\n return cls.from_json(f.read())", "def load_words():\n with open(DICTIONARY) as f:\n return [line.strip() for line in f]", "def load_embeddings(embeddings_path):\n\n embeddings_index = {}\n f = open(embeddings_path, encoding='utf-8')\n for line in tqdm(f):\n values = line.rstrip().split(' ')\n word = values[0]\n coefs = np.asarray(values[1:], dtype='float32')\n embeddings_index[word] = coefs\n f.close()\n print('Found {} word vectors.'.format(len(embeddings_index)))\n return embeddings_index", "def load_dict(dict_path):\n result_dict = {}\n for idx, line in enumerate(io.open(dict_path, \"r\", encoding='utf8')):\n terms = line.strip(\"\\n\")\n result_dict[idx] = terms\n return result_dict", "def load_pretrained_words_data(embeddings_filename, vocab):\n words = dict()\n emb_dim = None\n with gzip.open(cached_path(embeddings_filename), 'rb') as embeddings_file:\n for line in embeddings_file:\n fields = line.decode('utf-8').strip().split(' ')\n if len(fields) == 0:\n continue\n word = fields[0]\n if emb_dim is None:\n emb_dim = len(fields) - 1\n if emb_dim < 10: # my pretrained file is poisonous 😭\n emb_dim = None\n else:\n assert emb_dim == len(fields) - 1, \"{}, {}\".format(emb_dim, len(fields) - 1)\n words.update({word: [float(i) for i in fields[1:]]})\n print(\"Embedding dim: {}\".format(emb_dim))\n tokens = vocab.get_index_to_token_vocabulary(\"tokens\")\n n_tokens = len(tokens)\n data = []\n for i in tokens:\n if tokens[i] in words:\n data.append(words[tokens[i]])\n else:\n data.append([0] * emb_dim)\n return torch.tensor(data), emb_dim", "def get_vocab(self):\n if os.path.exists(self.vocab_file) & self.vocab_from_file:\n f = open(self.vocab_file, \"rb\")\n vocab = pickle.load(f)\n self.word2idx = vocab.word2idx\n self.idx2word = vocab.idx2word\n f.close()\n else:\n self.build_vocab()\n with open(self.vocab_file, 'wb') as f:\n pickle.dump(self, f)", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word).lower()\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs", "def load_vectors_novocab(path: str) -> (Optional[str], dict):\n print(f\"Started loading vectors from {path} @ {datetime.now()}\")\n words = dict()\n try:\n with open(file=path, mode=\"r\", encoding=\"utf-8\") as source_file:\n # Get the first line. Check if there's only 2 space-separated strings (hints a dimension)\n dimensions = str(next(source_file))\n if len(dimensions.split(\" \")) == 2:\n # We have a dimensions line. 
Keep it in the variable, continue with the next lines\n pass\n else:\n # We do not have a dimensions line\n line = dimensions.split(' ', 1)\n key = line[0]\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n dimensions = None\n for line in source_file:\n line = line.split(' ', 1)\n key = line[0]\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n except OSError:\n print(\"Unable to read word vectors, aborting.\")\n return {}\n print(f\"Finished loading a total of {len(words)} vectors @ {datetime.now()}\")\n return dimensions, normalise(words)", "def read_dictionary():\n\tglobal dictionary\n\twith open(FILE, \"r\") as f:\n\t\tfor words in f:\n\t\t\tdictionary += words.split()", "def load_vectors(fname):\r\n # taken from: https://fasttext.cc/docs/en/english-vectors.html\r\n vectors_data = vocab.Vectors(name=fname)\r\n\r\n return vectors_data", "def load_word2vect(self, file_path):\n self.embeddings = []\n self.word_to_idx = {'<pad>' : 0}\n self.vocab = ['<pad>']\n\n model = w2v.load(file_path)\n self.embedding_size = model.vectors.shape[1]\n pad_embedding = np.zeros(self.embedding_size, \"float32\")\n self.embeddings.append(pad_embedding)\n\n train_words_set = set([word for text in self.train_data for word in\n text[1].split(\" \")])\n\n for w in model.vocab:\n if w in train_words_set:\n self.word_to_idx[w] = len(self.vocab)\n self.vocab.append(w)\n self.embeddings.append(model[w])\n\n del model", "def initialize_vocabulary(vocabulary_path):\n characters_class = 9999\n\n if os.path.exists(vocabulary_path):\n with codecs.open(vocabulary_path, 'r', encoding='utf-8') as voc_file:\n rev_vocab = [line.strip() for line in voc_file]\n\n vocab = {x: y for (y, x) in enumerate(rev_vocab)}\n\n reserved_char_size = characters_class - len(rev_vocab)\n if reserved_char_size < 0:\n raise ValueError(\"Number of characters in vocabulary is equal or larger than config.characters_class\")\n\n for _ in range(reserved_char_size):\n rev_vocab.append('')\n\n # put space at the last position\n vocab[' '] = len(rev_vocab)\n rev_vocab.append(' ')\n return vocab, rev_vocab\n\n raise ValueError(\"Initializing vocabulary ends: %s\" % vocabulary_path)", "def _parse_tsv_vocab_file(self, vocab_file: str):\n with open(vocab_file, \"r\", encoding=\"utf-8\") as f:\n for (index, line) in enumerate(f):\n title, count = line.rstrip().split(\"\\t\")\n entity = Entity(title, None)\n self.vocab[entity] = index\n self.counter[entity] = int(count)\n self.inv_vocab[index] = [entity]", "def _load_vocabulary(self) -> Dict[str, int]:\n\n df_existing_vocab = self._db_connection.get_dataframe(table_name='tfidf_vocabulary', schema='encoded_articles')\n\n df_existing_vocab.set_index('word', inplace=True)\n\n return df_existing_vocab['feature_matrix_index'].to_dict()", "def init_embeddings_from_file(self, filepath, mode=None, **kwargs):\n words = self.d.vocab\n weight, words = EmbeddingLoader(filepath, mode).load(words, **kwargs)\n self.init_embeddings(weight, words)", "def loadGLOVE(filename, vocab):\n dct = {}\n vectors = array.array('d')\n current_idx = 0\n with codecs.open(filename, \"r\", encoding=\"utf-8\") as f:\n for _, line in enumerate(f):\n tokens = line.split(\" \")\n word = tokens[0]\n entries = tokens[1:]\n if not vocab or word in vocab:\n dct[word] = current_idx\n vectors.extend(float(x) for x in entries)\n current_idx += 1\n word_dim = len(entries)\n num_vectors = len(dct)\n tf.logging.info(\"Found {} out of {} vectors in Glove\".format(num_vectors, len(vocab)))\n return 
[np.array(vectors).reshape(num_vectors, word_dim), dct]", "def load_vec(fname, vocab, binary = True):\n print(\" Loading word2vec...\")\n #w2v_cache = \"cache\\\\w2v\"\n #if os.path.isfile(w2v_cache):\n # return cPickle.load(open(w2v_cache,\"rb\"))\n\n mode = (\"rb\" if binary else \"r\")\n word_vecs = {}\n with open(fname, mode) as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = numpy.dtype('float32').itemsize * layer1_size\n\n def getline():\n if binary:\n return numpy.fromstring(f.read(binary_len), dtype='float32')\n else:\n return numpy.array(f.readline().split(), dtype='float32')\n\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = getline()\n else:\n getline()\n print(\" Loaded word2vec...\")\n# cPickle.dump(word_vecs, open(w2v_cache, \"wb\"))\n return word_vecs", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs", "def load_bin_vec(self, fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n print header\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32')\n # logger.info(word_vecs[word])\n else:\n f.read(binary_len)\n # logger.info(\"num words already in word2vec: \" + str(len(word_vecs)))\n return word_vecs", "def load_wordlist(filename):\n # YOUR CODE HERE\n words = {}\n f = open(filename, 'rU')\n text = f.read()\n text = text.split('\\n')\n for line in text:\n words[line] = 1\n f.close()\n return words", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs", "def load_dictionary(filepath):\r\n # context manager read binary\r\n with open(filepath, 'rb') as file:\r\n # pickle load\r\n return pickle.load(file)", "def 
load_model(self, file=FILENAME, dim=DIMENSION, normalize=False):\n print(\"Loading pretrained Glove vectors from file {}\".format(FILENAME))\n self.dimension = dim\n self.normalize = normalize\n with open(file, \"r\", encoding=\"utf-8\") as textfile:\n self.num_tokens = count_lines(textfile)\n self.tokens_arr = [\"\" for i in range(self.num_tokens)]\n self.embeddings_mat = np.zeros((self.num_tokens, self.dimension))\n\n for idx, line in enumerate(textfile):\n line = line.split()\n token = ''.join(line[:-self.dimension])\n self.tokens_arr[idx] = token\n self.token_to_idx[token] = idx \n vec = list(map(float, line[-self.dimension:]))\n if self.normalize: \n # normalize the vectors as they are put into the matrix\n vec = vec / np.linalg.norm(vec)\n self.embeddings_mat[idx] = vec \n if (idx+1) % 200000 == 0:\n print(\" --{}% loaded.\".format(round(idx/self.num_tokens*100, 2)))\n print(\"Finished loading Glove model. {} vectors loaded\".format(self.num_tokens))", "def load(self):\n \n with open(os.path.join(self.output_dir, 'terms.dict'), 'rb') as f:\n self.term_id_map = pkl.load(f)\n with open(os.path.join(self.output_dir, 'docs.dict'), 'rb') as f:\n self.doc_id_map = pkl.load(f)", "def load_glove_vectors(filename, vocab):\n dct = {}\n vectors = array.array('d')\n current_idx = 0\n with open(filename, \"r\", encoding=\"utf-8\") as f:\n for _, line in enumerate(f):\n tokens = line.split(\" \")\n word = tokens[0]\n entries = tokens[1:]\n if not vocab or word in vocab:\n dct[word] = current_idx\n vectors.extend(float(x) for x in entries)\n current_idx += 1\n word_dim = len(entries)\n num_vectors = len(dct)\n return [np.array(vectors).reshape(num_vectors, word_dim), dct]", "def _load_glove_vec(fname, vocab):\n print 'load glove...'\n word_vecs = {}\n cnt = 0\n l = open(fname,'r').readline()\n embedding_size = len(l.strip().split()) -1\n print 'embedding vector size: %d'%(embedding_size)\n with open(fname, \"r\") as f:\n for l in f:\n stemp = l.strip().split(' ',1)\n assert len(stemp) == 2\n word = stemp[0]\n if word in vocab:\n word_vecs[stemp[0]] = np.fromstring(' '.join(stemp[1:]),sep = ' ')\n cnt+=1\n if cnt%10000==0:\n print '%d lines...'%cnt\n return (word_vecs,embedding_size)", "def load_dictionary(filename):\n\n word_list = []\n freq_sum = 0\n\n # nacitanie zo suboru\n with open(filename) as f:\n for line in f:\n freq, val = line.split()\n word_list.append(Word(int(freq), val))\n freq_sum += int(freq)\n\n # lexikograficke usporiadanie slov\n word_list_sorted = sorted(word_list, key=operator.attrgetter('value'))\n\n return word_list_sorted, freq_sum", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n # print(vocab_size)\n for line in range(vocab_size):\n # print(line)\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n # print(word)\n if 
word in vocab:\n # print(word)\n word_vecs[word] = np.frombuffer(f.read(binary_len), dtype='float32')\n else:\n f.read(binary_len)\n\n return word_vecs", "def load_vocab():\n # vocab loaded internally at google\n unused = r.sp_model\n del unused\n return r", "def load_embeddings(path):\r\n\r\n embeds = dict() # dictionary mapping words to vectors\r\n for line in open(path, encoding='utf-8'):\r\n row = line.strip().split('\\t')\r\n embeds[row[0]] = np.array(row[1:], dtype=np.float32)\r\n\r\n embeddings_dim = embeds[list(embeds)[0]].shape[0]\r\n\r\n return embeds, embeddings_dim", "def construct_dict(self):\n i = 0\n self.word2idx = dict()\n fi = open(self.config.word_vec_fi_glove, 'r')\n\n for line in fi:\n self.word2idx[line.split(\" \")[0]] = i\n i += 1\n\n self.vocab_size = i\n self.write_dict()\n fi.close()", "def load_sequences_from_file(path, voc_path):\n table = str.maketrans('', '', string.punctuation)\n \n with open(voc_path) as f:\n word_to_idx = dict(json.load(f))\n dict_size = len(word_to_idx)\n corpus = []\n with open(path, \"r\") as fin:\n for line in fin:\n line = line.lower()\n sentence = []\n for word in line.split():\n word = word.translate(table) # remove punctuation\n sentence.append(word_to_idx[word])\n corpus.append(sentence)\n\n for sentence in corpus:\n for i in range(max_len-len(sentence)):\n sentence.append(PAD)\n\n return torch.tensor(corpus)", "def load_embedding(fname, vocab):\n model = gensim.models.Word2Vec.load(fname)\n embedding = model.wv # keep only the embedding dictionary\n del model # frees up memory used to store Word2Vec model\n\n k = len(embedding['a']) # dimension of embedding\n unknown_vec = lambda: np.random.normal(0,0.17,k) #TODO check these parameters\n \n restricted_embedding = {word: default_get(embedding, word, unknown_vec()) for word in vocab}\n return restricted_embedding", "def load_reverse_dict(dict_path):\n result_dict = {}\n # TODO 字和词模型\n for idx, line in enumerate(io.open(dict_path, \"r\", encoding='utf8')):\n terms = line.strip(\"\\n\")\n result_dict[terms] = idx\n return result_dict", "def read_vocabulary(vocabulary_id):\n query = (\n select([vocabulary_table])\n .where(vocabulary_table.c.id == vocabulary_id))\n res = query.execute().first()\n if res is not None:\n return dict(res)", "def __init__(self, vocab_file, max_size):\n\t\tself._word_to_id = {}\n\t\tself._id_to_word = {}\n\t\tself._count = 0 # keeps track of total number of words in the Vocab\n\n\t\t# [UNK], [PAD], [START] and [STOP] get the ids 0,1,2,3.\n\t\tfor w in [UNKNOWN_TOKEN, PAD_TOKEN, START_DECODING, STOP_DECODING]:\n\t\t\tself._word_to_id[w] = self._count\n\t\t\tself._id_to_word[self._count] = w\n\t\t\tself._count += 1\n\n\t\t# Read the vocab file and add words up to max_size\n\t\twith open(vocab_file, 'r') as vocab_f:\n\t\t\tfor line in vocab_f:\n\t\t\t\tpieces = line.split()\n\t\t\t\tif len(pieces) != 2:\n\t\t\t\t\tprint ('Warning: incorrectly formatted line in vocabulary file: %s\\n' % line)\n\t\t\t\t\tcontinue\n\t\t\t\tw = pieces[0]\n\t\t\t\tif w in [SENTENCE_START, SENTENCE_END, UNKNOWN_TOKEN, PAD_TOKEN, START_DECODING, STOP_DECODING]:\n\t\t\t\t\traise Exception(\n\t\t\t\t\t\t'<s>, </s>, [UNK], [PAD], [START] and [STOP] shouldn\\'t be in the vocab file, but %s is' % w)\n\t\t\t\tif w in self._word_to_id:\n\t\t\t\t\traise Exception('Duplicated word in vocabulary file: %s' % w)\n\t\t\t\tself._word_to_id[w] = self._count\n\t\t\t\tself._id_to_word[self._count] = w\n\t\t\t\tself._count += 1\n\t\t\t\tif max_size != 0 and self._count >= max_size:\n\t\t\t\t\tprint 
(\"max_size of vocab was specified as %i; we now have %i words. Stopping reading.\" % (\n\t\t\t\t\tmax_size, self._count))\n\t\t\t\t\tbreak\n\n\t\tprint (\"Finished constructing vocabulary of %i total words. Last word added: %s\" % (\n\t\tself._count, self._id_to_word[self._count - 1]))", "def update_from_vocabulary(self, vocab_path):\n with open(vocab_path, 'r') as vocab_file:\n for word in vocab_file:\n word = word.strip()\n self._add_new_word(word)", "def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n cPickle.dump(word_vecs, open(\"./data/word_vecs.pkl\", \"w\"))\n return word_vecs", "def load_embeddings(filename):\n count = 0\n matrix = []\n word_map = {}\n with open(filename, encoding=\"utf8\") as f:\n # with open(filename) as f:\n for line in f:\n line = line.strip()\n items = line.split()\n word = items[0]\n rest = items[1:]\n # print(\"word:\", word)\n word_map[word] = count\n count += 1\n\n rest = list(map(float, rest))\n matrix.append(rest)\n matrix = np.array(matrix)\n return word_map, matrix", "def load_data_for_dict(path, voc_path, need_rem_punkt=True):\n \n table = str.maketrans('', '', string.punctuation)\n\n with open(voc_path) as f:\n word_to_idx = dict(json.load(f))\n dict_size = len(word_to_idx)\n \n with open(path, \"r\") as fin:\n for line in fin:\n line = line.lower()\n sentence = []\n for word in line.split():\n if need_rem_punkt: \n word = word.translate(table) # remove punctuation\n if word not in word_to_idx:\n word_to_idx[word] = dict_size\n dict_size += 1\n\n with open(voc_path, \"w+\") as fout:\n json.dump(word_to_idx, fout)", "def get_vocabulary(text_fname, vocab_fname):\n with codecs.open(text_fname,'r','utf-8') as infile, \\\n codecs.open(vocab_fname,'w','utf-8') as outfile: \n\n count_map={}\n for line in infile:\n sent=line.strip().split(' ')\n for w in sent:\n count_map[w]=count_map.get(w,0.0)+1.0\n\n for w,c in count_map.iteritems(): \n outfile.write(u'{}|{}\\n'.format(w,c))", "def load_dictionary(cls, filename, non_lang_syms=None):\n return AsrDictionary.load(filename, f_non_lang_syms=non_lang_syms)", "def load_embeddings(embedding_path):\n print('loading word embeddings from %s' % embedding_path)\n weight_vectors = []\n word_idx = {}\n with codecs.open(embedding_path, encoding='utf-8') as f:\n for line in f:\n word, vec = line.split(u' ', 1)\n word_idx[word] = len(weight_vectors)\n weight_vectors.append(np.array(vec.split(), dtype=np.float32))\n # Annoying implementation detail; '(' and ')' are replaced by '-LRB-' and\n # '-RRB-' respectively in the parse-trees.\n word_idx[u'-LRB-'] = word_idx.pop(u'(')\n word_idx[u'-RRB-'] = word_idx.pop(u')')\n # Random embedding vector for unknown words.\n weight_vectors.append(np.random.uniform(\n -0.05, 0.05, weight_vectors[0].shape).astype(np.float32))\n return np.stack(weight_vectors), word_idx", "def load_train_word_dict():\n train_dict = {}\n with open(TRANSCRIPTION_PATH) as file:\n for line in file:\n if int(line[0:3]) < 300:\n word_id, transcript = str.split(line, \" \")\n train_dict[word_id] = transcript.rstrip('\\n')\n return train_dict", "def 
load_word2vec(emb_path, id_to_word, word_dim, old_weights):\n new_weights = old_weights\n print('Loading pretrained embeddings from {}...'.format(emb_path))\n pre_trained = {}\n emb_invalid = 0\n for i, line in enumerate(codecs.open(emb_path, 'r', 'utf-8')):\n line = line.rstrip().split()\n if len(line) == word_dim + 1:\n pre_trained[line[0]] = np.array(\n [float(x) for x in line[1:]]\n ).astype(np.float32)\n else:\n emb_invalid += 1\n if emb_invalid > 0:\n print('WARNING: %i invalid lines' % emb_invalid)\n c_found = 0\n c_lower = 0\n c_zeros = 0\n n_words = len(id_to_word)\n # Lookup table initialization\n for i in range(n_words):\n word = id_to_word[i]\n if word in pre_trained:\n new_weights[i] = pre_trained[word]\n c_found += 1\n elif word.lower() in pre_trained:\n new_weights[i] = pre_trained[word.lower()]\n c_lower += 1\n elif re.sub('\\d', '0', word.lower()) in pre_trained:\n new_weights[i] = pre_trained[\n re.sub('\\d', '0', word.lower())\n ]\n c_zeros += 1\n print('Loaded %i pretrained embeddings.' % len(pre_trained))\n print('%i / %i (%.4f%%) words have been initialized with '\n 'pretrained embeddings.' % (\n c_found + c_lower + c_zeros, n_words,\n 100. * (c_found + c_lower + c_zeros) / n_words)\n )\n print('%i found directly, %i after lowercasing, '\n '%i after lowercasing + zero.' % (\n c_found, c_lower, c_zeros\n ))\n return new_weights" ]
[ "0.840865", "0.8027993", "0.80113626", "0.79590535", "0.77948564", "0.77901465", "0.77684295", "0.7714422", "0.76952124", "0.758633", "0.7529703", "0.7526938", "0.7503534", "0.74811476", "0.7436023", "0.7410485", "0.7398859", "0.73721033", "0.73700345", "0.73358095", "0.7330817", "0.7330817", "0.7323118", "0.7306603", "0.7305617", "0.7301545", "0.7263794", "0.7159349", "0.7095656", "0.70768726", "0.70573807", "0.70392656", "0.69846004", "0.69648796", "0.6961817", "0.6918543", "0.68952274", "0.6885908", "0.68847847", "0.68846065", "0.6805469", "0.6805046", "0.6784391", "0.67706734", "0.6766519", "0.67652756", "0.6759732", "0.6750333", "0.6721617", "0.6714102", "0.6670206", "0.6667548", "0.66569775", "0.66450113", "0.6632688", "0.6623776", "0.66142476", "0.66003203", "0.6582038", "0.65693045", "0.6558861", "0.65333015", "0.65293527", "0.65217626", "0.65148354", "0.65134543", "0.6512986", "0.65086466", "0.6478958", "0.6471379", "0.6471379", "0.6466827", "0.6466494", "0.6465524", "0.64638203", "0.6440785", "0.64331526", "0.6432885", "0.64248323", "0.6424581", "0.6422639", "0.640883", "0.6379853", "0.63687015", "0.6368411", "0.6361957", "0.6357631", "0.6356034", "0.63506055", "0.633432", "0.6334238", "0.6332405", "0.63233626", "0.6322225", "0.63010454", "0.6284033", "0.62748307", "0.62693554", "0.62673736" ]
0.7847063
5
Performs an HTTP request set in 'method'. Returns a requests object. The method will try to catch some of the typical errors and gather error messages from the New Relic API. Each known error has a corresponding exception. All exceptions are inherited from the generic NewRelicException. If the HTTP return code is not known, a generic NewRelicException is raised.
def _request(self, method, *args, **kwargs): try: r = getattr(requests, method)(*args, **kwargs) except AttributeError: raise NewRelicException( 'Method {} is unsupported by requests module' .format(method) ) except requests.exceptions.Timeout: raise Timeout('Request timed out after {} seconds' .format(self.timeout)) if r.status_code < 200 or r.status_code > 299: # Try to work out all known errors into separate exceptions if r.status_code == 401: try: error_message = r.json()['error']['title'] except (KeyError, ValueError): raise UnathorizedError( 'User is not authorized to perform requested operation' ) else: raise UnathorizedError(error_message) if r.status_code == 402: raise ChecksLimitExceeded( "Creating the monitor will increase your scheduled checks " "past your account's purchased check limit." ) elif r.status_code == 404: try: error_message = r.json()['error']['title'] except (KeyError, ValueError): raise ItemNotFoundError( 'Requested item not found. ' 'No error message was provided by server.' ) else: raise ItemNotFoundError(error_message) else: # If we don't know what to do with specific error code # ( most likely it's 400 ) # We at least try to get error message from the response try: response_errors = r.json()['errors'] raise NewRelicException( "The following errors were returned by server:\n{}" .format('\n' .join( [x['error'] for x in response_errors] )) ) # Sometimes API does not return any useful information. # In this case that's just an HTML page # reporting 400 instead of JSON. # We will just return an error code in this case. except ValueError: raise NewRelicException( 'Got unexpected response code {}. ' 'No additional information provided by server.' .format(r.status_code) ) return r
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_request(method, url, data=None, headers=None):\n try:\n if method == 'GET':\n resp = requests.get(url, headers=headers)\n return resp\n elif method == 'POST':\n resp = requests.post(url, json=data, headers=headers)\n return resp\n elif method == 'PATCH':\n resp = requests.patch(url, json=data, headers=headers)\n return resp\n except Exception, e:\n print \"Retry {} with {}, {}\".format(str(e), url, data)\n raise e", "def make_http_request(url, method='get', **kwargs):\n try:\n r = getattr(requests, method)(url, data=kwargs, verify=False)\n except AttributeError:\n r = requests.get(url, data=kwargs, verify=False)\n if 200 < r.status_code < 300:\n raise HTTPError(u'Expected HTTP response code \"2xx\" but received \"{}\"'.format(r.status_code))\n return r.content", "def _make_request(self):\n try:\n self.response = requests.request(\n method=self.method,\n url=self.url,\n params=self.params,\n data=self.data,\n )\n\n logger.debug(f\"Request URL: {self.response.url}\")\n\n self.response.raise_for_status()\n\n # wrap all `requests` library error and serve as custom application error\n except RequestException as e:\n logger.error(e.__str__(), exc_info=True)\n raise ExternalAPIError(\n \"Error while communication with External API\"\n )", "def request( # pylint: disable=arguments-differ\n self, method: str, url: str, **kwargs\n ) -> object:\n if self.base_url is not None and not url.startswith('https'):\n url = f'{self.base_url}{url}'\n\n # this kwargs value is used to signal 429 handling that this is a retry, but the super\n # method doesn't expect it so it needs to be removed.\n tc_is_retry = kwargs.pop('tc_is_retry', False)\n\n response: Response = super().request(method, url, **kwargs)\n\n if response.status_code == 429 and not tc_is_retry:\n too_many_requests_handler = self.too_many_requests_handler\n time.sleep(too_many_requests_handler(response))\n kwargs['tc_is_retry'] = True\n return self.request(method, url, **kwargs)\n\n # APP-79 - adding logging of request as curl commands\n if not response.ok or self.log_curl:\n try:\n self.log.debug(\n self.requests_to_curl.convert(\n response.request,\n mask_body=self.mask_body,\n mask_headers=self.mask_headers,\n mask_patterns=self.mask_patterns,\n proxies=self.proxies,\n verify=self.verify,\n )\n )\n except Exception: # nosec\n pass # logging curl command is best effort\n\n self.log.debug(\n f'feature=external-session, request-url={response.request.url}, '\n f'status_code={response.status_code}, elapsed={response.elapsed}'\n )\n\n return response", "def request(self, method, url, **kwargs):\n kwargs.setdefault(\"headers\", kwargs.get(\"headers\", {}))\n kwargs['headers'].setdefault('Accept', 'application/json')\n kwargs[\"headers\"][\"User-Agent\"] = self.user_agent\n if self.original_ip:\n kwargs[\"headers\"][\"Forwarded\"] = \"for=%s;by=%s\" % (\n self.original_ip, self.user_agent)\n if self.timeout is not None:\n kwargs.setdefault(\"timeout\", self.timeout)\n kwargs.setdefault(\"verify\", self.verify)\n if self.cert is not None:\n kwargs.setdefault(\"cert\", self.cert)\n self.serialize(kwargs)\n\n self._http_log_req(method, url, kwargs)\n if self.timings:\n start_time = time.time()\n resp = self.http.request(method, url, **kwargs)\n if self.timings:\n self.times.append((\"%s %s\" % (method, url),\n start_time, time.time()))\n self._http_log_resp(resp)\n\n if resp.status_code >= 400:\n LOG.debug(\n \"Request returned failure status: %s\",\n resp.status_code)\n raise exceptions.from_response(resp, method, url)\n\n return resp", "def 
make(self, method, extras=None):\n query = self.url_for_request(method, extras)\n logging.info(query)\n\n req = urllib2.Request(query)\n if self.shouldGzip:\n req.add_header('Accept-encoding', 'gzip')\n req.add_header('User-agent', 'Last.fm Explorer')\n\n result = { 'success' : False }\n\n max_retries = 2 \n attempt = 0\n\n while not result['success'] and attempt < max_retries:\n attempt += 1\n try:\n r = urllib2.urlopen(req, timeout=60).read()\n result['data'] = self.__unzip(r) if self.shouldGzip else r\n result['success'] = True\n if self.saveResponses:\n self.__save_response(method, extras, result['data'])\n\n except urllib2.HTTPError, e:\n logging.error(\"Requestor errored accessing \" + query + \" - \" + str(e.code))\n result['error'] = { 'code' : e.code, 'message' : e.msg }\n\n except urllib2.URLError, e:\n logging.error(\"Requestor failed to fetch \" + query + ' - URLError.')\n result['error'] = { 'message' : e.reason }\n\n except BadStatusLine:\n logging.error(\"Requestor caught BadStatusLine, attempt %d\" % (attempt,))\n result['error'] = { 'message' : \"Request gave BadStatusLine\" }\n\n except IOError, e:\n logging.error(\"Requestor caught IOError, attempt %d\" % (attempt,))\n result['error'] = { 'message' : \"Request gave IOError: \" + str(e) }\n\n except Exception as instance:\n logging.error(\"Requestor caught unknown exception for request \" + query + \" - \" + str(type(instance)))\n logging.error(traceback.format_exc())\n result['error'] = { 'messasge' : \"Unknown problem\" }\n\n return result", "def make_request(method, url, headers=None, data=None, retries=3):\n no_retry_status = [404, 401, 403]\n may_retry_status = [408, 500, 502, 503]\n\n if not retries:\n return requests.request(method=method,\n url=url,\n headers=headers,\n data=data)\n\n while retries:\n r = requests.request(method=method,\n url=url,\n headers=headers,\n data=data)\n if r.status_code in no_retry_status:\n return r\n\n elif r.status_code in may_retry_status:\n retries -= 1\n time.sleep(1)\n\n if retries == 0:\n return r\n continue\n\n else:\n return r", "def _issue_http_request(method, path, headers):\n http_client = _obtain_http_client()\n try:\n http_client.request(method, path, headers=headers)\n return http_client.getresponse()\n except (socket.error, http.client.HTTPException):\n LOGGER.exception('Error occurred while issuing http request.')\n raise errors.MetadataServerHttpError", "def _request(self, method, uri, **kwargs):\n url = self.url + uri\n self.logger.debug(\"Requesting {} on {}\".format(method, url))\n response = requests.request(method, url, verify=self.verify, **kwargs)\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError:\n self.logger.error(\"Bad http code {} requesting Clair\".format(response.status_code))\n if response.reason == \"Not Found\":\n raise ResourceNotFoundException(\"Resource not found\")\n raise ClairConnectionError(response)\n return response", "def _request(self, method, url, retries=None, **kwargs):\n if retries is None:\n retries = self.retries\n\n try:\n LOG.debug(\"Attempting: %s %s\", method, kwargs)\n if 'SSL_CA' in os.environ:\n return method(url, verify=os.environ['SSL_CA'], **kwargs)\n else:\n return method(url, **kwargs)\n except (requests.exceptions.SSLError, OpenSSL.SSL.Error):\n if 'SSL_CA' in os.environ:\n LOG.info(\"SSL verification failed, trying default certs.\")\n return method(url, **kwargs)\n else:\n LOG.error(\"SSL verification failed.\")\n raise\n except Exception:\n if retries > 0:\n self._request(method, url, 
retries=retries-1, **kwargs)\n else:\n raise", "def _make_request(self, url: str, parameters: dict = None,\n method: str = 'GET', *args, **kwargs):\n response = requests.request(\n method=method,\n url=build_url(\n self.BASE_API_URL, url, parameters\n ),\n headers={\n 'Authorization': 'Bearer {}'.format(self._access_token)\n }, **kwargs\n )\n if response.ok:\n return response.json()\n raise MondoApiException(response.json()['message'])", "def request(self, *args, **kwargs):\n try:\n return self._http.request(*args, timeout=TIMEOUT, **kwargs)\n except Exception as exc:\n raise RequestException(exc, args, kwargs)", "def _make_request(self, method: str, params: Dict) -> Dict:\n\n # Define a new session.\n request_session = requests.Session()\n request_session.verify = True\n\n # Define a new request.\n request_request = requests.Request(\n method=method.upper(),\n url=self.bea_url,\n params=params\n ).prepare()\n\n # Send the request.\n response: requests.Response = request_session.send(\n request=request_request\n )\n\n # Close the Session\n request_session.close()\n\n print(response.url)\n\n # If the response is OK then return it.\n if response.ok and self._format == 'JSON':\n return response.json()\n elif response.ok and self._format == 'XML':\n return response.text\n else:\n raise requests.ConnectionError()", "def http_request(method, url, params=None):\n if method.lower() not in _request_methods:\n raise NotImplementedError(\"HTTP request method not implemented\")\n\n\n return _request_methods[method.lower()](url, params)", "def _request(self, method, url, params=None, data=None, request_type=PRIVATE, headers={}):\n self._is_valid_request_option(request_type=request_type)\n\n request_headers = copy.deepcopy(self.BASE_HEADERS)\n request_headers.update(headers)\n\n response = getattr(requests, method.lower())(\n url,\n headers=request_headers,\n params=params,\n data=data\n )\n\n return self._handle_response(response)", "def request(self, *args, **kwargs):\n\n ratelimit_retries, temporary_error_retries, ident_retries = 0, 0, {}\n\n while True:\n try:\n try:\n return self._request(*args, **kwargs)\n except Exception as exc:\n self.error_processor(exc)\n raise\n\n except Retry as exc:\n ident_retries.setdefault(exc.retry_ident, 0)\n ident_retries[exc.retry_ident] += 1\n if ident_retries[exc.retry_ident] <= exc.retry_count:\n self.logger.warning('Retry(%s) after calls(%s/%s) since(%s) on: %s',\n ident_retries[exc.retry_ident], self.calls_count,\n self.calls_elapsed_seconds, self.first_call_time,\n exc.retry_ident)\n if exc.wait_seconds:\n self.sleep(exc.wait_seconds,\n log_reason='retry request: {}'.format(exc.retry_ident))\n else:\n raise self.RetryExceeded(\n exc.result, retry_ident=exc.retry_ident, retry_count=exc.retry_count)\n\n except RatelimitError as exc:\n ratelimit_retries += 1\n if ratelimit_retries <= self.ratelimit_retries:\n self.logger.warning('Retry(%s) after calls(%s/%s) since(%s) on error: %r',\n ratelimit_retries, self.calls_count,\n self.calls_elapsed_seconds, self.first_call_time, exc)\n self.sleep(exc.wait_seconds is not None and exc.wait_seconds\n or self.ratelimit_wait_seconds,\n log_reason='ratelimit wait')\n else:\n if ratelimit_retries - 1:\n raise self.RetryExceeded(exc, retry_count=ratelimit_retries - 1)\n raise\n\n except TemporaryError as exc:\n temporary_error_retries += 1\n if temporary_error_retries <= self.temporary_error_retries:\n self.logger.debug('Retry(%s) after calls(%s/%s) since(%s) on error: %r',\n temporary_error_retries, self.calls_count,\n 
self.calls_elapsed_seconds, self.first_call_time, exc)\n self.sleep(exc.wait_seconds is not None and exc.wait_seconds\n or self.temporary_error_wait_seconds,\n log_reason='temporary error wait')\n else:\n if temporary_error_retries - 1:\n raise self.RetryExceeded(exc, retry_count=temporary_error_retries - 1)\n raise", "def get_request(self):\n url = self.get_url()\n r = requests.get(url)\n r.raise_for_status()\n return r", "def _request(self, url, params, base_url=None, first_request_time=None, verbose=False, requests_kwargs=None):\n\n if not first_request_time:\n first_request_time = datetime.now()\n\n if base_url is None:\n base_url = self.base_url\n\n elapsed = datetime.now() - first_request_time\n # TODO: to catch timeouts\n # if elapsed > self.retry_timeout:\n # raise TimeOutException()\n\n # create url :: self._generate_query_url(url, params)\n query_url = url\n\n # url encoding of params\n # TODO: use urlencoding here on params\n\n requests_kwargs = requests_kwargs or {}\n final_requests_kwargs = dict(self.requests_kwargs, **requests_kwargs)\n\n # method\n requests_method = self.session.get\n\n try:\n response = requests_method(\n base_url + query_url,\n params=params,\n **final_requests_kwargs)\n\n # temporary, for logging\n if verbose:\n pretty_print_POST(response.request)\n\n except requests.exceptions.Timeout:\n raise TimeOutException()\n except Exception as e:\n raise TransportError(e)\n\n result = self._get_body(response)\n\n return result", "def do_request(\n self,\n version: str,\n action: str,\n protocol: str,\n method: str,\n pathname: str,\n request: dict,\n headers: Dict[str, str],\n runtime: util_models.RuntimeOptions,\n ) -> dict:\n runtime.validate()\n _runtime = {\n 'timeouted': 'retry',\n 'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),\n 'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),\n 'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),\n 'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),\n 'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),\n 'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),\n 'maxIdleTimeMillis': self._max_idle_time_millis,\n 'keepAliveDuration': self._keep_alive_duration_millis,\n 'maxRequests': self._max_requests,\n 'maxRequestsPerHost': self._max_requests_per_host,\n 'retry': {\n 'retryable': runtime.autoretry,\n 'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)\n },\n 'backoff': {\n 'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),\n 'period': UtilClient.default_number(runtime.backoff_period, 1)\n },\n 'ignoreSSL': runtime.ignore_ssl,\n # 资源定位信息\n }\n _last_request = None\n _last_exception = None\n _now = time.time()\n _retry_times = 0\n while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):\n if _retry_times > 0:\n _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)\n if _backoff_time > 0:\n TeaCore.sleep(_backoff_time)\n _retry_times = _retry_times + 1\n try:\n _request = TeaRequest()\n _request.protocol = UtilClient.default_string(self._protocol, protocol)\n _request.method = method\n _request.pathname = pathname\n _request.query = {\n 'method': action,\n 'version': version,\n 'sign_type': 'HmacSHA1',\n 'req_time': AntchainUtils.get_timestamp(),\n 'req_msg_id': AntchainUtils.get_nonce(),\n 'access_key': self._access_key_id,\n 'base_sdk_version': 'TeaSDK-2.0',\n 
'sdk_version': '1.8.95',\n '_prod_code': 'BOT',\n '_prod_channel': 'undefined'\n }\n if not UtilClient.empty(self._security_token):\n _request.query['security_token'] = self._security_token\n _request.headers = TeaCore.merge({\n 'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),\n 'user-agent': UtilClient.get_user_agent(self._user_agent)\n }, headers)\n tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))\n _request.body = UtilClient.to_form_string(tmp)\n _request.headers['content-type'] = 'application/x-www-form-urlencoded'\n signed_param = TeaCore.merge(_request.query,\n RPCUtilClient.query(request))\n _request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)\n _last_request = _request\n _response = TeaCore.do_action(_request, _runtime)\n raw = UtilClient.read_as_string(_response.body)\n obj = UtilClient.parse_json(raw)\n res = UtilClient.assert_as_map(obj)\n resp = UtilClient.assert_as_map(res.get('response'))\n if AntchainUtils.has_error(raw, self._access_key_secret):\n raise TeaException({\n 'message': resp.get('result_msg'),\n 'data': resp,\n 'code': resp.get('result_code')\n })\n return resp\n except Exception as e:\n if TeaCore.is_retryable(e):\n _last_exception = e\n continue\n raise e\n raise UnretryableException(_last_request, _last_exception)", "def requests_get(*args, **kwargs):\n\n logger = kwargs.pop('logger', None)\n s = requests.Session()\n s.headers[\n 'User-Agent'] = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.131 Safari/537.36'\n\n try:\n return s.get(*args, **kwargs)\n except RequestException as exc:\n if logger:\n logger.warning('Request failed (%s). Retrying ...', exc)\n return s.get(*args, **kwargs)", "def http_request(self, method: str, url_suffix: str, params: dict = None, json_data: dict = None, **kwargs):\n response = self._http_request(method, url_suffix, params=params, json_data=json_data, resp_type=\"response\",\n ok_codes=[200, *list(HTTP_ERRORS.keys())], raise_on_status=False, **kwargs)\n if response.status_code == 400 and response.json() and response.json().get('Message'):\n raise DemistoException(\n HTTP_ERRORS[response.status_code].format(\"Message:\" + response.json().get(\"Message\")))\n elif response.status_code in list(HTTP_ERRORS.keys()):\n raise DemistoException(HTTP_ERRORS[response.status_code])\n return response.json()", "def make_request(self,\n method, # type: str\n url, # type: str\n retry=False, # type: Union[bool, int]\n cookies=None, # type: Optional[AnyCookiesContainer]\n headers=None, # type: Optional[AnyHeadersContainer]\n **kwargs, # type: Any\n ): # type: (...) 
-> AnyResponseType\n retries = int(retry) if retry is not None else 0\n cookies = CaseInsensitiveDict(cookies or {})\n headers = CaseInsensitiveDict(headers or {})\n cookies.update(self.get_auth_cookies())\n headers.update(self.headers.copy())\n headers.update(self.get_auth_headers())\n response = request_extra(method, url=url, settings=self.settings, retries=retries,\n headers=headers, cookies=cookies, **kwargs)\n return response", "def request(self, uri, method=\"GET\", body=None, headers=None,\n max_redirects=None, connection_type=None):\n if max_redirects is None:\n max_redirects = self.max_redirects\n if headers is None:\n headers = {}\n # Prepare headers\n headers.pop('cookie', None)\n req = DummyRequest(uri, headers)\n self.cookiejar.lock.acquire()\n try:\n self.cookiejar.add_cookie_header(req)\n finally:\n self.cookiejar.lock.release()\n headers = req.headers\n\n # Wikimedia squids: add connection: keep-alive to request headers\n # unless overridden\n headers['connection'] = headers.pop('connection', 'keep-alive')\n\n # determine connection pool key and fetch connection\n (scheme, authority, request_uri,\n defrag_uri) = httplib2.urlnorm(httplib2.iri2uri(uri))\n conn_key = scheme + \":\" + authority\n\n connection = self.connection_pool.pop_connection(conn_key)\n if connection is not None:\n self.connections[conn_key] = connection\n\n # Redirect hack: we want to regulate redirects\n follow_redirects = self.follow_redirects\n self.follow_redirects = False\n pywikibot.debug(u\"%r\" % (\n (uri.replace(\"%7C\", \"|\"), method, body,\n headers, max_redirects,\n connection_type),\n ), _logger)\n try:\n (response, content) = httplib2.Http.request(\n self, uri, method, body, headers,\n max_redirects, connection_type\n )\n except Exception as e: # what types?\n # return exception instance to be retrieved by the calling thread\n return e\n self.follow_redirects = follow_redirects\n\n # return connection to pool\n self.connection_pool.push_connection(conn_key,\n self.connections[conn_key])\n del self.connections[conn_key]\n\n # First write cookies\n self.cookiejar.lock.acquire()\n try:\n self.cookiejar.extract_cookies(DummyResponse(response), req)\n finally:\n self.cookiejar.lock.release()\n\n # Check for possible redirects\n redirectable_response = ((response.status == 303) or\n (response.status in [300, 301, 302, 307] and\n method in [\"GET\", \"HEAD\"]))\n if (self.follow_redirects and (max_redirects > 0) and\n redirectable_response):\n (response, content) = self._follow_redirect(\n uri, method, body, headers, response, content, max_redirects)\n\n return response, content", "def do_request(\n self,\n version: str,\n action: str,\n protocol: str,\n method: str,\n pathname: str,\n request: dict,\n headers: Dict[str, str],\n runtime: util_models.RuntimeOptions,\n ) -> dict:\n runtime.validate()\n _runtime = {\n 'timeouted': 'retry',\n 'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),\n 'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),\n 'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),\n 'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),\n 'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),\n 'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),\n 'maxIdleTimeMillis': self._max_idle_time_millis,\n 'keepAliveDuration': self._keep_alive_duration_millis,\n 'maxRequests': self._max_requests,\n 
'maxRequestsPerHost': self._max_requests_per_host,\n 'retry': {\n 'retryable': runtime.autoretry,\n 'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)\n },\n 'backoff': {\n 'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),\n 'period': UtilClient.default_number(runtime.backoff_period, 1)\n },\n 'ignoreSSL': runtime.ignore_ssl,\n # 创建凭证One\n }\n _last_request = None\n _last_exception = None\n _now = time.time()\n _retry_times = 0\n while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):\n if _retry_times > 0:\n _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)\n if _backoff_time > 0:\n TeaCore.sleep(_backoff_time)\n _retry_times = _retry_times + 1\n try:\n _request = TeaRequest()\n _request.protocol = UtilClient.default_string(self._protocol, protocol)\n _request.method = method\n _request.pathname = pathname\n _request.query = {\n 'method': action,\n 'version': version,\n 'sign_type': 'HmacSHA1',\n 'req_time': AntchainUtils.get_timestamp(),\n 'req_msg_id': AntchainUtils.get_nonce(),\n 'access_key': self._access_key_id,\n 'base_sdk_version': 'TeaSDK-2.0',\n 'sdk_version': '1.6.10',\n '_prod_code': 'SHUZIWULIU',\n '_prod_channel': 'undefined'\n }\n if not UtilClient.empty(self._security_token):\n _request.query['security_token'] = self._security_token\n _request.headers = TeaCore.merge({\n 'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),\n 'user-agent': UtilClient.get_user_agent(self._user_agent)\n }, headers)\n tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))\n _request.body = UtilClient.to_form_string(tmp)\n _request.headers['content-type'] = 'application/x-www-form-urlencoded'\n signed_param = TeaCore.merge(_request.query,\n RPCUtilClient.query(request))\n _request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)\n _last_request = _request\n _response = TeaCore.do_action(_request, _runtime)\n raw = UtilClient.read_as_string(_response.body)\n obj = UtilClient.parse_json(raw)\n res = UtilClient.assert_as_map(obj)\n resp = UtilClient.assert_as_map(res.get('response'))\n if AntchainUtils.has_error(raw, self._access_key_secret):\n raise TeaException({\n 'message': resp.get('result_msg'),\n 'data': resp,\n 'code': resp.get('result_code')\n })\n return resp\n except Exception as e:\n if TeaCore.is_retryable(e):\n _last_exception = e\n continue\n raise e\n raise UnretryableException(_last_request, _last_exception)", "def do_request(\n self,\n version: str,\n action: str,\n protocol: str,\n method: str,\n pathname: str,\n request: dict,\n headers: Dict[str, str],\n runtime: util_models.RuntimeOptions,\n ) -> dict:\n runtime.validate()\n _runtime = {\n 'timeouted': 'retry',\n 'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),\n 'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),\n 'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),\n 'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),\n 'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),\n 'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),\n 'maxIdleTimeMillis': self._max_idle_time_millis,\n 'keepAliveDurationMillis': self._keep_alive_duration_millis,\n 'maxRequests': self._max_requests,\n 'maxRequestsPerHost': self._max_requests_per_host,\n 'retry': {\n 'retryable': runtime.autoretry,\n 'maxAttempts': 
UtilClient.default_number(runtime.max_attempts, 3)\n },\n 'backoff': {\n 'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),\n 'period': UtilClient.default_number(runtime.backoff_period, 1)\n },\n 'ignoreSSL': runtime.ignore_ssl,\n # 无分组设备\n }\n _last_request = None\n _last_exception = None\n _now = time.time()\n _retry_times = 0\n while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):\n if _retry_times > 0:\n _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)\n if _backoff_time > 0:\n TeaCore.sleep(_backoff_time)\n _retry_times = _retry_times + 1\n try:\n _request = TeaRequest()\n _request.protocol = UtilClient.default_string(self._protocol, protocol)\n _request.method = method\n _request.pathname = pathname\n _request.query = {\n 'method': action,\n 'version': version,\n 'sign_type': 'HmacSHA1',\n 'req_time': AntchainUtils.get_timestamp(),\n 'req_msg_id': AntchainUtils.get_nonce(),\n 'access_key': self._access_key_id,\n 'base_sdk_version': 'TeaSDK-2.0',\n 'sdk_version': '1.0.45'\n }\n if not UtilClient.empty(self._security_token):\n _request.query['security_token'] = self._security_token\n _request.headers = TeaCore.merge({\n 'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),\n 'user-agent': UtilClient.get_user_agent(self._user_agent)\n }, headers)\n tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))\n _request.body = UtilClient.to_form_string(tmp)\n _request.headers['content-type'] = 'application/x-www-form-urlencoded'\n signed_param = TeaCore.merge(_request.query,\n RPCUtilClient.query(request))\n _request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)\n _last_request = _request\n _response = TeaCore.do_action(_request, _runtime)\n raw = UtilClient.read_as_string(_response.body)\n obj = UtilClient.parse_json(raw)\n res = UtilClient.assert_as_map(obj)\n resp = UtilClient.assert_as_map(res.get('response'))\n if AntchainUtils.has_error(raw, self._access_key_secret):\n raise TeaException({\n 'message': resp.get('result_msg'),\n 'data': resp,\n 'code': resp.get('result_code')\n })\n return resp\n except Exception as e:\n if TeaCore.is_retryable(e):\n _last_exception = e\n continue\n raise e\n raise UnretryableException(_last_request, _last_exception)", "def _api_request(self, path, method, data=None, query=None):\n\n url = request_url(\n self.config['secure'],\n self.config['hostname'],\n self.config['port'],\n path,\n query,\n )\n\n try:\n resp = request(\n url,\n method,\n self._headers(),\n data,\n self.config['timeout'],\n )\n\n return Response(\n resp.get('meta', {}),\n # Response info may have 'object' or 'objects' key, depending\n # on whether there are 1 or multiple results.\n resp.get('object', resp.get('objects'))\n )\n except HTTPError as e:\n response = e.read()\n fallback = '{0} {1}'.format(e.code, e.msg)\n\n if isinstance(response, bytes):\n data = response.decode('utf8')\n else:\n data = response\n\n error = json.loads(data).get('error', {})\n message = error.get('message', fallback)\n raise HTTPResponseError(message, status_code=e.code, cause=e)", "def _doRequest(self, httpClientMethod, *args):\n try:\n resp = httpClientMethod(*args)\n return resp.json()\n except RequestException as e:\n raise checkedError(e)", "def _do_request(self, url: str):\n\n self.debug.ok('method', self.method)\n\n if self.client.fake_response_path:\n with open(self.client.fake_response_path, 'r') as f:\n return constants.ResponseCode.OK, f.read()\n\n elif 
self.method == constants.RequestConst.GET:\n response = requests.get(\n url, headers=self._headers(), timeout=self._timeout\n )\n\n self.debug.ok(\n constants.RequestConst.QUERY_PARAMETERS,\n self.parameters[constants.RequestConst.QUERY]\n )\n self.debug.ok(constants.ResponseConst.RESPONSE_OBJECT, response)\n\n return response.status_code, response.text\n\n elif self.method in [\n constants.RequestConst.POST,\n constants.RequestConst.PUT,\n constants.RequestConst.DELETE\n ]:\n if self.method == constants.RequestConst.POST:\n send_request = requests.post\n elif self.method == constants.RequestConst.PUT:\n send_request = requests.put\n elif self.method == constants.RequestConst.DELETE:\n send_request = requests.delete\n\n response = send_request(\n url, json=self.parameters[constants.RequestConst.QUERY],\n headers=self._headers(), timeout=self._timeout\n )\n\n self.debug.ok('payload', self.parameters[\n constants.RequestConst.QUERY\n ])\n self.debug.ok(constants.ResponseConst.RESPONSE_OBJECT, response)\n\n return response.status_code, response.text\n\n else:\n return constants.ResponseCode.NOT_FOUND, {}", "def do_request(\n self,\n version: str,\n action: str,\n protocol: str,\n method: str,\n pathname: str,\n request: dict,\n headers: Dict[str, str],\n runtime: util_models.RuntimeOptions,\n ) -> dict:\n runtime.validate()\n _runtime = {\n 'timeouted': 'retry',\n 'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),\n 'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),\n 'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),\n 'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),\n 'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),\n 'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),\n 'maxIdleTimeMillis': self._max_idle_time_millis,\n 'keepAliveDuration': self._keep_alive_duration_millis,\n 'maxRequests': self._max_requests,\n 'maxRequestsPerHost': self._max_requests_per_host,\n 'retry': {\n 'retryable': runtime.autoretry,\n 'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)\n },\n 'backoff': {\n 'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),\n 'period': UtilClient.default_number(runtime.backoff_period, 1)\n },\n 'ignoreSSL': runtime.ignore_ssl,\n # 金额\n }\n _last_request = None\n _last_exception = None\n _now = time.time()\n _retry_times = 0\n while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):\n if _retry_times > 0:\n _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)\n if _backoff_time > 0:\n TeaCore.sleep(_backoff_time)\n _retry_times = _retry_times + 1\n try:\n _request = TeaRequest()\n _request.protocol = UtilClient.default_string(self._protocol, protocol)\n _request.method = method\n _request.pathname = pathname\n _request.query = {\n 'method': action,\n 'version': version,\n 'sign_type': 'HmacSHA1',\n 'req_time': AntchainUtils.get_timestamp(),\n 'req_msg_id': AntchainUtils.get_nonce(),\n 'access_key': self._access_key_id,\n 'base_sdk_version': 'TeaSDK-2.0',\n 'sdk_version': '1.1.2',\n '_prod_code': 'DEFINCASHIER',\n '_prod_channel': 'undefined'\n }\n if not UtilClient.empty(self._security_token):\n _request.query['security_token'] = self._security_token\n _request.headers = TeaCore.merge({\n 'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),\n 'user-agent': 
UtilClient.get_user_agent(self._user_agent)\n }, headers)\n tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))\n _request.body = UtilClient.to_form_string(tmp)\n _request.headers['content-type'] = 'application/x-www-form-urlencoded'\n signed_param = TeaCore.merge(_request.query,\n RPCUtilClient.query(request))\n _request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)\n _last_request = _request\n _response = TeaCore.do_action(_request, _runtime)\n raw = UtilClient.read_as_string(_response.body)\n obj = UtilClient.parse_json(raw)\n res = UtilClient.assert_as_map(obj)\n resp = UtilClient.assert_as_map(res.get('response'))\n if AntchainUtils.has_error(raw, self._access_key_secret):\n raise TeaException({\n 'message': resp.get('result_msg'),\n 'data': resp,\n 'code': resp.get('result_code')\n })\n return resp\n except Exception as e:\n if TeaCore.is_retryable(e):\n _last_exception = e\n continue\n raise e\n raise UnretryableException(_last_request, _last_exception)", "def do_request(\n self,\n version: str,\n action: str,\n protocol: str,\n method: str,\n pathname: str,\n request: dict,\n headers: Dict[str, str],\n runtime: util_models.RuntimeOptions,\n ) -> dict:\n runtime.validate()\n _runtime = {\n 'timeouted': 'retry',\n 'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),\n 'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),\n 'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),\n 'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),\n 'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),\n 'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),\n 'maxIdleTimeMillis': self._max_idle_time_millis,\n 'keepAliveDuration': self._keep_alive_duration_millis,\n 'maxRequests': self._max_requests,\n 'maxRequestsPerHost': self._max_requests_per_host,\n 'retry': {\n 'retryable': runtime.autoretry,\n 'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)\n },\n 'backoff': {\n 'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),\n 'period': UtilClient.default_number(runtime.backoff_period, 1)\n },\n 'ignoreSSL': runtime.ignore_ssl,\n # 身份\n }\n _last_request = None\n _last_exception = None\n _now = time.time()\n _retry_times = 0\n while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):\n if _retry_times > 0:\n _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)\n if _backoff_time > 0:\n TeaCore.sleep(_backoff_time)\n _retry_times = _retry_times + 1\n try:\n _request = TeaRequest()\n _request.protocol = UtilClient.default_string(self._protocol, protocol)\n _request.method = method\n _request.pathname = pathname\n _request.query = {\n 'method': action,\n 'version': version,\n 'sign_type': 'HmacSHA1',\n 'req_time': AntchainUtils.get_timestamp(),\n 'req_msg_id': AntchainUtils.get_nonce(),\n 'access_key': self._access_key_id,\n 'base_sdk_version': 'TeaSDK-2.0',\n 'sdk_version': '1.0.212',\n '_prod_code': 'DEMO',\n '_prod_channel': 'undefined'\n }\n if not UtilClient.empty(self._security_token):\n _request.query['security_token'] = self._security_token\n _request.headers = TeaCore.merge({\n 'host': UtilClient.default_string(self._endpoint, 'centre-openapi.antchain.antgroup.com'),\n 'user-agent': UtilClient.get_user_agent(self._user_agent)\n }, headers)\n tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))\n 
_request.body = UtilClient.to_form_string(tmp)\n _request.headers['content-type'] = 'application/x-www-form-urlencoded'\n signed_param = TeaCore.merge(_request.query,\n RPCUtilClient.query(request))\n _request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)\n _last_request = _request\n _response = TeaCore.do_action(_request, _runtime)\n raw = UtilClient.read_as_string(_response.body)\n obj = UtilClient.parse_json(raw)\n res = UtilClient.assert_as_map(obj)\n resp = UtilClient.assert_as_map(res.get('response'))\n if AntchainUtils.has_error(raw, self._access_key_secret):\n raise TeaException({\n 'message': resp.get('result_msg'),\n 'data': resp,\n 'code': resp.get('result_code')\n })\n return resp\n except Exception as e:\n if TeaCore.is_retryable(e):\n _last_exception = e\n continue\n raise e\n raise UnretryableException(_last_request, _last_exception)", "def common_http_validator(method=None,url=None,data=None,header=None):\r\n status_code = 500\r\n error_msg = None\r\n response_data = None\r\n\r\n try:\r\n req = requests.request(method=method,url=url,data=data,headers=header)\r\n # print req.request.method #Getting the method\r\n\r\n except (requests.RequestException,requests.HTTPError,requests.ConnectionError,requests.Timeout) as e:\r\n error_msg = 'Connection/Timeout/General Exception: {}'.format(e)\r\n\r\n except Exception as e:\r\n error_msg = 'Connection/Timeout/General Exception: {}'.format(e)\r\n\r\n else:\r\n status_code = req.status_code\r\n response_data = req.content\r\n\r\n return status_code, response_data, error_msg", "def request(self, http_method, url, payload=None, querystring=None,\r\n all_pages=None):\r\n\r\n try:\r\n\r\n response = self.fetch_response(\r\n http_method, url, payload=payload, querystring=querystring)\r\n try:\r\n if self.is_valid_response(response):\r\n response_json = None\r\n if response.status_code != 204:\r\n if response.status_code == 201 and response.content == b'':\r\n pass\r\n else:\r\n response_json = response.json()\r\n # check 'all_pages' required, response received is\r\n # partial(code 206) and contains info about total size of\r\n # the collection\r\n content_range = response.headers.get('content-range')\r\n if all_pages and response.status_code == 206 and\\\r\n content_range:\r\n # 'content-range': '0-99/789'\r\n total_size = self.get_total_size_from_content_range(\r\n content_range)\r\n myranges = [\r\n \"{0}-{1}\".format(i, i + constants.MAX_LIMIT)\r\n for i in range(constants.OFFSET, total_size,\r\n constants.MAX_LIMIT)]\r\n for myrange in myranges:\r\n response = self.fetch_response(\r\n http_method, url, payload=payload,\r\n querystring=querystring, myrange=myrange)\r\n if self.is_valid_response(response):\r\n response_json.extend(response.json())\r\n else:\r\n self.raise_http_exception(response)\r\n\r\n return response_json\r\n else:\r\n self.raise_http_exception(response)\r\n\r\n except ValueError as ex:\r\n # its low-level or response level error caused by\r\n # response.json() and not in requests.exceptions\r\n error_msg = \"ValueError: '{0}' for Method: '{1}' URL: '{2}'\"\\\r\n \" PayLoad: '{3}' QueryString: '{4}'\".format(\r\n str(ex), http_method, url, payload, querystring)\r\n LOG.error(error_msg)\r\n raise PowerStoreException(PowerStoreException.VALUE_ERROR,\r\n error_msg)\r\n except socket.error as exception:\r\n LOG.error(str(exception))\r\n raise PowerStoreException(PowerStoreException.SOCKET_ERR,\r\n str(exception))\r\n except SSLError as exception:\r\n LOG.error(str(exception))\r\n raise 
PowerStoreException(PowerStoreException.SSL_ERROR,\r\n str(exception))\r\n except ConnectionError as exception:\r\n LOG.error(str(exception))\r\n raise PowerStoreException(PowerStoreException.CONNECTION_ERROR,\r\n str(exception))\r\n except TooManyRedirects as exception:\r\n LOG.error(str(exception))\r\n raise PowerStoreException(\r\n PowerStoreException.TOO_MANY_REDIRECTS_ERROR, str(exception))\r\n except Timeout as exception:\r\n LOG.error(str(exception))\r\n raise PowerStoreException(PowerStoreException.TIMEOUT_ERROR,\r\n str(exception))", "def sendHttpRequest(self, endpoint, payload=None, method=None, xDepth=None):\n\n response = None\n request = self.createHttpRequest(endpoint, payload, method, xDepth)\n opener = self.opener\n\n if ((payload is None) or (method == \"GET\")):\n\n logger.debug(\"Sending HTTP GET to \"+request.get_full_url())\n else:\n if ((method is None) or (method == \"PATCH\")):\n logger.debug(\"Sending HTTP PATCH to \"+request.get_full_url())\n elif (method == \"POST\"):\n logger.debug(\"Sending HTTP POST to \"+request.get_full_url())\n\n try:\n response = opener.open(request)\n\n except urllib2.HTTPError as e:\n \"\"\"Preserve error response body and put it into exception message\"\"\"\n \"\"\"But do not catch specific error conditions here because the method should be generic\"\"\"\n\n if hasattr(e, 'read'):\n error_message = e.read()\n newMsg = \"{:s}, Body: {}\".format(e.msg,error_message)\n newEx = urllib2.HTTPError(e.url, e.code, newMsg, e.hdrs, e.fp)\n newEx.read = lambda: error_message\n raise urllib2.HTTPError, newEx, sys.exc_info()[2]\n else:\n raise\n\n\n logger.debug(\"Response code {}\".format(response.getcode()))\n return response\n\n # this is only for documentation of possible TMS cases. Exceptions are not catched here, but in\n # upper level functions\n\n #if (e.code == 400):\n # raise RuntimeError(\"Bad Request one of the provided fields contains an invalid value\",\"TMS_BAD_REQUEST_ERR\")\n #if (e.code == 401):\n # raise RuntimeError(\"Logon not possible with provided TMS credentials\",\"WRONG_CREDENTIALS_ERR\")\n #if (e.code == 404):\n # raise RuntimeError(\"No endpoint found under the provided url\",\"WRONG_ENDPOINT_ERR\")\n #if (e.code == 409):\n # raise RuntimeError(\"Uid doesn't match the latest Uid version of the resource\",\"UUID_VERSION_MISMATCH\")\n #if (e.code == 423):\n # raise RuntimeError(\"Version uid correct but resource is locked\",\"TMS_RESOURCE_IS_LOCKED\")\n #if (e.code == 500):\n # raise RuntimeError(\"TMS Global-lookup inactive\",\"TMS_GLOBAL_LOOKUP_ERR\")", "def execute(self, request):\n url = request.uri\n if request.parameters:\n url += '?' 
+ urlencode(request.parameters)\n\n if request.headers:\n headers = dict(self._headers, **request.headers)\n else:\n headers = self._headers\n\n \n retry = 0\n server = getattr(self._local, \"server\", None)\n while True:\n if not server:\n self._local.server = server = self._get_server()\n try:\n parse_result = urlparse(server)\n if not self._use_ssl:\n conn = get_pool().connection_from_host(\n host=parse_result.hostname,\n port=parse_result.port,\n scheme=parse_result.scheme\n )\n else:\n pool_kwargs = {\n 'cert_file': self._client_cert,\n 'key_file': self._client_key,\n 'ca_certs': self._ca_certs,\n 'cert_reqs': self.CERT_REQS\n }\n conn = get_pool().connection_from_host(\n host=parse_result.hostname,\n port=parse_result.port,\n scheme=parse_result.scheme,\n pool_kwargs=pool_kwargs\n )\n kwargs = dict(\n method=Method._VALUES_TO_NAMES[request.method],\n url=parse_result.path + url,\n body=request.body,\n headers=headers,\n timeout=self._timeout,\n )\n\n response = conn.urlopen(**kwargs)\n return RestResponse(status=response.status,\n body=response.data,\n headers=response.headers)\n except (IOError, urllib3.exceptions.HTTPError) as ex:\n self._drop_server(server)\n self._local.server = server = None\n if retry >= self._max_retries:\n logger.error(\"Client error: bailing out after %d failed retries\",\n self._max_retries, exc_info=1)\n raise NoServerAvailable(ex)\n logger.exception(\"Client error: %d retries left\", self._max_retries - retry)\n retry += 1", "def http_request(self, method, path, data=None, params=None):\n\n s = Session()\n url = urljoin(self.BASE_URL, path)\n full_url = url\n try:\n full_url = full_url + \"?\" + urlencode(params)\n except:\n pass\n\n headers = self.request_headers(method, full_url)\n\n req = Request(\n method,\n url,\n headers=headers,\n data=data,\n params=params\n )\n prepped = req.prepare()\n resp = s.send(prepped, timeout=self.timeout)\n if resp.status_code == 429:\n raise errors.APIRateLimitError(\"Threat Stack API rate limit exceeded\")\n else:\n return self.handle_response(resp)", "def create_request(url, headers, attempts, request_type, data=None):\n request_func = getattr(requests, request_type)\n kwargs = {\"url\": url, \"headers\": headers}\n if request_type == \"post\" or request_type == \"patch\":\n kwargs[\"json\"] = data\n try:\n req = request_func(**kwargs)\n status_code = req.status_code\n time.sleep(1)\n while status_code >= 400 and attempts < 5:\n req = request_func(**kwargs)\n status_code = req.status_code\n attempts += 1\n time.sleep(1)\n return req\n except Exception as e:\n print(\"[ERROR] There was an error with the request, details:\")\n print(e)\n return None", "def request(self, method: str, url: str, **kwargs) -> requests.Response:\n url = parse.urljoin(self.http_address, url)\n return requests.request(method, url, **kwargs)", "def request(self, request_method, url, json_data=None):\n for i in range(int(str(self.retry_n))):\n LOG.debug(\n \"JovianDSS: Sending request of type %(type)s to %(url)s \\\n Attempt: %(num)s.\",\n {'type': request_method,\n 'url': url,\n 'num': i})\n\n if json_data is not None:\n LOG.debug(\n \"JovianDSS: Sending data: %s.\", str(json_data))\n try:\n\n ret = self.request_routine(url, request_method, json_data)\n\n # Work aroud for case when we have backend internal Fail.\n # OS Fail\n if ret[\"code\"] == 500:\n if ret[\"error\"] is not None:\n if (\"errno\" in ret[\"error\"]) and \\\n (\"class\" in ret[\"error\"]):\n if (ret[\"error\"][\"errno\"] is 2) and\\\n (ret[\"error\"][\"class\"] ==\n 
\"exceptions.OSError\"):\n LOG.debug(\n \"JovianDSS: Facing exceptions.OSError!\")\n continue\n\n return ret\n except requests.HTTPError as err:\n LOG.debug(\"Unable to execute: %s\", err)\n continue\n except requests.ConnectionError as err:\n LOG.debug(\"Unable to execute: %s\", err)\n\n msg = (_('%(times) faild in a row') % {'times': i})\n\n raise exception.JDSSRESTProxyException(host=url, reason=msg)", "def dispatch_request(self, *args, **kwargs):\n try:\n return super().dispatch_request(*args, **kwargs)\n except HTTPException as e:\n logger.error(\"HTTP Error on APIResource %s\", e, exc_info=1)\n return return_response({\n \"code\": e.code,\n \"message\": e.description\n }, e.code)\n except BaseException as e:\n logger.error(\"Error occurred in APIResource %s\", e, exc_info=1)\n return return_response({\n \"code\": 500,\n \"message\": str(e)\n }, 500)", "def _request(self, method, *args, **kwargs):\n if not \"headers\" in kwargs:\n kwargs[\"headers\"] = self._headers\n return self._session.request(method, self._url(*args), **kwargs)", "def _make_request(self, method, path, **kwargs):\r\n headers = {\r\n 'Content-Type': 'application/json',\r\n 'User-Agent': USER_AGENT,\r\n }\r\n headers.update(kwargs.get('headers', {}))\r\n kwargs['headers'] = headers\r\n kwargs['auth'] = self.auth\r\n\r\n url = '/'.join((self.endpoint, 'v1', self.account_id, path))\r\n resp = requests.request(method, url, **kwargs)\r\n resp.raise_for_status()\r\n return resp", "def make_request(self, method, path, headers=None, body='', host=None,\n auth_path=None, sender=None, override_num_retries=None,\n params=None, retry_handler=None):\n if params is None:\n params = {}\n http_request = self.build_base_http_request(method, path, auth_path,\n params, headers, body, host)\n return self._mexe(http_request, sender, override_num_retries,\n retry_handler=retry_handler)", "def _send_request(self, method='post', headers=None, json=None):\n response = getattr(requests, method)(self.url, headers=headers, json=json)\n return response", "def _handle_error(self, path, reqs, headers, get=True):\n call = requests.get if get else requests.post\n resp = None\n dump = json.dumps(reqs)\n wait = self.config.start_reconnect_wait\n while resp is None:\n if wait > self.config.max_reconnect_wait:\n raise Exception(\"To many reconnect attempts\")\n time.sleep(wait)\n try:\n resp = call(path, dump, headers=headers)\n except requests.exceptions.ConnectionError:\n resp = None\n wait *= 2\n return resp", "def request(method,\n url,\n timeout=3.0,\n is_success=_default_is_success,\n to_error=_default_to_error,\n **kwargs):\n\n try:\n if 'headers' in kwargs:\n request = requests.Request(method=method, url=url, **kwargs)\n else:\n request = requests.Request(\n method=method,\n url=url,\n headers={'Accept': 'application/json'},\n **kwargs)\n\n logger.info(\n 'Sending HTTP [%r] to [%r]: %r',\n request.method,\n request.url,\n request.headers)\n\n with requests.Session() as session:\n response = session.send(request.prepare(), timeout=timeout)\n except Exception as ex:\n raise DCOSException(to_error(DefaultError(str(ex))).error())\n\n logger.info('Received HTTP response [%r]: %r',\n response.status_code,\n response.text)\n\n if is_success(response.status_code):\n return response\n else:\n raise DCOSException(to_error(response).error())", "def _request(self, uri, method='GET', headers=None, body=None,\n status=(200, 304), **kwargs):\n uri = str(uri)\n if headers is not None:\n headers = headers.copy()\n else:\n headers = {}\n if self.user_agent is 
not None:\n headers['User-Agent'] = self.user_agent\n if isinstance(body, dict):\n if method not in ('POST', 'PUT'):\n method = 'POST'\n if is_multipart(body):\n body, boundary = encode_multipart_data(body)\n headers.update(MULTIPART_HEADERS)\n headers['Content-Type'] = MULTIPART_HEADERS['Content-Type'] + \\\n boundary\n else:\n body = urlencode(body, True)\n headers.update(FORMENCODE_HEADERS)\n (response, content) = self.http_obj.request(uri,\n method=method, headers=headers, body=body, **kwargs)\n assert response.status in status, \\\n \"%s %s\" % (response.status, response.reason)\n return (response, content)", "def perform_request(self,\n request: RequestBase,\n method: str='POST'\n ):\n headers = {\n 'Accept': 'application/json',\n 'User-Agent': self.user_agent()\n }\n if APIAuthentication.use_http_auth:\n headers['Authorization'] = 'Basic {auth}'.format(auth=self.get_auth())\n\n # Lazy loader for api credentials.\n if request.requires_api_token() and ParamValidator.is_empty(request.api_token)\\\n and ParamValidator.not_empty(APIAuthentication.api_token):\n request.api_token = APIAuthentication.api_token\n if request.requires_service_id() and ParamValidator.is_empty(request.service_id)\\\n and ParamValidator.not_empty(APIAuthentication.service_id):\n request.service_id = APIAuthentication.service_id\n\n # Build url\n url = \"{0}/{1}\".format(PAYNL_END_POINT, request.get_url())\n parameters = request.get_parameters()\n if APIAuthentication.use_http_auth and 'token' in parameters:\n del parameters['token']\n\n if self.print_debug:\n print(\"Calling {} using {}\".format(url, method))\n print(\"HTTP Headers: {}\".format(json.dumps(headers)))\n print(\"Params: {}\".format(json.dumps(parameters)))\n\n if method.upper() == 'GET':\n response = requests.get(url, verify=True, headers=headers, params=parameters)\n else:\n response = requests.post(url, verify=True, headers=headers, data=parameters)\n\n if response.status_code not in self.__supported_status_codes:\n response.raise_for_status()\n\n if self.print_debug:\n print(\"Response object: {}\".format(response))\n print(\"Raw response: {}\".format(response.text))\n\n # Now the we have a response, let the request class handle the response.\n request.raw_response = response.text\n\n if self.print_debug:\n print(type(request.response))\n\n if request.response.is_error():\n raise ErrorException(request.response.request)", "def _get_http_request(url, retry=0):\n try:\n return requests.get(url).json()\n except Exception:\n if retry > 0:\n retry = retry - 1\n # FIXME use invenio-logging?\n return _get_http_request(url=url, retry=retry)", "def _request(http, project, method, data, base_url, client_info):\n user_agent = client_info.to_user_agent()\n headers = {\n \"Content-Type\": \"application/x-protobuf\",\n \"User-Agent\": user_agent,\n connection_module.CLIENT_INFO_HEADER: user_agent,\n }\n api_url = build_api_url(project, method, base_url)\n\n response = http.request(url=api_url, method=\"POST\", headers=headers, data=data)\n\n if response.status_code != 200:\n error_status = status_pb2.Status.FromString(response.content)\n raise exceptions.from_http_status(\n response.status_code, error_status.message, errors=[error_status]\n )\n\n return response.content", "def _request(self, request_method, url, *args, **kwargs):\n\n full_url = self.get_full_url(url)\n\n self.logger.info('Calling %s url: %s', request_method, full_url)\n\n request_args = self.get_request_args(kwargs)\n\n request = NapRequest(request_method, full_url, *args, **request_args)\n\n 
for mw in self.model._meta['middleware']:\n request = mw.handle_request(request)\n\n resource_response = request.send()\n response = NapResponse(\n url=request.url,\n status_code=resource_response.status_code,\n headers=resource_response.headers,\n content=resource_response.content,\n request_method=request_method,\n )\n\n for mw in reversed(self.model._meta['middleware']):\n response = mw.handle_response(request, response)\n\n return response", "def request(self, method, url, params, timeout=10, retries=3, data=None):\n\n sleep_s = 0.1\n factor = 2\n\n if data is None:\n data = {}\n self.logger.debug('{} to {}'.format(method, url))\n resp = None\n for i in range(retries):\n try:\n resp = requests.request(method, url, params=params, data=data, timeout=timeout)\n except requests.Timeout:\n self.logger.error('timeout to {}\\n'.format(url))\n time.sleep(sleep_s)\n sleep_s *= factor\n factor *= factor\n if resp is not None:\n break\n try:\n return json.loads(resp.content.decode())\n except (json.JSONDecodeError, TypeError, Exception, ) as e:\n self.logger.error('Exception {} during GET to {} \\n'.format(e, url))", "def request(self, method, url, headers=None, params=None,\n data=None, raw_response=False):\n kwargs = dict(self.requests, **{\n 'headers': headers or {},\n 'params': params or {},\n 'data': data or {},\n })\n\n if 'Content-Type' not in kwargs['headers'] and method in ('post',\n 'put'):\n kwargs['data'] = json.dumps(data)\n kwargs['headers']['Content-Type'] = 'application/json'\n\n if self.impersonate is not None:\n kwargs['headers']['X-Redmine-Switch-User'] = self.impersonate\n\n # We would like to be authenticated by API key by default\n if self.key is not None:\n kwargs['params']['key'] = self.key\n if self.username and self.password:\n kwargs['auth'] = (self.username, self.password)\n if self.auth_cookie:\n kwargs['cookies'] = dict(auth_pubtkt=self.auth_cookie)\n\n response = getattr(requests, method)(url, **kwargs)\n\n if response.status_code in (200, 201):\n if raw_response:\n return response\n elif not response.content.strip():\n return True\n else:\n return response.json()\n elif response.status_code == 401:\n raise AuthError\n elif response.status_code == 404:\n raise ResourceNotFoundError\n elif response.status_code == 409:\n raise ConflictError\n elif response.status_code == 412 and self.impersonate is not None:\n raise ImpersonateError\n elif response.status_code == 413:\n raise RequestEntityTooLargeError\n elif response.status_code == 422:\n raise ValidationError(to_string(', '.join(\n response.json()['errors'])))\n elif response.status_code == 500:\n raise ServerError\n\n raise UnknownError(response.status_code)", "def custom_request(self, url, method='GET', params=None, data=None,\n expected_response_code=200, headers=None):\n url = \"{0}/{1}\".format(self._baseurl, url)\n\n if headers is None:\n headers = self._headers\n\n if params is None:\n params = {}\n\n if isinstance(data, (dict, list)):\n data = json.dumps(data)\n\n # Try to send the request more than once by default (see #103)\n retry = True\n _try = 0\n while retry:\n try:\n response = self._session.request(\n method=method,\n url=url,\n auth=(self._username, self._password),\n params=params,\n data=data,\n headers=headers,\n proxies=self._proxies,\n verify=self._verify_ssl,\n timeout=self._timeout\n )\n break\n except requests.exceptions.ConnectionError as e:\n self._session = requests.Session()\n _try += 1\n if self._retries != 0:\n retry = _try < self._retries\n except 
requests.exceptions.ChunkedEncodingError as e:\n logging.warn(\"Case of broken HTTP session, retring w/ new session\")\n self._session = requests.Session()\n _try += 1\n if self._retries != 0:\n retry = _try < self._retries\n else:\n raise requests.exceptions.ConnectionError\n\n if 500 <= response.status_code < 600:\n raise InfluxDBServerError(response.content)\n elif response.status_code == expected_response_code:\n return response\n else:\n raise InfluxDBClientError(response.content, response.status_code)", "def _external_request(self, method, url, *args, **kwargs):\n self.last_url = url\n if url in self.responses.keys() and method == 'get':\n return self.responses[url] # return from cache if its there\n\n headers = kwargs.pop('headers', None)\n custom = {'User-Agent': useragent}\n if headers:\n headers.update(custom)\n kwargs['headers'] = headers\n else:\n kwargs['headers'] = custom\n\n response = getattr(requests, method)(url, *args, **kwargs)\n\n if self.verbose:\n print(\"Got Response: %s\" % url)\n\n if response.status_code == 503:\n raise SkipThisService(\"Service returned 503 - Temporarily out of service.\")\n\n if method == 'get':\n self.responses[url] = response # cache for later\n\n self.last_raw_response = response\n return response", "async def _request(method, url, session=None, **kwargs):\n\n loop = asyncio.get_event_loop()\n\n client = session or aiohttp.ClientSession(loop=loop)\n try:\n resp = await client.request(method, url, **kwargs)\n status = resp.status\n content = await resp.read()\n await resp.release()\n finally:\n await client.close()\n\n r = Response(status, content)\n if r.status >= 400:\n raise HTTPRequestError(r.status, r.text)\n return r", "def request(self, url, method, log=True, **kwargs):\n # Copy the kwargs so we can reuse the original in case of redirects\n kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))\n kwargs['headers'].setdefault('User-Agent', USER_AGENT)\n if self.auth_token:\n kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)\n else:\n kwargs['headers'].update(self.credentials_headers())\n if self.auth_url:\n kwargs['headers'].setdefault('X-Auth-Url', self.auth_url)\n if self.region_name:\n kwargs['headers'].setdefault('X-Region-Name', self.region_name)\n if self.tenant_name:\n kwargs['headers'].setdefault('X-Project-Id', self.tenant_name)\n\n self.log_curl_request(url, method, kwargs)\n\n if self.cert_file and self.key_file:\n kwargs['cert'] = (self.cert_file, self.key_file)\n\n if self.verify_cert is not None:\n kwargs['verify'] = self.verify_cert\n\n if self.timeout is not None:\n kwargs['timeout'] = float(self.timeout)\n\n # Allow the option not to follow redirects\n follow_redirects = kwargs.pop('redirect', True)\n\n # Since requests does not follow the RFC when doing redirection to sent\n # back the same method on a redirect we are simply bypassing it. 
For\n # example if we do a DELETE/POST/PUT on a URL and we get a 302 RFC says\n # that we should follow that URL with the same method as before,\n # requests doesn't follow that and send a GET instead for the method.\n # Hopefully this could be fixed as they say in a comment in a future\n # point version i.e.: 3.x\n # See issue: https://github.com/kennethreitz/requests/issues/1704\n allow_redirects = False\n try:\n resp = requests.request(\n method,\n self.endpoint_url + url,\n allow_redirects=allow_redirects,\n **kwargs)\n except socket.gaierror as e:\n message = (\"Error finding address for %(url)s: %(e)s\" %\n {'url': self.endpoint_url + url, 'e': e})\n raise exc.InvalidEndpoint(message=message)\n except (socket.error,\n socket.timeout,\n requests.exceptions.ConnectionError) as e:\n endpoint = self.endpoint\n message = (\"Error communicating with %(endpoint)s %(e)s\" %\n {'endpoint': endpoint, 'e': e})\n raise exc.CommunicationError(message=message)\n\n if log:\n self.log_http_response(resp)\n\n if 'X-Auth-Key' not in kwargs['headers'] and \\\n (resp.status_code == 401 or\n (resp.status_code == 500 and\n \"(HTTP 401)\" in resp.content)):\n raise exc.HTTPUnauthorized(\"Authentication failed. Please try\"\n \" again.\\n%s\"\n % resp.content)\n elif 400 <= resp.status_code < 600:\n raise exc.from_response(resp)\n elif resp.status_code in (301, 302, 305):\n # Redirected. Reissue the request to the new location,\n # unless caller specified follow_redirects=False\n if follow_redirects:\n location = resp.headers.get('location')\n path = self.strip_endpoint(location)\n resp = self.request(path, method, **kwargs)\n elif resp.status_code == 300:\n raise exc.from_response(resp)\n\n return resp", "def request(self, method, url, headers=None, params=None, data=None, raw=False):\n kwargs = dict(self.requests, **{\n 'headers': headers or {},\n 'params': params or {},\n 'data': data or {},\n })\n\n if not 'Content-Type' in kwargs['headers'] and method in ('post', 'put'):\n kwargs['data'] = json.dumps(data)\n kwargs['headers']['Content-Type'] = 'application/json'\n\n if self.impersonate is not None:\n kwargs['headers']['X-Redmine-Switch-User'] = self.impersonate\n\n # We would like to be authenticated by API key by default\n if 'key' not in kwargs['params'] and self.key is not None:\n kwargs['params']['key'] = self.key\n else:\n kwargs['auth'] = (self.username, self.password)\n\n if raw:\n kwargs['stream'] = True\n\n response = getattr(requests, method)(url, **kwargs)\n\n if response.status_code in (200, 201):\n if raw:\n return response\n if not response.content.strip():\n return True\n return json_response(response.json)\n elif response.status_code == 401:\n raise AuthError()\n elif response.status_code == 404:\n raise ResourceNotFoundError\n elif response.status_code == 409:\n raise ConflictError\n elif response.status_code == 412 and self.impersonate is not None:\n raise ImpersonateError()\n elif response.status_code == 413:\n raise RequestEntityTooLargeError()\n elif response.status_code == 422:\n raise ValidationError(to_string(', '.join(json_response(response.json)['errors'])))\n elif response.status_code == 500:\n raise ServerError()\n\n raise UnknownError(response.status_code)", "def send_request(url, method):\n headers = {'User-Agent': user_agent}\n try:\n if method == \"GET\":\n r = requests.get(url, headers=headers)\n else:\n data = \"\"\n r = requests.post(url, headers=headers, data=data)\n except Exception as e:\n print(bad + \" Problem with request! 
\" + end)\n print(e)\n exit(-1)\n\n if (r.status_code == 302):\n print(bad + \" Redirected. Try this instead: \" +\n r.headers['Location'] + end)\n elif (r.status_code == 401):\n print(bad + \" Status: \" + str(r.status_code) + end)\n return(r.status_code)\n elif (r.status_code == 415):\n return(r.status_code)\n elif (r.status_code == 200):\n print(info + \" Status: \" + str(r.status_code) + end)\n return(r.text)\n else:\n print(info + \" Something went wrong! \" + end)\n print(bad + \" Status: \" + str(r.status_code) + str(r.content) + end)\n exit(-1)", "def retry(num=5):\n s = requests.Session()\n retries = Retry(total=num, backoff_factor=0.1,\n status_forcelist=[500, 502, 503, 504])\n s.mount('http://', HTTPAdapter(max_retries=retries))\n\n return s", "def make_request(self, method, path, headers=None, data='', host=None,\r\n auth_path=None, sender=None, override_num_retries=None):\r\n http_request = self.build_base_http_request(method, path, auth_path,\r\n {}, headers, data, host)\r\n return self._mexe(http_request, sender, override_num_retries)", "def request( self, method, location, parameters, headers, secure ):\n\t\tif self.__current_proxy != self.proxy:\n\t\t\tself.reset()\n\t\t\tprint \"proxy changed: %r\" % (self,)\n\t\t\n\t\tif self.proxy_must_match:\n\t\t\tif ( self.proxy is None ) or ( not self.proxy_must_match.search(self.proxy) ):\n\t\t\t\traise ValueError(\"Invalid proxy %r!!! Conflicts with proxy_must_match value!\" % (self.proxy,))\n\t\t\n\t\tif self.print_requests:\n\t\t\tprint \"%s %s %r %r\" % (secure and 'HTTPS' or 'HTTP', method, location, self.__use_this_proxy,)\n\t\t\n\t\tif self.requests_before_reconnect > 0:\n\t\t\tif self.requests_count > self.requests_before_reconnect:\n\t\t\t\t#open new connection\n\t\t\t\tself.requests_count = 1\n\t\t\t\tself.reset()\n\t\t\tself.requests_count += 1\n\n\t\tif secure:\n\t\t\tconn = self.https\n\t\telse:\n\t\t\tconn = self.http\n\n\t\tif self.debug:\n\t\t\tprint conn\n\n\t\tif headers and 'Referrer' in headers:\n\t\t\traise ValueError(\"Incorrect spelling - use referer not referrer\")\n\n\t\t# This strips out the :443 of https connections from the Host header by setting it manually.\n\t\tif not 'Host' in headers:\n\t\t\theaders['Host'] = self.site\n\t\t\n\t\ttry:\n\t\t\ttry:\n\t\t\t\tconn.request( method, location, parameters, headers )\n\t\t\texcept socket.error:\n\t\t\t\tconn.close()\n\t\t\t\tconn.request( method, location, parameters, headers )\n\t\t\texcept httplib.CannotSendRequest:\n\t\t\t\tconn.close()\n\t\t\t\tconn.request( method, location, parameters, headers )\n\t\t\t\n\t\t\ttry:\n\t\t\t\tresp = conn.getresponse()\n\t\t\texcept httplib.BadStatusLine:\n\t\t\t\tconn.close()\n\t\t\t\tconn.request( method, location, parameters, headers )\n\t\t\t\tresp = conn.getresponse()\n\t\t\texcept httplib.CannotSendRequest:\n\t\t\t\tconn.close()\n\t\t\t\tconn.request( method, location, parameters, headers )\n\t\t\t\tresp = conn.getresponse()\n\t\texcept Exception, e:\n\t\t\tprint \"Reset browser.py %r because error %r\" % (self, e,)\n\t\t\tself.reset()\n\t\t\traise\n\t\t\n\t\tcookie = resp.getheader( 'set-cookie' )\n\t\tif cookie:\n\t\t\tself.cookies.add( cookie )\n\t\t\n\t\tprotocol = 'http'\n\t\tif secure:\n\t\t\tprotocol = 'https'\n\t\tself.last_visited = '%s://%s%s' % (protocol, self.site, location)\n\t\t\n\t\t# if this is a redirect:\n\t\tif resp.status >= 300 and resp.status < 400:\n\t\t\t# check if the site was specified and it differs from\n\t\t\t# the current one\n\t\t\tconn.close()\n\t\t\tlocation = 
resp.getheader('location')\n\t\t\t#print \"redirecting to \", location\n\t\t\tparsed_location = urlparse.urlparse(location)\n\t\t\thttp_or_https = protocol\n\t\t\tcls = LocalRedirect\n\t\t\tif parsed_location[1]:\n\t\t\t\tif parsed_location[1] != self.site:\n\t\t\t\t\tcls = ExternalRedirect\n\t\t\t\telse:\n\t\t\t\t\t# ignore the beginning bit\n\t\t\t\t\thttp_or_https = parsed_location[0]\n\t\t\t\t\tparsed_location = list(parsed_location)\n\t\t\t\t\tparsed_location[0] = ''\n\t\t\t\t\tparsed_location[1] = ''\n\t\t\t\t\tlocation = urlparse.urlunparse(parsed_location)\n\t\t\t# raise an exception for the redirection\n\t\t\traise cls(location, resp.status, resp.reason, resp, http_or_https)\n\t\t\n\t\t# set the location that was visited, in case it differs from that which\n\t\t# was specified (i.e because of a redirect)\n\t\tresp.location = location\n\t\treturn resp", "def request(self, method, path, body=None, headers=None):\n url = self.base_url + path\n print \"Method: \" + method + \", URL: \" + url\n\n if body is not None:\n print json.dumps(\n json.loads(body),\n sort_keys=True,\n indent=4,\n separators=(\n ',',\n ': '))\n\n try:\n response = self.session.request(\n method,\n url,\n data=body,\n headers=headers)\n print \"Status code: \" + str(response.status_code)\n return response\n except requests.exceptions.HTTPError as exception:\n print \"HTTPError: \" + exception\n sys.exit(1)\n except requests.exceptions.RequestException as exception:\n print exception\n sys.exit(1)", "def retry_request(self, method, action, body=None,\r\n headers=None, params=None):\r\n max_attempts = self.retries + 1\r\n for i in range(max_attempts):\r\n try:\r\n return self.do_request(method, action, body=body,\r\n headers=headers, params=params)\r\n except exceptions.ConnectionFailed:\r\n # Exception has already been logged by do_request()\r\n if i < self.retries:\r\n _logger.debug(_('Retrying connection to Neutron service'))\r\n time.sleep(self.retry_interval)\r\n\r\n raise exceptions.ConnectionFailed(reason=_(\"Maximum attempts reached\"))", "def _http_request(self, method, url_suffix='', **kwargs):\n try:\n res = super()._http_request(method, url_suffix, error_handler=exception_handler, **kwargs)\n except Exception as e:\n if 'Expired Token' in e.__str__():\n self.generate_new_token()\n res = super()._http_request(method, url_suffix, error_handler=exception_handler, **kwargs)\n else:\n raise e\n return res", "def _request(self, query):\n query_string = self._create_query_string(query)\n\n try:\n response = requests.get(query_string)\n except requests.exceptions.ConnectionError:\n raise EngineConnectionException(self.name, \"Unable to send request, check connectivity.\")\n\n if response.status_code != 200:\n raise EngineConnectionException(self.name, \"\", code=response.status_code)\n\n return self._parse_json_response(query, response)", "def http_request(\n self,\n method: str,\n url_suffix: str,\n json_data=None,\n params=None,\n headers=None,\n ):\n resp = Response()\n try:\n resp = super()._http_request(\n method=method,\n url_suffix=url_suffix,\n json_data=json_data,\n params=params,\n headers=headers,\n resp_type='response',\n timeout=self.request_timeout,\n ok_codes=(200, 201),\n error_handler=self.handle_error_response,\n )\n except MissingSchema:\n raise ValueError(MESSAGES['MISSING_SCHEMA_ERROR'])\n except InvalidSchema:\n raise ValueError(MESSAGES['INVALID_SCHEMA_ERROR'])\n except InvalidURL:\n raise ValueError(MESSAGES['INVALID_API_URL'])\n except DemistoException as e:\n 
self.handle_demisto_exception(e)\n\n if resp.ok:\n content_type = resp.headers.get('Content-Type', '')\n if content_type == CONTENT_TYPE_JSON:\n # Handle empty response\n if resp.text == '':\n return resp\n else:\n return resp.json()\n elif self.is_supported_context_type(content_type):\n return resp", "async def request(\r\n self, method: str, url: str, params: dict = None, data: dict = None\r\n ):\r\n async with self._session.request(\r\n method,\r\n url,\r\n params=params,\r\n json=data,\r\n headers={\"Authorization\": \"Bearer \" + self._token},\r\n ) as resp:\r\n if resp.status == 200:\r\n return await resp.json()\r\n if resp.status in (400, 422, 429, 500):\r\n data = None\r\n try:\r\n data = await resp.json()\r\n except Exception: # pylint: disable=broad-except\r\n pass\r\n raise APIResponseError(\r\n resp.request_info,\r\n resp.history,\r\n status=resp.status,\r\n message=resp.reason,\r\n headers=resp.headers,\r\n data=data,\r\n )\r\n resp.raise_for_status()", "def do_request(\n self,\n version: str,\n action: str,\n protocol: str,\n method: str,\n pathname: str,\n request: dict,\n headers: Dict[str, str],\n runtime: util_models.RuntimeOptions,\n ) -> dict:\n runtime.validate()\n _runtime = {\n 'timeouted': 'retry',\n 'readTimeout': UtilClient.default_number(runtime.read_timeout, self._read_timeout),\n 'connectTimeout': UtilClient.default_number(runtime.connect_timeout, self._connect_timeout),\n 'httpProxy': UtilClient.default_string(runtime.http_proxy, self._http_proxy),\n 'httpsProxy': UtilClient.default_string(runtime.https_proxy, self._https_proxy),\n 'noProxy': UtilClient.default_string(runtime.no_proxy, self._no_proxy),\n 'maxIdleConns': UtilClient.default_number(runtime.max_idle_conns, self._max_idle_conns),\n 'maxIdleTimeMillis': self._max_idle_time_millis,\n 'keepAliveDurationMillis': self._keep_alive_duration_millis,\n 'maxRequests': self._max_requests,\n 'maxRequestsPerHost': self._max_requests_per_host,\n 'retry': {\n 'retryable': runtime.autoretry,\n 'maxAttempts': UtilClient.default_number(runtime.max_attempts, 3)\n },\n 'backoff': {\n 'policy': UtilClient.default_string(runtime.backoff_policy, 'no'),\n 'period': UtilClient.default_number(runtime.backoff_period, 1)\n },\n 'ignoreSSL': runtime.ignore_ssl,\n # 链上交易中的事件\n }\n _last_request = None\n _last_exception = None\n _now = time.time()\n _retry_times = 0\n while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):\n if _retry_times > 0:\n _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)\n if _backoff_time > 0:\n TeaCore.sleep(_backoff_time)\n _retry_times = _retry_times + 1\n try:\n _request = TeaRequest()\n _request.protocol = UtilClient.default_string(self._protocol, protocol)\n _request.method = method\n _request.pathname = pathname\n _request.query = {\n 'method': action,\n 'version': version,\n 'sign_type': 'HmacSHA1',\n 'req_time': AntchainUtils.get_timestamp(),\n 'req_msg_id': AntchainUtils.get_nonce(),\n 'access_key': self._access_key_id,\n 'base_sdk_version': 'TeaSDK-2.0',\n 'sdk_version': '1.3.1',\n '_prod_code': 'BAASDATAGW',\n '_prod_channel': 'undefined'\n }\n if not UtilClient.empty(self._security_token):\n _request.query['security_token'] = self._security_token\n _request.headers = TeaCore.merge({\n 'host': UtilClient.default_string(self._endpoint, 'openapi.antchain.antgroup.com'),\n 'user-agent': UtilClient.get_user_agent(self._user_agent)\n }, headers)\n tmp = UtilClient.anyify_map_value(RPCUtilClient.query(request))\n _request.body = 
UtilClient.to_form_string(tmp)\n _request.headers['content-type'] = 'application/x-www-form-urlencoded'\n signed_param = TeaCore.merge(_request.query,\n RPCUtilClient.query(request))\n _request.query['sign'] = AntchainUtils.get_signature(signed_param, self._access_key_secret)\n _last_request = _request\n _response = TeaCore.do_action(_request, _runtime)\n raw = UtilClient.read_as_string(_response.body)\n obj = UtilClient.parse_json(raw)\n res = UtilClient.assert_as_map(obj)\n resp = UtilClient.assert_as_map(res.get('response'))\n if AntchainUtils.has_error(raw, self._access_key_secret):\n raise TeaException({\n 'message': resp.get('result_msg'),\n 'data': resp,\n 'code': resp.get('result_code')\n })\n return resp\n except Exception as e:\n if TeaCore.is_retryable(e):\n _last_exception = e\n continue\n raise e\n raise UnretryableException(_last_request, _last_exception)", "def _make_get_request(self,url,object_fh,params=None,return_type=None,extras=None):\n \n if params is None:\n params = {}\n \n if extras is None:\n extras = {}\n \n #Polite Pool Work\n #---------------------------------------\n #Example \n #GroovyBib/1.1 (https://example.org/GroovyBib/; mailto:GroovyBib@example.org) BasedOnFunkyLib/1.4.\n\n #It is unclear if we need to match this format\n #This is good enough for now\n #Eventually we might allow a user to describe their application\n #version, and url\n ua_str = 'st_crossref/%s (https://github.com/ScholarTools/crossref_api_python; mailto:%s)' % (VERSION,user_config.email)\n \n headers = {'user-agent': ua_str}\n \n \n #TODO Check params and # of results ...\n \n #TODO: Implement rate limits ...\n \n \n #The params get passed directly\n r = self.session.get(url,params=params,headers=headers) \n \n\n #Update limits\n #--------------------- \n headers = r.headers\n self.rate_limit = headers.get('X-Rate-Limit-Limit',50)\n self.rate_limit_interval = int(headers.get('X-Rate-Limit-Interval','1s')[:-1])\n \n #TODO: Implement ...https://konghq.com/blog/how-to-design-a-scalable-rate-limiting-algorithm/\n \n\n #These are debug only and should not be used for anything else\n #-------------------------------------------------------------\n self.last_url = url\n self.last_response = r \n self.last_params = params \n \n if r.status_code == 404:\n #This typically happens when the DOI is invalid\n #TODO: Make this a named exception\n raise errors.RequestError(r.text)\n \n json_data = r.json()\n if json_data['status'] == 'failed':\n self.last_error = json_data\n raise errors.CrossrefAPIError(json_data['message'])\n \n #Example error \n \"\"\"\n {'status': 'failed', 'message-type': 'validation-failure', \n 'message': [{'value': 'sample', \n 'message': 'This route does not support sample', 'type': 'parameter-not-allowed'}]}\n \"\"\" \n \n #TODO: return_type\n if return_type == 'json' or object_fh is None:\n return json_data\n else:\n return object_fh(json_data,self)", "def http_backoff(\n method: HTTP_METHOD_T,\n url: str,\n *,\n max_retries: int = 5,\n base_wait_time: float = 1,\n max_wait_time: float = 8,\n retry_on_exceptions: Union[Type[Exception], Tuple[Type[Exception], ...]] = (\n ConnectTimeout,\n ProxyError,\n ),\n retry_on_status_codes: Union[int, Tuple[int, ...]] = HTTPStatus.SERVICE_UNAVAILABLE,\n **kwargs,\n) -> Response:\n if isinstance(retry_on_exceptions, type): # Tuple from single exception type\n retry_on_exceptions = (retry_on_exceptions,)\n\n if isinstance(retry_on_status_codes, int): # Tuple from single status code\n retry_on_status_codes = (retry_on_status_codes,)\n\n 
nb_tries = 0\n sleep_time = base_wait_time\n\n # If `data` is used and is a file object (or any IO), it will be consumed on the\n # first HTTP request. We need to save the initial position so that the full content\n # of the file is re-sent on http backoff. See warning tip in docstring.\n io_obj_initial_pos = None\n if \"data\" in kwargs and isinstance(kwargs[\"data\"], io.IOBase):\n io_obj_initial_pos = kwargs[\"data\"].tell()\n\n session = get_session()\n while True:\n nb_tries += 1\n try:\n # If `data` is used and is a file object (or any IO), set back cursor to\n # initial position.\n if io_obj_initial_pos is not None:\n kwargs[\"data\"].seek(io_obj_initial_pos)\n\n # Perform request and return if status_code is not in the retry list.\n response = session.request(method=method, url=url, **kwargs)\n if response.status_code not in retry_on_status_codes:\n return response\n\n # Wrong status code returned (HTTP 503 for instance)\n logger.warning(f\"HTTP Error {response.status_code} thrown while requesting {method} {url}\")\n if nb_tries > max_retries:\n response.raise_for_status() # Will raise uncaught exception\n # We return response to avoid infinite loop in the corner case where the\n # user ask for retry on a status code that doesn't raise_for_status.\n return response\n\n except retry_on_exceptions as err:\n logger.warning(f\"'{err}' thrown while requesting {method} {url}\")\n\n if nb_tries > max_retries:\n raise err\n\n # Sleep for X seconds\n logger.warning(f\"Retrying in {sleep_time}s [Retry {nb_tries}/{max_retries}].\")\n time.sleep(sleep_time)\n\n # Update sleep time for next retry\n sleep_time = min(max_wait_time, sleep_time * 2) # Exponential backoff", "def __http_request_maker(\n req_type,\n url,\n headers,\n retries,\n time_sleep,\n timeout_sec=None,\n data=None,\n content_type=None,\n socks_proxy=None,\n):\n if socks_proxy is not None:\n socks_version = (\n socks.SOCKS5\n if socks_proxy.startswith(\"socks5://\")\n else socks.SOCKS4\n )\n socks_proxy = socks_proxy.rsplit(\"://\")[1]\n if \"@\" in socks_proxy:\n socks_username = socks_proxy.rsplit(\":\")[0]\n socks_password = socks_proxy.rsplit(\":\")[1].rsplit(\"@\")[0]\n socks.set_default_proxy(\n socks_version,\n str(socks_proxy.rsplit(\"@\")[1].rsplit(\":\")[0]),\n int(socks_proxy.rsplit(\":\")[-1]),\n username=socks_username,\n password=socks_password,\n )\n socket.socket = socks.socksocket\n socket.getaddrinfo = getaddrinfo\n else:\n socks.set_default_proxy(\n socks_version,\n str(socks_proxy.rsplit(\":\")[0]),\n int(socks_proxy.rsplit(\":\")[1]),\n )\n socket.socket = socks.socksocket\n socket.getaddrinfo = getaddrinfo\n exits = 0\n r = None\n while True:\n try:\n req_type = req_type.lower()\n if req_type in [\"post\", \"put\", \"patch\"]:\n if content_type == \"application/data\":\n r = eval(\n \"requests.{}(url=url, headers=headers, data=data,\\\n timeout=timeout_sec, verify=False)\".format(\n req_type\n )\n )\n elif content_type == \"application/json\":\n r = eval(\n \"requests.{}(url=url, headers=headers, json=data,\\\n timeout=timeout_sec, verify=False)\".format(\n req_type\n )\n )\n elif req_type in [\"get\", \"head\", \"delete\"]:\n r = eval(\n \"requests.{}(url=url, headers=headers,\\\n verify=False, timeout=timeout_sec)\".format(\n req_type\n )\n )\n break\n except Exception as _:\n exits += 1\n if exits is retries:\n return 0\n else:\n time.sleep(time_sleep)\n continue\n return r", "def send_request(method: str, path: str, headers: dict, data: str = '', params: tuple = PARAMS):\n log.info(\n f\"EMBY, 
send_request: {method=}, {BASEURL + path=}, {headers=}, {data=}, {params=}\")\n\n if method.lower() == 'post':\n response = requests.post(BASEURL + path, headers=headers,\n params=params, data=data, timeout=5)\n elif method.lower() == 'get':\n response = requests.get(BASEURL + path, headers=headers,\n params=params, data=data, timeout=5)\n elif method.lower() == 'delete':\n response = requests.delete(\n BASEURL + path, headers=headers, params=params, data=data, timeout=5)\n\n error_list = {\n 400: \"Bad Request. Server cannot process request.\",\n 401: \"Unauthorized. Client needs to authenticate.\",\n 403: \"Forbidden. No permission for the requested operation.\",\n 404: \"Resource not found or unavailable.\"\n }\n\n if response.status_code in error_list:\n log.error(\n f\"EMBY, send_request: {response.status_code=}: {error_list[response.status_code]}\")\n raise Exception(\n f\"{response.status_code=}: {error_list[response.status_code]}\")\n elif response.status_code >= 500:\n log.error(\n f\"EMBY, send_request: {response.status_code=}: {error_list[response.status_code]}\")\n raise Exception(f\"SERVER ERROR {response.status_code=}: {response.reason=}\")\n elif response.status_code >= 204:\n return # nothing, empty response\n else:\n log.debug(f\"Everything seems OK :{response.status_code=}\")\n log.debug(f\"EMBY, send_request: {response.json()=}\")\n return response.json()", "def _make_request(self, method, url, post_data=None, body=None):\n if not self.connection:\n self._connect()\n try:\n self.connection.close()\n except:\n pass\n self.connection.connect()\n headers = {}\n if self.auth_header:\n headers[\"Authorization\"] = self.auth_header\n self.connection.request(method, url, body, headers)\n resp = self.connection.getresponse()\n return resp", "def _raise_performing_request_error(self, *args, **kwargs):", "def request(self, path, method=\"GET\", **kwargs):\n if path.startswith(\"https://\"):\n response = self._request(method, path, **kwargs)\n _handle_http_errors(response, _CODES_CONVERSION)\n return response\n\n while True:\n response = self._request(method, GITHUB_API + path, **kwargs)\n\n if (\n response.status_code == 403\n and int(response.headers.get(\"X-RateLimit-Remaining\", \"-1\")) == 0\n ):\n self._handle_rate_limit()\n continue\n\n return response", "def _make_request(self, method, url, post_data=None, body=None):\r\n if not self.connection:\r\n self._connect()\r\n try:\r\n self.connection.close()\r\n except:\r\n pass\r\n self.connection.connect()\r\n headers = {}\r\n if self.auth_header:\r\n headers[\"Authorization\"] = self.auth_header\r\n self.connection.request(method, url, body, headers)\r\n resp = self.connection.getresponse()\r\n return resp", "def error_handler(source, prod, HEADERS):\n\n try:\n req = requests.get(source, params=prod, headers=HEADERS)\n except Timeout as e:\n print(\"\\nThe website took too long to respond. Please try after sometime.\\n\")\n sys.exit(1)\n except ConnectionError as e:\n print(\"\\nYou do not have a descent internet connection. Please check your Internet Connection and try again later.\\n\")\n sys.exit(1)\n except TooManyRedirects as e:\n print(\"\\nYour request exceeded the configured number of maximum redirections. Please try after sometime.\\n\")\n sys.exit(1)\n except Exception as e:\n print(\"\\nRequest souldn't be completed. 
Please try after sometime.\\n\")\n sys.exit(1)\n\n return req", "def _raise_http_error(self, *args, **kwargs):", "def __send_request(self, url, params=None, headers=None):\n\n if self.rate_limit is not None and self.rate_limit <= self.min_rate_to_sleep:\n seconds_to_reset = self.rate_limit_reset_ts - int(time.time()) + 1\n cause = \"GitHub rate limit exhausted.\"\n if self.sleep_for_rate:\n logger.info(\"%s Waiting %i secs for rate limit reset.\", cause, seconds_to_reset)\n time.sleep(seconds_to_reset)\n else:\n raise RateLimitError(cause=cause, seconds_to_reset=seconds_to_reset)\n\n r = requests.get(url, params=params, headers=headers)\n r.raise_for_status()\n self.rate_limit = int(r.headers['X-RateLimit-Remaining'])\n self.rate_limit_reset_ts = int(r.headers['X-RateLimit-Reset'])\n logger.debug(\"Rate limit: %s\" % (self.rate_limit))\n return r", "def __request(self, method, resource, retry=True):\n headers = {\"x-access-token\": self._accessToken}\n result = self.__call(method, resource, headers=headers)\n\n if result:\n return result\n elif result.status_code == 401 and retry:\n self.__authenticate()\n return self.__request(method, resource, retry=False)\n else:\n raise requests.HTTPError(result)", "def _request(self, method, url, body=None, headers=None, serialize=True):\n headers = headers or {}\n headers['Accept'] = 'application/json'\n headers['User-Agent'] = 'paxes-httpclient'\n if body and not 'Content-Type' in headers:\n headers['Content-Type'] = 'application/json'\n if self.auth_token:\n headers['X-Auth-Token'] = self.auth_token\n LOG.debug('>> %s %s, %s, %s' % (method, url, headers, body))\n conn = self._create_connection(url)\n if body and serialize:\n body = json.dumps(body)\n conn.request(method, url, body, headers)\n res = conn.getresponse()\n header_list = res.getheaders()\n header_dict = {}\n for ituple in header_list:\n header_dict[ituple[0].lower()] = ituple[1]\n response_info = {\n 'status': res.status,\n 'reason': res.reason,\n 'headers': header_dict,\n 'body': res.read()\n }\n LOG.debug('<< %d %s, %s, %s' % (response_info['status'],\n response_info['reason'],\n response_info['headers'],\n response_info['body']))\n conn.close()\n return response_info", "def _send_http_request(self, resource, method, data=None, params=None, headers=None):\n\n url = '/'.join((self.https_url, resource))\n\n response = self._session.request(\n url=url,\n method=method,\n data=data,\n params=params,\n headers=headers,\n proxies=self._proxies)\n response.raise_for_status()\n\n return response", "def _generate_http_request_method(self, method):\n verb = method.verb\n\n assert ResourceNode.is_valid_method(verb), \"%s is invalid verb\" % verb\n\n def unbound(self, **kwargs):\n \"\"\"\n Do the HTTP request for a given verb and return the replied json object\n\n This method will be bound to a ResourceNode object\n \"\"\"\n debug(\"url=\", self.url)\n with requests.sessions.Session() as session:\n\n params = None\n headers = {}\n data = None\n if verb in [\"GET\", \"DELETE\", \"HEAD\"]:\n params = kwargs\n elif verb in [\"POST\", \"PUT\", \"PATCH\"]:\n headers['Content-Type'] = 'application/json'\n data = json.dumps(kwargs)\n\n req = requests.models.Request(\n method = verb,\n url = self.url,\n headers = headers,\n files = None,\n data = data or {},\n json = None,\n params = params,\n auth = None,\n cookies = None,\n hooks = None\n )\n\n prep = session.prepare_request(req)\n\n if self.shared_config[\"request_validator_enable\"] and method.request_validator:\n method.request_validator(prep)\n\n 
proxies = {}\n settings = session.merge_environment_settings(\n prep.url, proxies, None, None, None\n )\n\n # Send the request.\n send_kwargs = {\n 'timeout': None,\n 'allow_redirects': True,\n }\n send_kwargs.update(settings)\n reply = session.send(prep, **send_kwargs)\n\n if self.shared_config[\"response_validator_enable\"] and method.response_validator:\n method.response_validator(reply,\n request_method=verb.lower(),\n raw_request=prep)\n\n ret = None\n if reply is not None and \\\n reply.status_code >= 200 and \\\n reply.status_code < 300 and \\\n reply.text:\n ret = reply.json()\n return ret\n\n # Update docstring to reflect the HTTP verb\n # FIXME: Does not work\n unbound.__doc__ = \"\"\" Do the HTTP %s request and return the replied json object \"\"\" % verb\n\n # Now bind the function 'unbound' to this ResourceNode\n bound = unbound.__get__(self, ResourceNode)\n\n verb_translator = self.shared_config[\"verb_translator\"]\n if verb_translator:\n method_name = verb_translator(method)\n else:\n method_name = verb.lower()\n\n self.__setattr__(method_name, bound)", "def __call__(self, **parameters):\n request = self._build_request(**parameters)\n\n return self.requestor.request(**request)", "def make_request(self, path, method, args=None, files=None, batch=False, raw_path=False):\n args = dict(args or {})\n args = {k.encode('utf-8'): unicode(v).encode('utf-8')\n for k, v in args.items()}\n\n if batch:\n # Then just return a dict for the batch request\n return {\n 'method': method,\n 'relative_url': '%s?%s' % (path, urllib.urlencode(args))\n }\n logger.info('Making a %s request at %s/%s with %s' % (method, self.api_root, path, args))\n if 'access_token' not in args:\n args['access_token'] = self.access_token\n try:\n if method == 'GET':\n url = path if raw_path else '%s/%s?%s' % (self.api_root, path, urllib.urlencode(args))\n f = urllib2.urlopen(url)\n elif method == 'POST':\n url = path if raw_path else '%s/%s' % (self.api_root, path)\n if files:\n encoder = MultipartFormdataEncoder()\n content_type, body = encoder.encode(args, files)\n req = urllib2.Request(url, data=body)\n req.add_header('Content-Type', content_type)\n f = urllib2.urlopen(req)\n else:\n f = urllib2.urlopen(url, urllib.urlencode(args))\n elif method == 'DELETE':\n url = path if raw_path else '%s/%s?%s' % (self.api_root, path, urllib.urlencode(args))\n req = urllib2.Request(url)\n req.get_method = lambda: 'DELETE'\n f = urllib2.urlopen(req)\n else:\n raise\n return json.load(f)\n except urllib2.HTTPError as e:\n err = AdsAPIError(e)\n # Info, not warning or error, because these often happen as an expected result because of user input\n # and well formed requests that facebook rejects.\n logger.info(u'API Error: {}'.format(err.message))\n raise err\n except urllib2.URLError as e:\n logger.warn(u'URLError: %s' % e.reason)\n raise", "def createHttpRequest(self, endpoint, payload=None, method=None, xDepth=None):\n\n logger.debug(\"Endpoint: {}\".format(endpoint))\n\n if endpoint is None:\n raise TypeError(\"expected CIC endpoint url but received None\",\"CIC_WRONG_ARGUMENT_TYPE_ERR\")\n\n # if no playload provided always do HTTP GET by default\n if payload is None:\n logger.debug(\"Preparing HTTP GET\")\n request = urllib2.Request(self.cicUrl+endpoint)\n\n elif ((payload is not None) and (method == \"POST\")):\n logger.debug(\"Preparing HTTP Post\")\n data = json.dumps(payload)\n request = urllib2.Request(self.cicUrl+endpoint,data, {'Content-Type': 'application/json'})\n request.get_method = lambda: 'POST'\n\n elif 
((payload is not None) or (method==\"PATCH\")):\n logger.debug(\"Preparing HTTP Patch\")\n data = urllib.urlencode(payload)\n request = urllib2.Request(self.cicUrl+endpoint,data)\n request.get_method = lambda: 'PATCH'\n\n if xDepth:\n request.add_header(\"X-Depth\", xDepth)\n\n return request", "def request(self, url, method, json=None, headers=None, **requests_parameters):\n requests_parameters[\"timeout\"] = requests_parameters.get(\"timeout\", 10)\n if requests_parameters.get(\"auth\"):\n requests_parameters[\"auth\"] = self.__auth(**requests_parameters[\"auth\"])\n\n request = getattr(requests, method.lower())\n requests_parameters[\"url\"] = url\n requests_parameters[\"json\"] = json\n requests_parameters[\"headers\"] = headers\n return self.wait(request, **requests_parameters)", "def request(self, method, full_path, data=None, headers=None, files=None):\n if headers is None:\n headers = {}\n else:\n headers = encode_meta_headers(headers)\n\n # set a default User-Agent header if it wasn't passed in\n if 'user-agent' not in headers:\n headers['user-agent'] = self.default_user_agent\n url = \"%s://%s%s\" % (\n self.parsed_url.scheme,\n self.parsed_url.netloc,\n full_path)\n self.resp = self._request(method, url, headers=headers, data=data,\n files=files, **self.requests_args)\n return self.resp", "def api_call(url, method, debug, **kwargs):\n resp = None\n attempt = 0\n maxattempts = 3\n req = Request(method.upper(), url, **kwargs)\n\n if debug:\n print(\"DEBUG: Request ({}) {}\".format(method.upper(), url))\n\n while True:\n try:\n attempt += 1\n resp = Session().send(\n Session().prepare_request(req), verify=True)\n resp.raise_for_status()\n break\n except (HTTPError, ConnectionError, Timeout) as ex:\n if attempt >= maxattempts:\n abort(ex.message)\n else:\n time.sleep(1)\n continue\n except RequestException as ex:\n abort(ex.message)\n\n if resp is not None:\n return resp\n else:\n abort(\"Error making API call to URL: \" % url)", "def request(*args):\r\n URL = BASE_URL + args[0]\r\n proxies = {'https': 'http://137.123.%s.135:8134' % randint(2, 233)}\r\n # print(args[0])\r\n if len(args) == 1: # GET method\r\n response = session.get(URL, headers=header, proxies=proxies)\r\n updateHeader(URL)\r\n else: # POST with data parameter addition\r\n response = session.post(URL, headers=header, data=args[1], proxies=proxies)\r\n try:\r\n response.raise_for_status()\r\n except UnboundLocalError as exc:\r\n print(exc)\r\n if len(args) == 2:\r\n print('POST FAIL! continue to file question')\r\n else:\r\n import traceback\r\n print(traceback.format_exc())\r\n exit(1)\r\n else:\r\n return response", "def sendRestRequest(self, method, path, parameters={}):\n # Make sure we got a valid method\n assert method in self.METHODS\n\n # Look up the http method we need\n f = self.METHODS[method]\n\n # Construct the url\n url = self.urlBase + path\n\n if hasattr(self, 'token'):\n # Add the authentication token to any parameters we got\n parameters.update({'token': self.token})\n\n # Make the request, passing parameters and authentication info\n result = f(url, params=parameters)\n\n # If success, return the json object. 
Otherwise throw an exception.\n if result.status_code == 200 or result.status_code == 403 :\n return result.json()\n else:\n print 'Showing result before raising exception:'\n print result.text\n raise Exception('Request: ' + result.url + ', return code: ' + str(result.status_code))", "def __send_request(self, path, method=\"GET\", params=None, use_token=True, use_json_content_type=False):\n url = \"{}/{}\".format(self.__api_url, path)\n method.upper()\n logger.debug(\"__send_request method: {} url: '{}' with parameters: {}\".format(method, url, params))\n if type(params) not in (dict, list):\n params = {}\n if use_token and self.__token:\n headers = {'Authorization': 'Bearer {}'.format(self.__token)}\n else:\n headers = {}\n # if use_json_content_type and params:\n headers['Content-Type'] = 'application/json'\n params = json.dumps(params)\n\n if method == \"POST\":\n response = requests.post(url, headers=headers, data=params)\n elif method == \"PUT\":\n response = requests.put(url, headers=headers, data=params)\n elif method == \"DELETE\":\n response = requests.delete(url, headers=headers, data=params)\n else:\n response = requests.get(url, headers=headers, params=params)\n if response.status_code == 401 and self.__refresh_token == 0:\n self.__get_token()\n return self.__send_request(path, method, params)\n elif response.status_code == 404:\n logger.warning(\"404: Sorry, the page you are looking for could not be found.\")\n logger.debug(\"Raw_server_response: {}\".format(response.text, ))\n elif response.status_code == 500:\n logger.critical(\"Whoops, looks like something went wrong on the server. Please contact with out support tech@sendpulse.com.\")\n else:\n try:\n logger.debug(\"Request response: {}\".format(response.json(), ))\n except:\n logger.critical(\"Raw server response: {}\".format(response.text, ))\n return response", "def _make_request_with_auth_fallback(self, url, headers=None, params=None):\n self.log.debug(\"Request URL and Params: %s, %s\", url, params)\n try:\n resp = requests.get(\n url,\n headers=headers,\n verify=self._ssl_verify,\n params=params,\n timeout=DEFAULT_API_REQUEST_TIMEOUT,\n proxies=self.proxy_config,\n )\n resp.raise_for_status()\n except requests.exceptions.HTTPError as e:\n self.log.debug(\"Error contacting openstack endpoint: %s\", e)\n if resp.status_code == 401:\n self.log.info('Need to reauthenticate before next check')\n\n # Delete the scope, we'll populate a new one on the next run for this instance\n self.delete_current_scope()\n elif resp.status_code == 409:\n raise InstancePowerOffFailure()\n elif resp.status_code == 404:\n raise e\n else:\n raise\n\n return resp.json()", "def request(self, method, *path, **data):\n\t\theaders = data.pop('headers', {})\n\t\tversion = data.pop('version', None)\n\t\tjson = data.pop('json', True)\n\t\tpath = urljoin(*path)\n\t\treturn self._request(method, path, version, data, headers, json)", "async def _api_request(self,\n method: str,\n path_url: str,\n params: Dict[str, Any] = {}) -> Dict[str, Any]:\n base_url = f\"https://{global_config_map['gateway_api_host'].value}:\" \\\n f\"{global_config_map['gateway_api_port'].value}\"\n url = f\"{base_url}/{path_url}\"\n client = await self._http_client()\n if method == \"get\":\n if len(params) > 0:\n response = await client.get(url, params=params)\n else:\n response = await client.get(url)\n elif method == \"post\":\n response = await client.post(url, data=params)\n\n parsed_response = json.loads(await response.text())\n if response.status != 200:\n err_msg = 
\"\"\n if \"error\" in parsed_response:\n err_msg = f\" Message: {parsed_response['error']}\"\n raise IOError(f\"Error fetching data from {url}. HTTP status is {response.status}.{err_msg}\")\n if \"error\" in parsed_response:\n raise Exception(f\"Error: {parsed_response['error']}\")\n\n return parsed_response", "async def request(\n self, method, url=None, *, path=\"\", retries=1, connection_timeout=60, **kwargs\n ):\n\n ALLOWED_KWARGS = {\n \"data\",\n \"params\",\n \"headers\",\n \"encoding\",\n \"json\",\n \"files\",\n \"multipart\",\n \"cookies\",\n \"callback\",\n \"timeout\",\n \"retries\",\n \"max_redirects\",\n \"follow_redirects\",\n \"persist_cookies\",\n \"auth\",\n \"stream\",\n }\n\n unknown_kwargs = set(kwargs) - ALLOWED_KWARGS\n if unknown_kwargs:\n raise TypeError(\n \"request() got unexpected keyword arguments {!r}\".format(\n \", \".join(str(x) for x in unknown_kwargs)\n )\n ) from None\n\n timeout = kwargs.get(\"timeout\", None)\n req_headers = kwargs.pop(\"headers\", None)\n\n if self.headers is not None:\n headers = copy(self.headers)\n if req_headers is not None:\n headers.update(req_headers)\n req_headers = headers\n\n async with self.sema:\n if url is None:\n url = self._make_url(path)\n\n retry = False\n\n sock = None\n try:\n sock = await timeout_manager(\n connection_timeout, self._grab_connection, url\n )\n port = sock.port\n\n req_obj = RequestProcessor(\n self,\n method,\n url,\n port,\n headers=req_headers,\n encoding=self.encoding,\n sock=sock,\n persist_cookies=self._cookie_tracker,\n **kwargs\n )\n\n try:\n if timeout is None:\n sock, r = await req_obj.make_request()\n else:\n sock, r = await timeout_manager(timeout, req_obj.make_request)\n except BadHttpResponse:\n if timeout is None:\n sock, r = await req_obj.make_request()\n else:\n sock, r = await timeout_manager(timeout, req_obj.make_request)\n\n if sock is not None:\n try:\n if r.headers[\"connection\"].lower() == \"close\":\n sock._active = False\n await sock.aclose()\n except KeyError:\n pass\n await self.return_to_pool(sock)\n\n # ConnectionErrors are special. They are the only kind of exception\n # we ever want to suppress. 
All other exceptions are re-raised or\n # raised through another exception.\n except ConnectionError as e:\n if retries > 0:\n retry = True\n retries -= 1\n else:\n raise e\n\n except Exception as e:\n if sock:\n await self._handle_exception(e, sock)\n raise\n\n # any BaseException is considered unlawful murder, and\n # Session.cleanup should be called to tidy up sockets.\n except BaseException as e:\n if sock:\n await sock.aclose()\n raise e\n\n if retry:\n return await self.request(\n method, url, path=path, retries=retries, headers=headers, **kwargs\n )\n\n return r", "def request(self, *args, **kwargs):\n response = super().request(*args, **kwargs)\n\n # Log request headers\n for header, value in response.request.headers.items():\n LOGGER.debug(\"REQUEST %s: %s\", header, value)\n\n # Raise HTTP errors as exceptions\n try:\n response.raise_for_status()\n except requests.HTTPError as http_error:\n LOGGER.error(http_error)\n LOGGER.error(http_error.response.json())\n raise\n\n # Log response headers\n for header, value in response.headers.items():\n LOGGER.debug(\"RESPONSE %s: %s\", header, value)\n\n return response", "def _send(self, endpoint, method, extra_headers=None, **kwargs):\n\n headers = self.headers\n if extra_headers:\n headers.update(extra_headers)\n if method == \"GET\":\n return requests.get(\n f\"{self.API_URL}{endpoint}\",\n headers=headers,\n **kwargs\n )\n elif method == \"POST\":\n return requests.post(\n f\"{self.API_URL}{endpoint}\",\n headers=headers,\n **kwargs\n )\n else:\n raise ValueError(f\"supported methods are GET,POST but given {method}\")", "def get(self, url, authenticate=True, headers=None, query_params=None, expected_status_codes=None, retry=0):\n\n # Initialize headers if not provided.\n if headers is None:\n headers = {}\n\n # Set expected status codes to default value if not provided.\n if expected_status_codes is None:\n expected_status_codes = [200, 204]\n\n # If request is authenticated, add authorization header.\n if authenticate:\n headers[\"Authorization\"] = self._get_authorization_header()\n\n # Append query params to URL if provided.\n if query_params is not None:\n url = f\"{url}?\"\n for key, value in query_params.items():\n url = f\"{url}{key}={value}&\"\n\n # If max number of retries is exceeded, abort.\n if retry > consts.MAX_RETRIES:\n log.abort_and_exit(\"GHUB\", f\"Request to {url} with headers {headers} failed after {retry} retries.\")\n\n # Sleep before making request to ensure proper delay.\n time.sleep(consts.API_REQUEST_DELAY_SEC)\n\n # Before making a request, check for rate limiting. Wait if necessary.\n if self.is_rate_limited():\n self._handle_rate_limit()\n\n # Make request and update rate limit status from response headers.\n response = requests.get(url, headers=headers)\n self._rate_limit_status = self._parse_rate_limit_headers(response.headers)\n status = response.status_code\n\n retry_after_header = response.headers.get(\"Retry-After\")\n if retry_after_header is not None:\n # Retry-After header found, indicates abuse rate limiting. 
Discard response, wait and retry.\n retry_sec = int(retry_after_header)\n log.warning(\"GHUB\",\n f\"Received Retry-After (abuse rate limiting), trying again after '{retry_sec}' seconds.\")\n self.update_rate_limit_status()\n self.get(url, headers, expected_status_codes, retry + 1)\n\n if (status == 403) or (status not in expected_status_codes):\n # Check for rate limiting in case of unexpected status code.\n if self.is_rate_limited():\n # Wait until the rate limit should be lifted.\n self._handle_rate_limit()\n else:\n # It was not a rate limiting issue - log a warning.\n log.warning(\"GHUB\", f\"Unexpected status code {status} for request {url}.\")\n\n # Rate limit should now be lifted if there was one. Retry, update number of retries.\n self.get(url, headers, expected_status_codes, retry + 1)\n\n return status, response.json(), self._parse_link_header(response.headers.get(\"Link\"))", "def _req(self, host, path, meth, timeout, params, data, is_search):\n if is_search and self.http_search:\n url = 'http://%s%s' % (host, path)\n else:\n url = 'https://%s%s' % (host, path)\n req = self.session.request(meth, url, params=params, data=data,\n headers=self.headers)\n res = yield from req\n with async_timeout.timeout(timeout):\n if res.status // 100 == 2:\n return (yield from res.json())\n elif res.status // 100 == 4:\n message = 'HTTP Code: %d' % res.status\n try:\n message = (yield from res.json())['message']\n finally:\n raise AlgoliaException(message)\n # TODO: Check this for replacement.\n res.raise_for_status()" ]
[ "0.62948996", "0.62826276", "0.62717605", "0.6218818", "0.61214685", "0.6097778", "0.6028783", "0.59454954", "0.5928931", "0.5927472", "0.5919466", "0.5915387", "0.584488", "0.58434975", "0.58159757", "0.5748989", "0.57105803", "0.57093656", "0.5705746", "0.5681902", "0.5676401", "0.56446207", "0.5643871", "0.5628112", "0.56215066", "0.5618409", "0.56130373", "0.5610854", "0.56055844", "0.5594453", "0.5584854", "0.5580722", "0.557429", "0.55738443", "0.55695915", "0.5566294", "0.55656624", "0.55438364", "0.5539164", "0.5538559", "0.55291736", "0.54811084", "0.5475978", "0.5451646", "0.545043", "0.54398406", "0.5429254", "0.54267454", "0.542642", "0.54243445", "0.54238683", "0.5420556", "0.541997", "0.54095614", "0.5409554", "0.5395159", "0.5391985", "0.5390644", "0.5385586", "0.5383648", "0.53811157", "0.5375195", "0.5359815", "0.5355296", "0.5347722", "0.5346074", "0.53438085", "0.5331578", "0.5323475", "0.53108877", "0.531088", "0.5307764", "0.5304352", "0.5303051", "0.5298841", "0.5298327", "0.52903354", "0.5275151", "0.52722275", "0.5267786", "0.5261322", "0.5236972", "0.5235848", "0.5235078", "0.5225156", "0.52125716", "0.52120453", "0.5211667", "0.5211663", "0.5209183", "0.5207017", "0.5204289", "0.5201406", "0.5198991", "0.5198148", "0.51979023", "0.5180312", "0.5180179", "0.51796824", "0.5176143" ]
0.7337592
0
Wrapper for requests GET method
def _get(self, *args, **kwargs):
    return self._request('get', *args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(self, *args, **kwargs):\n self.request(\"get\", *args, **kwargs)", "def http_method_get():\n return 'GET'", "def get(self, *path, **data):\n\t\treturn self.request('GET', *path, **data)", "def _get(self, url):\n return self._request(url)", "def get(self, *args, **kwargs):\n return self._request('get', *args, **kwargs)", "def get(self, *args, **kw):\n kw['method'] = 'GET'\n return self.open(*args, **kw)", "def get(self, *args, **kwargs):\n return self._hit(\"GET\", *args, **kwargs)", "def do_GET(self):\n self.http_method = 'GET'\n self.response()", "def get(self, *args, **kwargs):", "def _get(self, request_obj):\n return self._execute_action(request_obj, 'GET')", "def get(self, *args, **kwargs):\n url = urljoin(self.instance(), args[0])\n return self._requests_call(util.requests_get, url, *args[1:], **kwargs)", "def get(self, request):\n pass", "def get(self, method, uri, query_param, request_param, headers, **kwargs):\n raise NotImplementedError", "def do_GET(self):\n self.log.debug('do_GET called')\n self.HeadGet('GET')", "def __get(self, url, headers=None):\n return self.__req(url, \"GET\", headers=headers)", "def aget(url, **kwargs):\n return requests.get(url, **kwargs)", "def _get(self, path=\"\", query={}, **kwargs):\n qs = urllib.urlencode(query)\n uri = force_json(self.uri + path) + \"?\" + qs\n return self.client.request(uri, method=\"GET\", **kwargs)", "def get(self):\n self.get_or_post(method='GET')", "def test_get(self):\n return self.doRequest(self.url, method=\"GET\", body=self.input)", "def do_GET(self):\r\n self._send_handler_response('GET')", "def get(self, *args, **kwargs):\n return self.handle_get_request()", "def get(self, path, req = None, **kwargs):\n req = req or []\n return self.route(path, req=req+[filter_method(['GET'])], **kwargs)", "def get(self, *args, **kwargs):\n return Response({'foo': 'bar'})", "def get(self, url):\n return self._request('GET', url)", "def get(self, *args):", "def get(self, *args, **kwargs):\n if len(args) != 1:\n raise TypeError('wrong number of arguments')\n return self._geturl.get(*args, **kwargs)", "def get(url, to_error=_default_to_error, **kwargs):\n\n return request('get', url, to_error=to_error, **kwargs)", "def get(self, path):\n return self.request(path, method='GET')", "def get(self, *args, **kwargs):\n pass", "def get(self, *args, **kwargs):\n pass", "def get(self, url: str, **kwargs: Dict[str, Any]) -> Any:\n pass", "def http_get(self, **kwargs):\n return self.rabjcallable.get(**kwargs)", "def _get(self):\n return self.request(method=\"get\", path=self.router.fields)", "def httpGet(self, url, parameters=None):\r\n return self.auth.get(url, parameters)", "def do_GET(self):\n self._try_to_process_request(self._handle_get_request)", "def req_get(url, headers=None, params=None) -> Response:\n if params:\n url = \"{}?{}\".format(url, parse.urlencode(params))\n\n req = Request(url, headers=headers, method=\"GET\")\n\n with request.urlopen(req) as res:\n response = Response(res)\n return response", "def simulate_get(app, path, **kwargs) -> _ResultBase:\n\n return simulate_request(app, 'GET', path, **kwargs)", "def _get(self, url, **queryparams):\n url = urljoin(self.base_url, url)\n if len(queryparams):\n url += '?' 
+ urlencode(queryparams)\n try:\n r = self._make_request(**dict(\n method='GET',\n url=url,\n auth=self.auth,\n timeout=self.timeout,\n hooks=self.request_hooks,\n headers=self.request_headers\n ))\n except requests.exceptions.RequestException as e:\n raise e\n else:\n if r.status_code >= 400:\n _raise_response_error(r)\n return r.json()", "def make_get_request(client, endpoint):\n return client.get(endpoint)", "def get(self, url, query=None):\n # Perform get request with query filter\n if query is not None:\n return self._query(url, 'GET', params=quote(f'query=\"{query}\"'))\n\n # Perform simple get request\n return self._query(url, 'GET')", "def get(self, url, *args):\n\n req_method = type(self.client).__name__\n\n if not url.startswith(\"http\"):\n\n if not url.startswith(\"/\"):\n url = \"/%s\" % url\n\n url = \"%s%s\" % (self.base, url)\n\n if req_method == \"FlaskClient\":\n self.client.get(url, headers=self.headers, *args)\n\n else:\n self.client.get(url, headers=self.headers, *args)", "def get(self):\r\n return http.Request('GET', self.get_url()), parsers.parse_json", "def get(self):\r\n return http.Request('GET', self.get_url()), parsers.parse_json", "def get(self, **kwargs):\r\n params = base.get_params(None, kwargs, serialize_param=serialize_param)\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(self, **kwargs):\r\n params = base.get_params(None, kwargs, serialize_param=serialize_param)\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(self, **kwargs):\r\n params = base.get_params(None, kwargs, serialize_param=serialize_param)\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(self, **kwargs):\r\n params = base.get_params(None, kwargs, serialize_param=serialize_param)\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(self, **kwargs):\r\n params = base.get_params(None, kwargs, serialize_param=serialize_param)\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(self, **kwargs):\r\n params = base.get_params(None, kwargs, serialize_param=serialize_param)\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get(url_ext, query_params={}, custom_err=None, timeout=DEFAULT_TIMEOUT):\r\n url = get_url() + url_ext\r\n # get request headers\r\n headers = get_headers()\r\n\r\n r = requests.get(url, params=query_params, headers=headers, timeout=timeout)\r\n return handle_response(r, \"GET\", custom_err)", "def _get_request(self, endpoint, params=None, **kwargs):\n\n return requests.get(self.base_url + endpoint, params, **kwargs)", "def _get_request(url_root,api_key,path,response_type,params, ssl_verify):\n url = _url_builder(url_root,api_key,path,params)\n content = _fetch(url, ssl_verify)\n response = _dispatch(response_type)(content)\n return response", "def _get(self, url, **kwargs):\n return self._http.get(self.cluster + url, timeout=self.timeout, **kwargs)", "def get_request(\n self,\n alias,\n uri,\n headers=None,\n data=None,\n json=None,\n params=None,\n allow_redirects=None,\n timeout=None):\n session = self._cache.switch(alias)\n # XXX workaround to restore library default behaviour. 
Not needed in new keywords\n redir = True if allow_redirects is None else allow_redirects\n\n response = self._common_request(\n \"get\",\n session,\n uri,\n params=params,\n headers=headers,\n data=data,\n json=json,\n allow_redirects=redir,\n timeout=timeout)\n\n return response", "async def get(self, path, params=None, json_data=None):\n response = await self.request('GET', path, params, json_data)\n return response", "def use_GET_in(fn, request):\n response = fn(request.GET)\n if isinstance(response, dict):\n return HttpResponse(json.dumps(response),\n content_type='application/json')\n else:\n return response", "def get(self, path='', **kwargs):\n\n r = self.session.get(self.url(path), **kwargs)\n self.log_request(r)\n return r", "def simulate_get(self, path='/', **kwargs) -> _ResultBase:\n return self.simulate_request('GET', path, **kwargs)", "def _request(self, url, **kwargs):\n headers = {'PRIVATE-TOKEN': self.token}\n response = make_request(self.base_url + url, headers=headers, **kwargs)\n logging.info('Requested: {0}'.format(url))\n logging.info('Method: {0}'.format(kwargs.get('method', 'GET')))\n logging.info(response.content)\n return json.loads(response.content)", "def _http_get(self, url, params={}):\n if not self.token:\n self.get_token()\n headers = {'Authorization': self.token, 'Accept': 'application/json; indent=4'}\n url = self.server + '/api2' + url\n try:\n r = requests.get(url=url, headers=headers, params=params)\n except requests.exceptions.RequestException as e:\n return check_failed(e)\n # raise ClientHttpError(None, e)\n if r.status_code != 200:\n return check_failed(r.status_code)\n # return ClientHttpError(r.status_code, json.loads(r.text)['error_msg'])\n try:\n data = json.loads(r.text)\n except:\n data = r.text\n # TODO: check data\n return data", "def api_get(self, path, query=None):\n return self._api_request(path, 'GET', query=query)", "def http_request(method, url, params=None):\n if method.lower() not in _request_methods:\n raise NotImplementedError(\"HTTP request method not implemented\")\n\n\n return _request_methods[method.lower()](url, params)", "def get(self, request, format=None):\n an_apiview = [\n 'Uses HTTP methods as functions (get,post,patch,put,delete)',\n 'Is similar to a traditional django view',\n 'Gives you the most control over the applicaton logic',\n 'Is mapped manually to the URLs',\n ]\n return Response({'message': 'get method', 'an_apiview': an_apiview})", "def render_GET(self, request):\r\n d = self._processGETReq(request.args)\r\n d.addCallback(self._processGETResp, request)\r\n d.addErrback(self._processGETErr, request)\r\n\r\n return NOT_DONE_YET", "def simulate_get(self, path='/', **kwargs):\n return self.simulate_request('GET', path, **kwargs)", "def http_get(self) -> Optional[pulumi.Input['HTTPGetActionArgs']]:\n return pulumi.get(self, \"http_get\")", "def sr_get(self, route_or_uri, params=None, query=None, **kwargs):\n return self.__req(\n route_or_uri,\n params=params,\n query=query,\n op=self.get,\n raw_response=True,\n **kwargs,\n )", "def get(self, url):\r\n response = self.requestHelper.get(url)\r\n return self.process(response)", "def _request(self, opts, query, query_key='q'):\n params = opts['params']\n params[query_key] = query\n resp = requests.get(opts['url'], params=params, headers=self._headers)\n if not resp.ok:\n raise Exception(\"Server threw an error for: {}\".format(resp.url))\n return resp.json()", "def _get_request(url, params):\n request = requests.get(url, params=params)\n\n return request", "def 
__get(self, url):\n\n res = requests.get(url, headers=self.auth_header)\n res.raise_for_status()\n return res", "def _external_request(self, method, url, *args, **kwargs):\n self.last_url = url\n if url in self.responses.keys() and method == 'get':\n return self.responses[url] # return from cache if its there\n\n headers = kwargs.pop('headers', None)\n custom = {'User-Agent': useragent}\n if headers:\n headers.update(custom)\n kwargs['headers'] = headers\n else:\n kwargs['headers'] = custom\n\n response = getattr(requests, method)(url, *args, **kwargs)\n\n if self.verbose:\n print(\"Got Response: %s\" % url)\n\n if response.status_code == 503:\n raise SkipThisService(\"Service returned 503 - Temporarily out of service.\")\n\n if method == 'get':\n self.responses[url] = response # cache for later\n\n self.last_raw_response = response\n return response", "def retrieve(self, request, pk=None):\n\n return Response({'http_method': 'GET'})", "def get(self, path: str, params: dict) -> dict:\n return self.request(\"GET\", path, params)", "def get_response(request_url):\n return requests.get(request_url)", "def _Get(self, url, timeout_seconds, headers): # pylint: disable=W0613, R0201\n raise NotImplementedError() # pragma: no cover", "def _genericGet(self,resource,**kwargs):\n requestUrl = self.apiRootUrls[0] + resource\n debugRequest(requestUrl)\n r = retry(self.session.get,requestUrl,params=kwargs)\n if r.json is None:\n debugError('not json. here is the actual body text:')\n debugRaw(r.text)\n return\n return r.json", "def request_get(self, path, params=None):\n\tif params is None:\n\t\tparams = {}\n\t\trequest_url = self.host_url + path\n\t\ttry:\n\t\t\tresponse = self.session.get(request_url, auth=self.api_key, params=params)\n\t\texcept requests.RequestException as e:\n\t\t\traise self.DataUnavailable(\"Network exception\") from e\n\n\tif response.status_code != 200:\n\t\traise self.DataUnavailable(\n\t\t\t\"Unexpected response status (%s)\" % response.status_code\n\t\t)\n\n\treturn response.json()", "def simple_get(self, url):\r\n \"\"\"\r\n The simple_get function accepts a single url argument. \r\n It then makes a GET request to that url. \r\n If nothing goes wrong, you end up with the raw HTML content for the page you requested. 
\r\n If there were any problems with your request (like the url is bad or the remote server is down) \r\n then your functon returns None.\r\n \"\"\"\r\n try:\r\n with closing(get(url, stream=True)) as resp:\r\n if self.is_good_response(resp):\r\n return resp.content\r\n else:\r\n return None\r\n except RequestException as e:\r\n self.log_error('Error during requests to {0} : {1}'.format(url, str(e)))\r\n return None", "def get(self, url, params=None):\n # TODO: handle params\n path = self.get_path(url)\n return self.build_response_for(path)", "def get_request(url):\n\tr = requests.get(url)\n\treturn(r)", "def request(self, url, *args, **kwargs):\n raise NotImplementedError", "async def simulate_get(self, path='/', **kwargs) -> _ResultBase:\n return await self.simulate_request('GET', path, **kwargs)", "def get(self, url, params=None):\n return self.session.get(url=self.base_url + url, params=params)", "def _get(self, url: str) -> requests.Response:\n # todo: do some error checking here\n if url.startswith(API_PATH['base']):\n try:\n # logger.debug(f\"RestClient._get(): {url}\") # log in calling function\n response = requests.get(url, auth=self.auth)\n rest_code = response.json()['meta']['code']\n if rest_code not in [200, 201, 204]:\n raise RestException(f\"REST API Error: {rest_code}. {response.content}\")\n except RestException as e:\n logger.error(e)\n return None\n return response\n else:\n raise ValueError(f\"URL is invalid: {url}\")", "def test_two_legged_get(self):\n resp, content = self._two_legged(\"GET\")\n self.assertEqual(int(resp['status']), 200)", "def _http_get(self, path):\n # Prepare the request path\n if path[0] == '/':\n path = path[1:]\n path = urljoin(self.servlet_path, path)\n\n # Request the end points\n conn = httplib.HTTPConnection(\"localhost\", self.port)\n conn.request(\"GET\", path)\n result = conn.getresponse()\n data = result.read()\n conn.close()\n\n # Convert the response to a string\n return result.status, to_str(data)", "def test_get(self):\n url, port = self.server.address\n\n #couple of basic GETs\n r = self.client.get(\"http://{0}:{1}/\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/200\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/400\".format(url, port))\n self.assertEqual(400, r.status_code)\n\n # GETs with params\n r = self.client.get(\"http://{0}:{1}/get_with_params\".format(url, port),\n params=self.params)\n self.assertEqual(200, r.status_code)\n self.assertEqual(str(self.params), r.text)\n\n # GETs with ...?", "def get(self, *args: Any, **kwargs: Any) -> Response:\n return cast(Response, super().get(*args, **kwargs))", "def GET(self, req):\n # early checks for request validity\n validate_container_params(req)\n return self.GETorHEAD(req)", "def __call__(self, request):\n response = self.get_request(request)\n return response", "def do_get(self, *args):\n raise NotImplementedError()", "def get(*args, **kwargs):\n\n response = yield from aiohttp.request('GET', *args, **kwargs)\n return (yield from response.read())", "def api_get(self, *args, **kwargs):\n return self.api_get_with_response(*args, **kwargs)[0]", "def simple_get(url):\n\n def is_good_response(resp):\n \"\"\"\n Checks if a response is good.\n \"\"\"\n content_type = resp.headers['Content-Type'].lower()\n return (resp.status_code == 200 and content_type is not None\n and 
content_type.find('html') > -1)\n\n def log_error(err):\n \"\"\"\n Simple error logging wrapper\n \"\"\"\n print(err)\n\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n return None\n\n except RequestException as err:\n log_error(\"Error during requests to {0} : {1}\".format(url, str(err)))", "def get(self):\r\n request = http.Request('GET', self.get_url())\r\n\r\n return request, parsers.parse_json", "def get(self):\r\n request = http.Request('GET', self.get_url())\r\n\r\n return request, parsers.parse_json", "def get(self):\r\n request = http.Request('GET', self.get_url())\r\n\r\n return request, parsers.parse_json", "def get(self):\r\n request = http.Request('GET', self.get_url())\r\n\r\n return request, parsers.parse_json", "def read(self) -> requests.request:\n # Check if id is set,\n if self.args.id is not None:\n self.REQUEST_URL += str(self.args.id)\n\n # Send GET request\n return requests.get(self.REQUEST_URL)" ]
[ "0.7811461", "0.7689053", "0.7667969", "0.75441927", "0.753764", "0.75005716", "0.7481838", "0.746681", "0.7408591", "0.7339474", "0.7280342", "0.7256964", "0.7247393", "0.72263336", "0.7189114", "0.718059", "0.7164386", "0.7148469", "0.71383345", "0.71329045", "0.7128152", "0.7112754", "0.7109763", "0.7102752", "0.70882744", "0.70575804", "0.70422906", "0.703662", "0.70077103", "0.70077103", "0.700547", "0.69619167", "0.69385153", "0.69337016", "0.6905958", "0.690353", "0.68993515", "0.6887376", "0.6860954", "0.68594664", "0.68543136", "0.6853523", "0.6853523", "0.6848205", "0.6848205", "0.6848205", "0.6848205", "0.6848205", "0.6848205", "0.6841915", "0.6835881", "0.6815101", "0.68119067", "0.6810421", "0.6771018", "0.6770542", "0.67695796", "0.6762409", "0.67560613", "0.6736389", "0.6732671", "0.67226946", "0.67196035", "0.67166436", "0.6710178", "0.66652286", "0.665634", "0.664868", "0.664343", "0.66416323", "0.66217285", "0.6619725", "0.6619073", "0.6617116", "0.66151595", "0.6611877", "0.659445", "0.65876997", "0.6584064", "0.6582358", "0.6580693", "0.6576382", "0.6563517", "0.65634197", "0.6555826", "0.6554616", "0.65532076", "0.65480715", "0.6547821", "0.65468127", "0.6543998", "0.6539063", "0.6528512", "0.65259594", "0.65209395", "0.65181667", "0.65181667", "0.65181667", "0.65181667", "0.65166736" ]
0.79950804
0
Wrapper for requests POST method
def _post(self, *args, **kwargs):
    return self._request('post', *args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self, *args, **kwargs):\n return self._requests_call(util.requests_post, *args, **kwargs)", "def http_method_post():\n return 'POST'", "def http_post(self, **kwargs):\n return self.rabjcallable.post(**kwargs)", "def post(self, *args, **kwargs):\n self.request(\"post\", *args, **kwargs)", "def test_post(self):\n return self.doRequest(self.url, method=\"POST\", body=self.input)", "def post(self, *path, **data):\n\t\treturn self.request('POST', *path, **data)", "def _post(self, request_obj):\n return self._execute_action(request_obj, 'POST')", "def post(self):", "def post(self, *args, **kwargs):\n return self._hit(\"POST\", *args, **kwargs)", "def _post(self, url, **kwargs):\n return self._call('POST', url, kwargs)", "def _post_request(url, params):\n data = dumps(params).encode(\"utf-8\")\n request = requests.post(url, data=data)\n return request", "def do_POST(self,):\n self.http_method = 'POST'\n self.response()", "def json_post(method):\n def wrap(*args, **kwargs):\n # idx is the position of the data\n idx = 0\n if not isinstance(args[0], webob.Request):\n idx = 1\n\n json_data = json.loads(args[idx].body)\n kwargs['post_data'] = json_data\n\n #print \"JP:\", repr(args), repr(kwargs)\n\n return method(*args, **kwargs)\n \n return json_return(wrap)", "def _post(self, url, data=None):\n if data is not None:\n data = urllib.urlencode(data)\n return self._request(url, method='POST', payload=data)", "def post(self, *args, **kw):\n kw['method'] = 'POST'\n return self.open(*args, **kw)", "def post(self, request):\n pass", "def simulate_post(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'POST', path, **kwargs)", "def post(self, *args, **kwargs):\n return self.handle_post_request()", "def post(self, path, **post_args):\n return self.request(path, data=post_args, method='POST')", "def apost(url, **kwargs):\n return requests.post(url, **kwargs)", "def _createPostRequest(self, postBody: dict) -> object:\n request = HttpRequest()\n request.method = \"POST\"\n for name,value in postBody.items():\n request.POST[name]= value\n return request", "def make_post_request(client, endpoint, data):\n return client.post(endpoint, data=data)", "def post_algorithm():\n try:\n request_json = request.get_json()\n result = json.dumps([])\n response = app.response_class(\n response=result,\n status=200,\n mimetype='application/json')\n except ValueError as e:\n response = app.response_class(\n status=400,\n response=str(e)\n )\n return response", "def post_required(func):\n def post_wrapper(request,*args,**kwds):\n res = http.ResponseBuilder()\n if request.method != 'POST':\n return res.error(\"post is required\").build_json()\n return func(request,*args,**kwds)\n return post_wrapper", "def post(self, *args, **kwargs):\n headers = self.post_headers\n headers.update(kwargs.get('headers', {}))\n kwargs['headers'] = headers\n return self._request('post', *args, **kwargs)", "def post():\n pass", "def make_post_request(url:str, post_params:dict, **kwargs):\n\n print(\"Making call to '{}'...\".format(url))\n resp = requests.post(url, data=post_params, **kwargs)\n print(\"Received response.\")\n\n if not resp.ok:\n return False, resp.status_code, json.loads(resp.content)\n\n return True, resp.status_code, json.loads(resp.content)", "def post(self):\n self.get_or_post(method='POST')", "def post(self, path, req = None, **kwargs):\n req = req or []\n return self.route(path, req=req+[filter_method(['POST'])], **kwargs)", "def post(self, request, *args, **kwargs):\n return self.get(request, *args, 
**kwargs)", "def do_POST(self):\r\n self.do_GET()", "def post(self, url_or_path, *args, **kwargs):\n return self.request.post(url_or_path, *args, **kwargs).json()", "def post(self):\n data = request.json\n return save_new_post(data=data)", "def make_post_request(self, url, data):\n auth = (self.AUTH_ID, self.AUTH_TOKEN)\n headers = {'content-type': 'application/json'}\n return requests.post(url, data=data, auth=auth, headers=headers)", "def httpPost(self, url, post_parameters=None):\r\n return self.auth.post(url, post_parameters)", "def _post(self, path, data=None):\n headers = {'content-type': 'application/json'}\n if data:\n data = json.dumps(data)\n r = requests.post(self._url(path), data=data, headers=headers)\n assert r.status_code == 200\n return r", "def post(self, data):\n return requests.post(self.url, headers=self.headers, data=data)", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def post(self):\n pass", "def httpPost(self, url='', data='', params={}, headers={}):\n\n return self.httpRequest('POST', url, data, params, headers)", "def api_post(self, *args, **kwargs):\n return self.api_post_with_response(*args, **kwargs)[0]", "def _post(self, path=\"\", body=None, **kwargs):\n uri = force_json(self.uri + path)\n return self.client.request(uri, method=\"POST\", d=body, **kwargs)", "def raw_post(\n self, uri: str, data: Optional[Dict] = None, json: Optional[Dict] = None, **kwargs\n ) -> requests.Response:\n return self.session.post(url=self._url(uri), data=data, json=json, **kwargs)", "def do_POST(self):\r\n self._send_handler_response('POST')", "def test_client_can_do_post_request(self):\n response = self.httpbin_4.test_requests_post_method()\n self.assertEqual(response.request.method, 'POST')\n self.assertEqual(response.status_code, 200)", "def simulate_post(self, path='/', **kwargs):\n return self.simulate_request('POST', path, **kwargs)", "def post(url):\n url = add_slash(url)\n\n def _(func):\n re_url = re.compile(\"^%s$\" % url)\n REQUEST_MAPPINGS['POST'].append((re_url, url, func))\n return func\n return _", "def __post(self, url, payload=None, headers=None):\n if headers is None:\n headers = {\"Content-Type\": \"application/json\"}\n return self.__req(url, \"POST\", body=payload, headers=headers)", "def post(url, data=None, json=None, **kwargs):\n\n return request('post', url, data=data, json=json, **kwargs)", "def api_post(self, path, data):\n return self._api_request(path, 'POST', data)", "def get(self):\n self.post()", "def _post(self, url, data=None):\n url = urljoin(self.base_url, url)\n try:\n r = self._make_request(**dict(\n method='POST',\n url=url,\n json=data,\n auth=self.auth,\n timeout=self.timeout,\n hooks=self.request_hooks,\n headers=self.request_headers\n ))\n except requests.exceptions.RequestException as e:\n raise e\n else:\n if r.status_code >= 400:\n _raise_response_error(r)\n\n if r.status_code == 204:\n return None\n return r.json()", "def post(url, to_error=_default_to_error, data=None, json=None, **kwargs):\n\n return request('post',\n url, to_error=to_error, data=data, json=json, **kwargs)", "def simulate_post(self, path='/', **kwargs) -> _ResultBase:\n return self.simulate_request('POST', path, **kwargs)", "def post(self, path: str, params: dict) -> dict:\n 
return self.request(\"POST\", path, params)", "def test_post_method(self):\n self.getPage('/', method='POST')\n self.assertStatus('200 OK')\n self.assertHeader('Content-Type', 'application/json')", "def _req_post(self, url: str, data, raw_res: bool = False):\n self._get_cookies()\n if not self._cookies:\n return\n r = reqtry.post(url, cookies=self._cookies, data=data, allow_redirects=False, timeout=(3, 3), tries=3, delay=1,\n backoff=1.5, jitter=(1, 1.5))\n if raw_res:\n return r\n assert r.status_code == 200, f\"Post request: Invalid http status code: {r.status_code}\"\n assert '\"errCode\":0' in r.text, f'Post response with error from server. Response: {r.text}'\n return r.text", "def post(self, method, uri, query_param, request_param, headers, **kwargs):\n raise NotImplementedError", "def post_form(url, headers, payload):\n\n headers['Content-Type'] = 'application/x-www-form-urlencoded'\n\n return RestClient.make_post_request(url, headers=headers, data=payload)", "def do_POST(self): # pylint: disable=invalid-name\n self.handle_request()", "def _api_post(self, query, data):\n r = requests.post(self._url + query,\n headers={'Content-Type': 'application/json', 'Accept': 'application/json'},\n auth=self._auth, data=json.dumps(data), timeout=self._request_timeout_secs)\n r.raise_for_status()\n return r", "def post(self, url, data=None):\r\n response = self.requestHelper.post(url, data=data)\r\n return self.process(response)", "def test_api_use_method_post(self):\n body = Body()\n response = self.client.open(\n '/api/use/{method}/'.format(method='method_example'),\n method='POST',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def PostRequest(self):\n if self.__Payload: \n self.__Answer = requests.post(self.__URL, data = self.__Payload, headers = self.__Headers)\n Logs.WriteToLog(\"Data transited to web server\")\n else:\n Logs.WriteToLog(\"No payload in HTTP request\")\n raise Exception(\"Payload must be setted\")", "def post(self, url: str, **kwargs: Dict[str, Any]) -> Any:\n pass", "def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)", "def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)", "def post(self, request, *args, **kwargs):\n return self.create(request, *args, **kwargs)", "async def simulate_post(self, path='/', **kwargs) -> _ResultBase:\n return await self.simulate_request('POST', path, **kwargs)", "def post(self, url, data):\r\n print(f\"POST {url}\")\r\n print(\"data:\")\r\n self.pp.pprint(data)\r\n response = self.session.post(url, data=data)\r\n print(f\"STATUS {response.status_code}\")\r\n self.print_cookies()\r\n return response", "def post_resource(**kwargs) -> dict:\n\n response = requests.post(**kwargs)\n print(f\"HTTP {response.request.method}: {response.url}\")\n print(f'HTTP Status code: {response.status_code}')\n\n # Raise an exception if the response is not OK\n if not response.ok:\n print(response.text)\n response.raise_for_status()\n\n # Convert the reply to JSON\n response_json = response.json()\n\n # Return json parsed data as a python dictionary\n return response_json", "def _http_post(self, url, params={}):\n url = self.server + '/api2' + url\n try:\n r = requests.post(url=url, data=params)\n except requests.exceptions.RequestException as e:\n return check_failed(e)\n # raise ClientHttpError(None, e)\n if r.status_code != 200:\n return check_failed(r.status_code)\n # return 
ClientHttpError(r.status_code, r.text)\n data = json.loads(r.text)\n # TODO: check data\n return data", "def post(self, url, body=None, headers=None, serialize=True):\n return self._request('POST', url, body, headers, serialize)", "def post(self):\n code, status = run_handlers.handle_data_post(self.request.headers, self.request.body)\n self.set_status(code)\n self.write(status)\n self.finish()", "async def post(self, path, params=None, json_data=None):\n response = await self.request('POST', path, params, json_data)\n return response", "def _request_post(self, path, method='POST', body=None, headers=None):\n url = '{}{}'.format(self._url_base, path)\n headers = self._headers() if headers is None else headers\n \n response, content = super(DSBaseService, self)._request(url,\n method=method,\n body=str(body).replace(\"'\", '\"'),\n headers=headers)\n if int(response['status']) in (200, 204):\n if content != \"\":\n res_text = json.loads(content)\n else:\n res_text = \"\"\n post_response = {\n 'status': response['status'],\n 'message': 'SUCCESS',\n 'content': []\n }\n post_response['content'].append(res_text)\n return post_response\n else:\n raise RuntimeError('{} responded with status code {}'.format(url, response['status']))", "def send(self, request: Request, **requests_kwargs) -> Response:", "def _post_request(self, url, payload):\n url = self.baseUrl + url\n logger.debug(\"POST %s\", url)\n with self.session.post(url, data=payload) as req:\n try:\n result = req.json()\n except json.decoder.JSONDecodeError as exc:\n raise errors.PluginError(\"no JSON in API response\") from exc\n if result[\"result\"] == \"success\":\n return result[\"data\"]\n raise errors.PluginError(\n \"error in API request: {} / {}\".format(\n result[\"error\"][\"code\"], result[\"error\"][\"description\"]\n )\n )", "def do_POST(self):\n self._try_to_process_request(self._handle_post_request)", "def _send(self, endpoint, method, data):\n\n if method == \"POST\":\n return requests.post(\n f\"{self.API_URL}{endpoint}\",\n headers=self.headers,\n cookies=self.cookies,\n json=data\n )\n else:\n raise ValueError(f\"supported methods are POST but given {method}\")", "def post(url, data={}, verify=True):\n data = requests.post(url=url, data=json.dumps(data), verify=verify)\n if data.status_code == 201:\n return data", "def _PostRequest(self, data=None):\n # requests will use about 3 times of data size's memory.\n req = requests.Request(\n 'POST',\n url=self._target_url,\n headers={'Multi-Event': 'True',\n 'Node-ID': str(self.GetNodeID())},\n files=data).prepare()\n clen = int(req.headers.get('Content-Length'))\n # Checks the size of request, and doesn't send if bigger than maximum size.\n if clen > self._max_bytes:\n return (413, 'Request Entity Too Large: The request is bigger '\n 'than %d bytes' % self._max_bytes, clen)\n resp = requests.Session().send(req, timeout=http_common.HTTP_TIMEOUT)\n if resp.headers['Maximum-Bytes']:\n self._max_bytes = int(resp.headers['Maximum-Bytes'])\n return resp.status_code, resp.reason, clen", "async def post(url, session=None, **kwargs):\n\n method = 'POST'\n resp = await _request(method, url, session=session, **kwargs)\n return resp", "def post(path, *params, **kwparams):\n def method(f):\n return config(f, 'POST', path, **kwparams)\n return method", "def post_request(self, path='', data=None, user=None, **kwargs):\n request = self.rf.post(path, data, **kwargs)\n request.user = user or self.super_user\n return request", "def post(self, url, payload={}):\n response = 
self._make_request(\"POST\", url, payload)\n\n return response", "def post(self, url, body):\n return self._query(url, 'POST', json=body)", "def post(self, url, obj):\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n return self.session.post(url, json=obj, verify=False)" ]
[ "0.7969932", "0.746994", "0.73935425", "0.73165405", "0.72691715", "0.7221908", "0.7146746", "0.713131", "0.71067417", "0.707506", "0.7030268", "0.7027897", "0.7024082", "0.7005868", "0.69719", "0.6945976", "0.69432557", "0.6938295", "0.6930771", "0.68950063", "0.6866864", "0.6850542", "0.68133587", "0.6799788", "0.6766979", "0.67667234", "0.6736962", "0.6731275", "0.6716904", "0.67100143", "0.6699788", "0.6699155", "0.6676452", "0.66635156", "0.6651574", "0.6643785", "0.66234696", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.6611914", "0.66088927", "0.6605222", "0.6597772", "0.65881556", "0.65876955", "0.65687746", "0.6536735", "0.6519007", "0.6513596", "0.65088654", "0.649877", "0.6478086", "0.64776653", "0.6474277", "0.6471573", "0.6471397", "0.64601755", "0.64319646", "0.6431298", "0.64291316", "0.6425009", "0.6419957", "0.63899374", "0.6382397", "0.6369379", "0.63625085", "0.6357426", "0.6357426", "0.6357426", "0.63333184", "0.6323954", "0.63184327", "0.6297131", "0.6287629", "0.62594", "0.6255983", "0.6237398", "0.62296104", "0.6224171", "0.62240404", "0.6223889", "0.6210837", "0.61771965", "0.6173577", "0.6173318", "0.61714685", "0.6168275", "0.61677736", "0.61673725" ]
0.7983252
0
Wrapper for requests PUT method
def _put(self, *args, **kwargs):
    return self._request('put', *args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def http_method_put():\n return 'PUT'", "def put(self, *args, **kwargs):\n self.request(\"put\", *args, **kwargs)", "def put(self, request, pk=None):\n\n return Response({'method': 'put'})", "def do_PUT(self,):\n self.http_method = 'PUT'\n # Nothing to do for now.\n pass", "def put(self, *args, **kw):\n kw['method'] = 'PUT'\n return self.open(*args, **kw)", "def http_put(self, **kwargs):\n return self.rabjcallable.put(**kwargs)", "def put(self, request, pk=None):\n return Response({'method': 'PUT'})", "def put(self, request, pk=None):\n return Response({'method': 'PUT'})", "def put(self, request, pk=None):\n return Response({'method': 'PUT'})", "def put(self, *args, **kwargs):\n return self.handle_put_request()", "def _put(self, path=\"\", **kwargs):\n uri = force_json(self.uri + path)\n return self.client.request(uri, method=\"PUT\", **kwargs)", "def put(self,request, pk =None):\n return Response({'method': 'PUT'})", "def put(self ,request, pk = None):\r\n\r\n return Response({'method ': 'put'})", "def api_put(self, *args, **kwargs):\n return self.api_put_with_response(*args, **kwargs)[0]", "def simulate_put(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'PUT', path, **kwargs)", "def update(self, request, pk=None):\n\n return Response({'http_method': 'PUT'})", "def _put(self, url, **kwargs):\n return self._call('PUT', url, kwargs)", "def put(self, request, pk=None):\n return Response({'method': 'patch'})", "def put(self, request, pk=None): #pk of id of objects to be updated (DB)\n return Response({'method':'PUT'})", "def aput(url, **kwargs):\n return requests.put(url, **kwargs)", "def put(self, url, body, headers={}):\n return self.request(url, \"PUT\", body, headers)", "def put(self, request, *args, **kwargs):\n verify_secure(request)\n return super().put(request, args, kwargs)", "def put(self, request, *args, **kwargs):\n verify_secure(request)\n return super().put(request, args, kwargs)", "def _put(self, path, data=None):\n headers = {'content-type': 'application/json'}\n if data:\n data = json.dumps(data)\n r = requests.put(self._url(path), data=data, headers=headers)\n assert r.status_code == 200\n return r", "def simulate_put(self, path='/', **kwargs) -> _ResultBase:\n return self.simulate_request('PUT', path, **kwargs)", "def test_put_method(self):\n self.getPage('/blah', method='PUT')\n self.assertStatus('200 OK')\n self.assertHeader('Content-Type', 'application/json')\n self.assertBody('{\"mystring\": \"blah\"}')", "def put(self, path, request):\n\n try:\n data = json_decode(request.body)\n self.interface_data.set(path, data)\n response = self.interface_data.get(path, False)\n status_code = 200\n except MetadataParameterError as e:\n response = {'error': str(e)}\n status_code = 400\n except (TypeError, ValueError) as e:\n response = {'error': 'Failed to decode PUT request body: {}'.format(str(e))}\n status_code = 400\n return ApiAdapterResponse(response, status_code=status_code)", "def simulate_put(self, path='/', **kwargs):\n return self.simulate_request('PUT', path, **kwargs)", "async def simulate_put(self, path='/', **kwargs) -> _ResultBase:\n return await self.simulate_request('PUT', path, **kwargs)", "def put(self,request,pk=None):\n return Response({'method':'Put'})", "def put(url, data=None, **kwargs):\n\n return request('put', url, data=data, **kwargs)", "def PUT(self):\n return json.dumps(self.update_new(cherrypy.request.body.read().strip()))", "def test_client_can_do_put_request(self):\n response = self.httpbin_4.test_requests_put_method()\n 
self.assertEqual(response.request.method, 'PUT')\n self.assertEqual(response.status_code, 200)", "def put(url, to_error=_default_to_error, data=None, **kwargs):\n\n return request('put', url, to_error=to_error, data=data, **kwargs)", "def update(self, request, pk=None): #update a specific object\n return Response({'http_method': 'PUT'})", "def handle_put(self, api, command):\n return self._make_request_from_command('PUT', command)", "def put(self, url, body):\n return self._query(url, 'PUT', json=body)", "def _put(self, url, data=None):\n url = urljoin(self.base_url, url)\n try:\n r = self._make_request(**dict(\n method='PUT',\n url=url,\n json=data,\n auth=self.auth,\n timeout=self.timeout,\n hooks=self.request_hooks,\n headers=self.request_headers\n ))\n except requests.exceptions.RequestException as e:\n raise e\n else:\n if r.status_code >= 400:\n _raise_response_error(r)\n return r.json()", "def update(self,request,pk = None):\n return Response({'http_method':'PUT'})", "def _put(self, url, data, extra_headers=None):\n headers = {'X-Requested-By': 'Unit Tests'}\n headers.update(extra_headers)\n return self.client.put(\n url,\n content_type='application/json',\n data=utils.as_json(data),\n headers=headers,\n )", "def test_PUT4(self):\n payload = {\n \"make\": \"Nissan\",\n \"model\": \"Skyline\",\n \"year\": 1999,\n \"price\": 2200\n }\n r = requests.put(self.address + \"/loremipsum/42\", json=payload)\n self.assertEqual(r.status_code, 400)", "def base_put(url_path, content):\n response = requests.put(url=settings.URL_API + url_path, json=content)\n return response", "def test_put(self):\n client = RestClient(host=self.host, username='')\n rest_url = 'some/url/'\n \n # Mock good response\n with responses.RequestsMock() as rsps:\n rsps.add(responses.PUT, f'{self.host}/{rest_url}', status=200,\n json={'value':\"good!\"})\n r = client.put(rest_url)", "def put_request(\n self,\n alias,\n uri,\n data=None,\n json=None,\n params=None,\n files=None,\n headers=None,\n allow_redirects=None,\n timeout=None):\n session = self._cache.switch(alias)\n data = utils.format_data_according_to_header(session, data, headers)\n # XXX workaround to restore library default behaviour. Not needed in new keywords\n redir = True if allow_redirects is None else allow_redirects\n\n response = self._common_request(\n \"put\",\n session,\n uri,\n data=data,\n json=json,\n params=params,\n files=files,\n headers=headers,\n allow_redirects=redir,\n timeout=timeout)\n\n return response", "def put(self, call, params={}): \n # Build an endpoint using the parameters...\n endpoint = self._calls[call](params)\n url = '{}/{}'.format(str(self), str(endpoint))\n return self.putter.respond(url)", "def put(self, endpoint: str, json: Any = None) -> Any:\n pass", "def do_put_request(self, uri, headers, payload, timeout_ms):\n return self._do_request('PUT', uri, headers, payload, timeout_ms, None)", "async def put(url, session=None, **kwargs):\n\n method = 'PUT'\n resp = await _request(method, url, session=session, **kwargs)\n return resp", "def update(self, request, pk=None):\n return Response({'http_method': 'PUT'})", "def put(self, **kwargs):\n if not hasattr(self, \"_put\"):\n flask_restful.abort(405, message=f\"Method not allowed\")\n self.is_html = False # pylint: disable=attribute-defined-outside-init\n try:\n # We are using kwargs, since in the super class here we don't know the name of the\n # ID parameter (user_id, ticket_id, etc.). The concrete sanity_check() and _put()\n # implementation know. 
The id parameter name there is matched to the id name\n # specified in the URL.\n kwargs['data'] = flask.request.json\n if not kwargs['data']:\n raise Exception(\"expected request data\")\n # self.__class__ at this point will be a child class, which actually implements\n # sanity_check(). We don't want pylint to complain, so allow an exception.\n # pylint: disable=no-member\n kwargs['data'], obj = self.__class__.sanity_check(**kwargs)\n # _put is defined in the child class, only. We don't want pylint to complain, so\n # we allow an exception.\n # pylint: disable=no-member\n _ = self._put(obj=obj, **kwargs)\n resp = flask.make_response({\"msg\" : \"Ok\"})\n return resp\n except ValueError as ex:\n flask_restful.abort(400, message=f\"Bad Request - {str(ex)}\")", "def PUT(self):\n return json.dumps(self.create_new(cherrypy.request.body.read().strip()))", "def put(self, path, request):\n\n content_type = 'application/json'\n\n try:\n data = json_decode(request.body)\n self.fileInterface.set(path, data)\n response = self.fileInterface.get(path)\n status_code = 200\n except FileInterfaceError as e:\n response = {'error': str(e)}\n status_code = 400\n except (TypeError, ValueError) as e:\n response = {'error': 'Failed to decode PUT request body: {}'.format(str(e))}\n status_code = 400\n\n logging.debug(response)\n\n return ApiAdapterResponse(response, content_type=content_type,\n status_code=status_code)", "def put(self,id):\r\n data = request.json\r\n return update(id=id,data=data)", "def put(call,\n headers=None,\n data=None,\n base=cloudBase,\n no_headers=False,\n raw=False,\n **kwargs): # pragma: no cover\n return _call(method=requests.put,\n call='{0}{1}'.format(base, call),\n headers=headers,\n no_headers=no_headers,\n data=data,\n raw=raw,\n **kwargs)", "def put(self):\n if request.method == 'POST':\n print(request.json)\n return request.args\n else:\n return 400", "def test_put(self):\n url, port = self.server.address\n\n #couple of basic POSTs\n r = self.client.get(\"http://{0}:{1}/\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/200\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/400\".format(url, port))\n self.assertEqual(400, r.status_code)\n\n r = self.client.put(\"http://{0}:{1}/400?foo=bar\".format(url, port))\n self.assertEqual(400, r.status_code)", "def update(self, request, pk=None):\n\n return Response({'http_method':'PUT'})", "def put(self, url_pattern):\n return self.route(url_pattern, methods=['PUT'])", "def put_http(self, path, data, content_type):\n req_url = self.normalize_cdmi_url(path)\n headers = {\"user-agent\": self.u_agent, \"Content-type\": content_type}\n res = requests.put(\n req_url, headers=headers, auth=self.auth, data=data, verify=False\n )\n if res.status_code in [400, 401, 403, 404, 406]:\n return Response(res.status_code, res)\n return Response(0, res)", "def put(url, data={}, verify=True):\n data = requests.put(url=url, data=data, verify=verify)\n if data.status_code == 200:\n return data", "def PUT(self, req):\n if req.is_object_request:\n headers = {}\n src_path = '/%s/%s' % (req.container_name, req.object_name)\n\n # object-sysmeta' can be updated by 'Copy' method,\n # but can not be by 'POST' method.\n # So headers['X-Copy-From'] for copy request is added here.\n headers['X-Copy-From'] = quote(src_path)\n headers['Content-Length'] = 0\n 
req.get_response(self.app, 'PUT', headers=headers)\n else:\n req.get_response(self.app, 'POST')\n\n return HTTPOk()", "def put(self, path: str) -> Response:\n endpoint_ = checkEndpoint(\"PUT\", path)\n if not endpoint_[\"method\"]:\n # If endpoint and PUT method is not supported in the API\n abort(endpoint_[\"status\"])\n # If 'instances' is available in request\n params = request.args.to_dict()\n object_ = json.loads(request.data.decode(\"utf-8\"))\n if params.get(\"instances\") or object_.get(\"data\"):\n int_list = params.get(\"instances\")\n return items_put_response(path, int_list)\n return item_collection_put_response(path)", "def put(path, *params, **kwparams):\n def method(f):\n return config(f, 'PUT', path, **kwparams)\n return method", "def do_PUT(self):\n content_len = int(self.headers.get('content-length', 0))\n post_body = self.rfile.read(content_len)\n post_body = json.loads(post_body)\n (resource, id) = self.parse_url(self.path)\n\n success = False\n\n if resource == \"categories\":\n success = update_category(id, post_body)\n if resource == \"comments\":\n success = edit_comment(id, post_body)\n if resource == \"posts\":\n success = update_post(id, post_body)\n\n if success:\n self._set_headers(204)\n else:\n self._set_headers(404)\n\n self.wfile.write(\"\".encode())", "def put(api, url, payload, headers=None, auth=_KERBEROS_AUTH, proxies=None,\n retries=_NUM_OF_RETRIES, timeout=None):\n return call(api, url, 'put', payload=payload,\n headers=headers, auth=auth, proxies=proxies, retries=retries,\n timeout=timeout)", "def putrequest(self, full_path, data=None, headers=None, files=None):\n return self.request('PUT', full_path, data, headers, files)", "def put(self, url, data=None):\r\n response = self.requestHelper.put(url, data=data)\r\n return self.process(response)", "def PUT(url, mime_type='text/html'):\n def_app = DefaultHttpRequestHandler()\n return def_app.RequestMap(url, methods=['PUT'], produces=mime_type)", "def _do_put(self, uri, **kwargs):\n #TODO:\n # Add error handling. 
Check for HTTP status here would be much more conveinent than in each calling method\n scaleioapi_put_headers = {'content-type':'application/json'}\n print \"_do_put()\"\n if kwargs:\n for key, value in kwargs.iteritems():\n #if key == 'headers':\n # scaleio_post_headers = value\n # print \"Adding custom PUT headers\"\n if key == 'json':\n payload = value\n try:\n self.logger.debug(\"do_put(): \" + \"{}\".format(uri))\n\n #self._session.headers.update({'Content-Type':'application/json'})\n response = self._session.put(url, headers=scaleioapi_put_headers, verify_ssl=self._im_verify_ssl, data=json.dumps(payload))\n self.logger.debug(\"_do_put() - Response: \" + \"{}\".format(response.text))\n if response.status_code == requests.codes.ok:\n return response\n else:\n self.logger.error(\"_do_put() - HTTP response error: \" + \"{}\".format(response.status_code))\n raise RuntimeError(\"_do_put() - HTTP response error\" + response.status_code)\n except:\n raise RuntimeError(\"_do_put() - Communication error with ScaleIO gateway\")\n return response", "def put(self, path: str, params):\n return self.request(\"PUT\", path, params)", "def fusion_api_generic_put(self, body, uri, api=None, headers=None):\n if api:\n headers = self.fusion_client._set_req_api_version(api=api)\n elif not headers:\n headers = self.fusion_client._headers\n uri = 'https://%s%s' % (self.fusion_client._host, uri)\n return self.fusion_client.put(uri=uri, headers=headers, body=json.dumps(body))", "def put(self, api_path, *args, **kwargs):\n\n\t\treturn self._do_operation(u'put', api_path, *args, **kwargs)", "def update(self, request, pk=None):\n\n return Response({'http_method': 'PUT'})", "def put(path: str, data={}):\n token = get_token()\n headers = {\n \"Authorization\": f\"Bearer {token}\"\n }\n return requests.put(get_base_url() + path, headers=headers, json=data)", "def putRequest(self, resource, data):\n\n try:\n req = self._put(self.fullUrl(resource), json=data)\n return req\n except requests.exceptions.RequestException as err:\n print('request failed:', err)\n return None", "def test_PUT(self):\n if not self.url:\n return\n response = self.client.put(self.url, {}, format='json')\n self.assertIn(response.status_code, [status.HTTP_405_METHOD_NOT_ALLOWED,\n status.HTTP_401_UNAUTHORIZED])", "def put(self, key, headers, value, metadata=None):", "def put(self, request, pk):\n return self.update(request, pk)", "def putRequestSimple(self, resource):\n\n req = self.s.put(self.fullUrl(resource))\n return req.json()", "def put(self, url, payload={}):\n response = self._make_request(\"PUT\", url, payload)\n\n return response", "def put(self, path, payload):\n \n # prep\n put_url = self.url(path)\n json_payload = json.dumps(payload)\n\n # request\n response = requests.put(put_url, data=json_payload, auth=self.auth, headers=API.HEADERS)\n\n # test and return\n self.raise_for_status(response)\n return response.json()", "def put(self, path, body):\n url = urljoin(self.api_endpoint, path)\n response = requests.put(url, json=body, headers=self.headers)\n return self._check_response(response)", "def do_PUT(self):\n note_details = NoteDetails\n if self.path == '/note/api/update':\n response_data=note_details.update_data(self)\n Response(self).jsonResponse(status=200, data=response_data)", "async def put(self, resource: str, data: Optional[Sequence]):\r\n return await self.request(\"put\", self._api_base + resource, data=data)", "def put(self, path, data):\n response = self._create_connection(\"PUT\", path, json.dumps(data))\n return 
self._handle_response(response)", "def put(self, app_prefix, path, payload):\n return self.handle_request('put', app_prefix, path, payload=payload)", "def put(self, api_path, *args, **kwargs):\n\n return self._do_operation(u'put', api_path, *args, **kwargs)", "def put(self, request, pk):\n return self.post(request, pk)", "def test_kyc_put_request(self):\n pass", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def _put(self, path=None, version=None, params=None,\n data=None, json=None, header=None):\n return self.client.put(module='mam', path=path, version=version,\n params=params, data=data,\n json=json, header=header)", "def put(self, endpoint, params=None, data=None):\n params = params or dict()\n data = data or dict()\n return self.request(verb=requests.put, address=self.project_address + endpoint,\n params=params, data=data)", "def put(call, data, hub_token_header=True, base=apiPath, **kwargs):\n return _call(method=requests.put,\n call='{0}{1}'.format(base, call),\n hub_token_header=hub_token_header,\n data=data,\n **kwargs)", "def update():\n return 'update api in put'", "def _put(self, url, json=None, **kwargs):\n kwargs = Connection._prepare_json_payload(json, **(kwargs or {}))\n return self._http.put(self.cluster + url, timeout=self.timeout, **kwargs)", "def PUT(self, req):\n xml = req.xml(MAX_PUT_BUCKET_REFERER_SIZE)\n if xml:\n # check referer\n try:\n elem = fromstring(xml, 'RefererConfiguration')\n allow_empyt_referer=elem.find('AllowEmptyReferer').text\n if allow_empyt_referer not in ['true','false']:\n raise InvalidArgument()\n referer_list=elem.find('RefererList')\n\t\tswift_referers=[]\n for referer in referer_list.findall('Referer'):\n\t swift_referers.append(referer.text)\n\t\tif len(swift_referers)==0 :\n\t\t req.headers['X-Container-Read']=' '\n\t\telse:\n req.headers['X-Container-Read'] = '.r:'+','.join(get_real_url(swift_referers))\n except (XMLSyntaxError, DocumentInvalid):\n raise MalformedXML()\n except Exception as e:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n LOGGER.error(e)\n raise exc_type, exc_value, exc_traceback\n resp = req.get_response(self.app)\n resp.status = HTTP_OK\n return resp", "def view_put():\n\n return jsonify(\n get_dict(\"url\", \"args\", \"form\", \"data\", \"origin\", \"headers\", \"files\", \"json\")\n )", "def reply(self, obj):\r\n request = http.Request('PUT', self.get_url(), self.wrap_object(obj))\r\n\r\n return request, parsers.parse_json", "def put(self, data, **kwargs):\n return put.put(self._host, self._session, data, **kwargs)", "def Put(Path: str, Params: Any = None, Data: Any = None, *, Callback: Callable[[int, Dict[str, Any]], None]) -> None:\n Request(\"PUT\", Path, Params, Data, Callback=Callback)" ]
[ "0.7939011", "0.792131", "0.78856367", "0.7825104", "0.7805697", "0.77722734", "0.76859236", "0.76859236", "0.76859236", "0.7676738", "0.7652082", "0.7603654", "0.7537342", "0.7523113", "0.74227804", "0.7417111", "0.74126923", "0.7345135", "0.7330537", "0.73288274", "0.72923994", "0.7239812", "0.7239812", "0.72376925", "0.7212337", "0.71974623", "0.7172811", "0.7167863", "0.7164798", "0.7163827", "0.7133627", "0.71187997", "0.7071923", "0.70691717", "0.70513886", "0.7036622", "0.70126", "0.69846916", "0.6950818", "0.6945014", "0.69270676", "0.6910014", "0.6909887", "0.6891075", "0.6863284", "0.68614566", "0.68586516", "0.68546104", "0.68528706", "0.68376756", "0.6834626", "0.68335855", "0.6818356", "0.6814245", "0.68121624", "0.68026894", "0.6792153", "0.67834765", "0.6780137", "0.67703027", "0.67673296", "0.67617667", "0.67585117", "0.67550415", "0.67507917", "0.67467034", "0.67437255", "0.67286104", "0.6726934", "0.67043066", "0.6699227", "0.6690273", "0.6633082", "0.6606365", "0.6594475", "0.6588902", "0.65861374", "0.6572125", "0.65620494", "0.6555663", "0.65519714", "0.65322053", "0.6495719", "0.6493743", "0.6488166", "0.6486257", "0.648281", "0.64733684", "0.64621043", "0.645588", "0.64374286", "0.64072776", "0.6376797", "0.6376399", "0.635634", "0.63226527", "0.6312526", "0.63015366", "0.62947553", "0.6289327" ]
0.82278174
0
Wrapper for requests DELETE method
def _delete(self, *args, **kwargs):
    return self._request('delete', *args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def http_delete(self, **kwargs):\n return self.rabjcallable.delete(**kwargs)", "def _delete(self, url):\n return self._request(url, method=\"DELETE\")", "def _delete(self, url, **kwargs):\n return self._call('DELETE', url, kwargs)", "def delete(self, method, uri, query_param, request_param, headers, **kwargs):\n raise NotImplementedError", "def delete(url, **kwargs):\n\n return request('delete', url, **kwargs)", "def httpDelete(self, url='', data='', params={}, headers={}):\n\n return self.httpRequest('DELETE', url, data, params, headers)", "def _delete(self, path=\"\", **kwargs):\n uri = force_json(self.uri + path)\n return self.client.request(uri, method=\"DELETE\", **kwargs)", "def delete(self):\r\n request = http.Request('DELETE', self.get_url())\r\n\r\n return request, parsers.parse_empty", "def delete(self):\r\n return http.Request('DELETE', '{0}'.format(\r\n self.get_url())), parsers.parse_json", "def do_DELETE(self,):\n self.http_method = 'DELETE'\n self.response()", "def delete(self, url):\n return self.request(url, \"DELETE\")", "def delete(self, *args, **kwargs):\n self.request(\"delete\", *args, **kwargs)", "def __delete(self, url, headers=None):\n return self.__req(url, \"DELETE\", headers=headers)", "def delete(url, to_error=_default_to_error, **kwargs):\n\n return request('delete', url, to_error=to_error, **kwargs)", "def api_delete(self, *args, **kwargs):\n return self.api_delete_with_response(*args, **kwargs)[0]", "def adel(url, **kwargs):\n return requests.delete(url, **kwargs)", "def delete(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"delete\"), kwargs)", "def delete(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"delete\"), kwargs)", "def delete(self, url):\n return self._request('DELETE', url)", "def base_delete(url_path):\n response = requests.delete(url=settings.URL_API + url_path)\n return response", "def delete(self):\r\n self.require_collection()\r\n request = http.Request('DELETE', self.get_url())\r\n\r\n return request, parsers.parse_empty", "def _delete(self, url, **kwargs):\n return self._http.delete(self.cluster + url, timeout=self.timeout, **kwargs)", "def delete(self, *args, **kw):\n kw['method'] = 'DELETE'\n return self.open(*args, **kw)", "def delete(self, request , pk=None): \n return Response({'message':'DELETE'})", "def test_delete_method(self):\n self.getPage('/blah', method='PUT')\n self.getPage('/', method='DELETE')\n self.assertStatus('204 No Content')\n self.assertHeader('Content-Type', 'application/json')", "def api_delete(self, path):\n return self._api_request(path, 'DELETE')", "def _delete(self, url):\n url = urljoin(self.base_url, url)\n try:\n r = self._make_request(**dict(\n method='DELETE',\n url=url,\n auth=self.auth,\n timeout=self.timeout,\n hooks=self.request_hooks,\n headers=self.request_headers\n ))\n except requests.exceptions.RequestException as e:\n raise e\n else:\n if r.status_code >= 400:\n _raise_response_error(r)\n if r.status_code == 204:\n return\n return r.json()", "def destroy(self, request, pk=None):\n\n return Response({'http_method': 'DELETE'})", "def simulate_delete(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'DELETE', path, **kwargs)", "def delete(self, *args, **kwargs):\n return self.handle_delete_request()", "async def delete(self, delete: TPayload) -> None:", "def simulate_delete(self, path='/', **kwargs) -> _ResultBase:\n return self.simulate_request('DELETE', path, **kwargs)", "def test_client_can_do_delete_request(self):\n response 
= self.httpbin_4.test_requests_delete_method()\n self.assertEqual(response.request.method, 'DELETE')\n self.assertEqual(response.status_code, 200)", "def delete(self, call, params={}): \n # Build an endpoint using the parameters...\n endpoint = self._calls[call](params)\n url = '{}/{}'.format(str(self), str(endpoint))\n return self.deleter.respond(url)", "async def simulate_delete(self, path='/', **kwargs) -> _ResultBase:\n return await self.simulate_request('DELETE', path, **kwargs)", "def simulate_delete(self, path='/', **kwargs):\n return self.simulate_request('DELETE', path, **kwargs)", "def delete(self):\n return self.request('', pylastica.request.Request.DELETE)", "def send_delete(url, data={}, headers={}, return_output=False):\n req = requests.delete(url=url, data=json.dumps(data), headers=headers)\n if return_output:\n return req\n if str(req.status_code).startswith('2'):\n print 'SUCCESS! {0} {1} {2}'.format(req.status_code, req.reason, req.content)\n else:\n print 'FAIL! {0} {1} {2}'.format(req.status_code, req.reason, req.content)\n exit(77)", "def _delete_request(self, url):\n url = self.baseUrl + url\n logger.debug(\"DELETE %s\", url)\n with self.session.delete(url) as req:\n try:\n result = req.json()\n except json.decoder.JSONDecodeError as exc:\n raise errors.PluginError(\"no JSON in API response\") from exc\n if result[\"result\"] == \"success\":\n return result[\"data\"]\n raise errors.PluginError(\n \"error in API request: {} / {}\".format(\n result[\"error\"][\"code\"], result[\"error\"][\"description\"]\n )\n )", "def delete(self):\n self.request().delete()", "def delete(self):\n self.method = \"DELETE\"\n self.send()", "def delete_request(\n self,\n alias,\n uri,\n data=None,\n json=None,\n params=None,\n headers=None,\n allow_redirects=None,\n timeout=None):\n session = self._cache.switch(alias)\n data = utils.format_data_according_to_header(session, data, headers)\n # XXX workaround to restore library default behaviour. 
Not needed in new keywords\n redir = True if allow_redirects is None else allow_redirects\n\n response = self._common_request(\n \"delete\",\n session,\n uri,\n data=data,\n json=json,\n params=params,\n headers=headers,\n allow_redirects=redir,\n timeout=timeout)\n\n return response", "def delete(self, url: str, params: Dict[str, Any] = None, headers: Dict[str, Any] = None) -> Response:\n return self._api_client._delete(url, params=params, headers=headers)", "def delete(self, params=None):\n params = self.parameters(additional_parameters=params)\n res = delete(self.endpoint_url, params=params)\n return Response(res)", "def destroy(self,request,pk = None):\n return Response({'http_method':'DELETE'})", "async def delete(url, session=None, **kwargs):\n\n method = 'DELETE'\n resp = await _request(method, url, session=session, **kwargs)\n return resp", "def delete(self, path: str, params: dict):\n return self.request(\"DELETE\", path, params)", "def delete(api, url, payload=None, headers=None, auth=_KERBEROS_AUTH,\n proxies=None, retries=_NUM_OF_RETRIES, timeout=None):\n return call(api, url, 'delete', payload=payload,\n headers=headers, auth=auth, proxies=proxies, retries=retries,\n timeout=timeout)", "def delete(self, url):\n return self._query(url, 'DELETE')", "def delete(resource, params=None, expected_status_code=204, user=user_data):\n return call(requests.delete, resource, expected_status_code, user, params=params)", "def delete(self, request, *args, **kwargs):\n verify_secure(request)\n return super().delete(request, args, kwargs)", "def delete(self, request, *args, **kwargs):\n verify_secure(request)\n return super().delete(request, args, kwargs)", "def handle_delete(self, api, command):\n return self._make_request_from_command('DELETE', command)", "def delete(self) -> requests.request:\n # Check if id is set\n if self.args.id is None:\n raise Exception('Provide id of asset you want to delete')\n\n # Send DELETE request\n return requests.delete(self.REQUEST_URL + str(self.args.id))", "def do_delete_request(self, uri, headers, timeout_ms):\n return self._do_request('DELETE', uri, headers, None, timeout_ms, None)", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self):\n\n headers = self._default_headers()\n\n return self._request(self.name,\n ok_status=None,\n data=None,\n headers=headers,\n method=\"DELETE\")", "def delete(self, *args, **kwargs) -> Any:\n pass", "def destroy(self, request, pk=None):\n return Response({'http_method':'DELETE'})", "def delete(self, request, *args, **kwargs):\n self.object = self.get_object()\n self.object.delete()\n return JsonResponse({'status': 'ok'})", "def test_delete(self):\n client = RestClient(host=self.host, username='')\n rest_url = 'some/url/'\n \n # Mock good response\n with responses.RequestsMock() as rsps:\n rsps.add(responses.DELETE, f'{self.host}/{rest_url}', status=200,\n json={'value':\"good!\"})\n r = 
client.delete(rest_url)", "def delete(self, url_pattern):\n return self.route(url_pattern, methods=['DELETE'])", "def __Delete(self, url, id = None):\n\n conn = self.__GetConnection()\n if (id != None):\n url += \"/\" + str(id)\n conn.request(\"DELETE\", url, \"\", self.__MakeHeaders(True))\n response = conn.getresponse()\n self.__CheckResponse(response)", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json", "def delete(cls, uri):\n return cls._perform_request(uri, 'DELETE')", "def destroy(self, request, pk=None): #delete a specific object\n return Response({'http_method': 'DELETE'})", "def delete(self):\n self.call('DELETE', expect=error.NO_CONTENT)", "def DELETE(url, mime_type='text/html'):\n def_app = DefaultHttpRequestHandler()\n return def_app.RequestMap(url, methods=['DELETE'], produces=mime_type)", "def revoke(self):\r\n return http.Request('DELETE', self.get_url()), parsers.parse_empty", "def delete(self, request, nnid, wfver):\n try:\n return_data = \"\"\n return Response(json.dumps(return_data))\n except Exception as e:\n return_data = {\"status\": \"404\", \"result\": str(e)}\n return Response(json.dumps(return_data))", "def _api_delete(self, query):\n if not isinstance(query, list):\n query = [query]\n\n req = list()\n for q in query:\n r = requests.delete(self._url + q, headers={'Content-Type': 'application/json',\n 'Accept': 'application/json'}, auth=self._auth,\n timeout=self._request_timeout_secs)\n r.raise_for_status()\n req.append(r)\n return req", "def delete(self, url, payload={}):\n response = self._make_request(\"DELETE\", url, payload)\n\n return response", "def sendDeleteRequest(self, url:str, originator:str, parameters:Parameters=None, ct:ContentSerializationType=None, targetResource:Resource=None) -> Result:\n\t\tif Utils.isHttpUrl(url):\n\t\t\tCSE.event.httpSendDelete() # type: ignore\n\t\t\treturn CSE.httpServer.sendHttpRequest(requests.delete, url, originator, parameters=parameters, ct=ct, targetResource=targetResource)\n\t\tLogging.logWarn(dbg := f'unsupported url scheme: {url}')\n\t\treturn Result(status=True, rsc=RC.badRequest, dbg=dbg)", "def do_DELETE(self):\n note_details = NoteDetails\n if self.path == '/note/api/delete':\n response_data=note_details.delete_data(self)\n Response(self).jsonResponse(status=200, data=response_data)", "def test_DELETE3(self):\n r = requests.delete(self.address + 
\"/cars/42\")\n self.assertEqual(r.status_code, 400)", "def test_delete():\n sample_uuid = get_sample_id()\n response = requests.delete(f'http://localhost:5000/api/persons/{sample_uuid}')\n\n assert response.status_code == 200", "def delete(self):\r\n request = http.Request('DELETE', self.get_push_url(), None)\r\n return request, parsers.parse_empty", "def test_DELETE(self):\n if not self.url:\n return\n response = self.client.delete(self.url, {}, format='json')\n self.assertIn(response.status_code, [status.HTTP_405_METHOD_NOT_ALLOWED,\n status.HTTP_401_UNAUTHORIZED])", "def DELETE(self, req):\r\n req.headers['X-Remove-Container-Meta-Access-Control-Allow-Origin'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Allow-Methods'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Allow-Headers'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Expose-Headers'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Max-Age'] = 'x'\r\n\r\n resp = req.get_response(self.app, method='POST', headers=req.headers)\r\n\r\n return resp", "def delete(self, path):\n \n # prep\n delete_url = self.url(path)\n\n # request\n response = requests.delete(delete_url, auth=self.auth, headers=API.HEADERS)\n\n # test and return\n self.raise_for_status(response)", "def delete(self, data):\r\n pass", "def delete(path: str):\n token = get_token()\n headers = {\n \"Authorization\": f\"Bearer {token}\"\n }\n return requests.delete(get_base_url() + path, headers=headers)", "def destroy(self, request, pk=None):\n\n return Response({'http_method':'DELETE'})", "def delete(self, _id):", "def delete(self, _uri):\n print(\"Deleting '%s'\"%(_uri))\n response = self.__httpsRequest('DELETE', _uri, '')", "def delete(self, path):\n req_url = self.normalize_cdmi_url(path)\n res = requests.delete(req_url, auth=self.auth, verify=False)\n if res.status_code == 204:\n return Response(0, \"ok\")\n else:\n return Response(res.status_code, res)", "def Delete(self, request, global_params=None):\n config = self.GetMethodConfig('Delete')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Delete(self, request, global_params=None):\n config = self.GetMethodConfig('Delete')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Delete(self, request, global_params=None):\n config = self.GetMethodConfig('Delete')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Delete(self, request, global_params=None):\n config = self.GetMethodConfig('Delete')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Delete(self, request, global_params=None):\n config = self.GetMethodConfig('Delete')\n return self._RunMethod(\n config, request, global_params=global_params)" ]
[ "0.80676454", "0.805195", "0.7881113", "0.78649473", "0.7842438", "0.78090477", "0.78055274", "0.7791732", "0.77519745", "0.76926935", "0.7613614", "0.7541842", "0.7470509", "0.7459804", "0.74210435", "0.7401509", "0.73857987", "0.73857987", "0.7379348", "0.7372078", "0.7366816", "0.73357296", "0.7335039", "0.73283875", "0.73261374", "0.73103845", "0.72918665", "0.72834533", "0.7273474", "0.7267811", "0.7241288", "0.72154105", "0.72047067", "0.7172033", "0.7152651", "0.7139227", "0.7120293", "0.7022547", "0.7013579", "0.70131946", "0.70126736", "0.6988896", "0.69835275", "0.69749993", "0.6974546", "0.6964688", "0.694939", "0.6945573", "0.694433", "0.6940189", "0.69341856", "0.69341856", "0.692962", "0.69234353", "0.69094026", "0.6885926", "0.6885926", "0.6885926", "0.6885926", "0.6882632", "0.68792987", "0.68744767", "0.6873345", "0.6870493", "0.68596303", "0.68512625", "0.68454266", "0.68454266", "0.68454266", "0.68454266", "0.68454266", "0.68454266", "0.68454266", "0.6817396", "0.6815827", "0.68142986", "0.678725", "0.67740124", "0.676222", "0.6739673", "0.6734224", "0.67176634", "0.6706825", "0.669032", "0.6682473", "0.6669364", "0.6657841", "0.6654745", "0.6651512", "0.6649021", "0.66363", "0.663133", "0.66269135", "0.6625562", "0.66204226", "0.6609075", "0.6609075", "0.6609075", "0.6609075", "0.6609075" ]
0.8166634
0
Load JSON as a protobuf (pb2) object. Any calls to load protobuf objects from JSON in this repository should be through this function. Returns `None` if the loading failed.
def open_pbobject(path, pb_class): assert path.endswith(".json"), 'File extension for {} needs to be json.'.format(path) if path.startswith('s3://'): return open_remote_pb_object(path, pb_class) assert os.path.exists(path), f'Path not found: {path}' with open(path, 'r', encoding='UTF-8') as json_file: pb_object = Parse(json_file.read(), pb_class()) return pb_object
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load(self):\n with io.open(self.filename, encoding='utf-8') as f:\n self.load_from_dict(json.loads(f.read()))", "def load_json(json_str):\n return _api_internal._load_json(json_str)", "def _localloadjson(path: str) -> JSONType:\n with open(path, encoding=\"utf-8\") as fh:\n return json.load(fh)", "def load(self):\n data = None\n try:\n with open(self.__filepath, 'r') as file:\n text = file.read()\n data = jsonpickle.decode(text)\n except FileNotFoundError:\n data = None\n except IOError as e:\n print(e)\n return data", "def json_loader(filename):\n\n with open(filename, \"r\", encoding=\"UTF-8\") as source:\n data = json.load(source, object_hook=object_decode)\n return data", "def _remoteloadjson(path: str) -> JSONType:\n return json.loads(request.urlopen(path).read())", "def _load(self, json_str, filepath):\n # pylint: disable=protected-access\n return self.json_o._load(json_str, filepath)", "def load_json(self, json_path=None):\n if json_path is None:\n json_path = self.json_path\n with open(json_path, encoding='utf-8', mode='r') as f:\n data = json.load(f)\n return data", "def load_json(self):\n\n self.load_json_str(self.get_json_str())", "def load(self):\n if not self.exist:\n self.create()\n\n with open(self.file_path, encoding=Config.ENCODING) as file:\n self.data = json.load(file)", "def object_from_protobuf(pb, model_type=None):\n key = None\n if isinstance(pb, entity_pb2.Entity):\n pb = pb._pb\n\n if pb.HasField(\"key\"): # Message field (Key)\n key = CustomIterator.key_from_protobuf(pb.key)\n key._type = SubclassMap.get()[key.kind]\n\n entity_props = {}\n\n for prop_name, value_pb in pb.properties.items():\n value = CustomIterator._get_value_from_value_pb(value_pb)\n entity_props[prop_name] = value\n\n obj = model_type._dotted_dict_to_object(entity_props, key)\n return obj", "def from_json(cls, b):\n return cls.from_dict(json.loads(b))", "def load_json(json_string):\n return json.loads(json_string)", "def load(path: str) -> \"DataDescriptor\":\n\n\t\twith open(path, \"r\") as f:\n\t\t\tinfo_dict = json.load(f)\n\n\t\treturn DataDescriptor(\n\t\t\tn_gram_size=int(info_dict[\"n_gram_size\"]),\n\t\t\tcaseless=bool(info_dict[\"caseless\"]),\n\t\t\tignore_punctuation=bool(info_dict[\"ignore_punctuation\"]),\n\t\t\tadd_pos_tags=bool(info_dict[\"add_pos_tags\"]),\n\t\t\tuses_lemma=bool(info_dict[\"uses_lemma\"]),\n\t\t\tuses_sentences=bool(info_dict[\"uses_sentences\"])\n\t\t)", "def util_load_json(path):\n with io.open(path, mode=\"r\", encoding=\"utf-8\") as f:\n return json.loads(f.read())", "def loadFromFile(self, filename):\n with open(filename, 'r') as file:\n raw_data = file.read()\n # data = json.loads(raw_data, encoding='utf-8') # python 3.9 suppression de encoding\n try:\n data = json.loads(raw_data)\n self.deserialize(data)\n self.has_been_modified = False\n except json.JSONDecodeError:\n raise InvalidFile(f'{os.path.basename(filename)} is not a valid JSON file')\n except Exception as e:\n dumpException(e)", "def load_from_json(path):\n fh = open(path)\n contents = fh.read()\n fh.close()\n\n return json.loads(contents)", "def load_json_obj(path: str) -> RAW_CFG:\n with fsspec.open(path) as json_file:\n return json.load(json_file)", "def load_json(filepath: str):\n if not filepath:\n return None\n\n abs_path = _resolve_relative_path(filepath)\n with open(abs_path) as f:\n raw_json = f.read()\n\n return json.loads(raw_json)", "def load_from_json_file(filename):\n with open(filename, 'r') as f:\n obj = json.loads(f.read())\n return obj", "def from_json(_json):\n if 
isinstance(_json, (str, Path)):\n return from_json_dict(json.loads(_json))\n else: # assume a file-like object\n return from_json_dict(json.load(_json))", "def json_load(fp):\n with _iotools.open_file(fp, \"r\") as f:\n return json.load(f, cls=DataDecoder)", "def load_from_json(filename):\n\n with open(filename, 'r') as file:\n return json.load(file)", "def load(self, loadpath=None):\n\n if loadpath:\n with open(loadpath, mode='r') as f:\n self.update(json.load(f))", "def load_json(value):\n try:\n return json.loads(value)\n except json.JSONDecodeError as e:\n raise JSONDecodeError(e) from e", "def _load_message(self,\n message_pb: descriptor_pb2.DescriptorProto,\n address: metadata.Address,\n path: Tuple[int],\n resources: Mapping[str, wrappers.MessageType],\n ) -> wrappers.MessageType:\n address = address.child(message_pb.name, path)\n\n # Load all nested items.\n #\n # Note: This occurs before piecing together this message's fields\n # because if nested types are present, they are generally the\n # type of one of this message's fields, and they need to be in\n # the registry for the field's message or enum attributes to be\n # set correctly.\n nested_enums = self._load_children(\n message_pb.enum_type,\n address=address,\n loader=self._load_enum,\n path=path + (4,),\n resources=resources,\n )\n nested_messages = self._load_children(\n message_pb.nested_type,\n address=address,\n loader=self._load_message,\n path=path + (3,),\n resources=resources,\n )\n\n oneofs = self._get_oneofs(\n message_pb.oneof_decl,\n address=address,\n path=path + (7,),\n )\n\n # Create a dictionary of all the fields for this message.\n fields = self._get_fields(\n message_pb.field,\n address=address,\n path=path + (2,),\n oneofs=oneofs,\n )\n fields.update(self._get_fields(\n message_pb.extension,\n address=address,\n path=path + (6,),\n oneofs=oneofs,\n ))\n\n # Create a message correspoding to this descriptor.\n self.proto_messages[address.proto] = wrappers.MessageType(\n fields=fields,\n message_pb=message_pb,\n nested_enums=nested_enums,\n nested_messages=nested_messages,\n meta=metadata.Metadata(\n address=address,\n documentation=self.docs.get(path, self.EMPTY),\n ),\n oneofs=oneofs,\n )\n return self.proto_messages[address.proto]", "def testLoadProtojsonWithValidJsonModule(self):\n sys.modules['json'] = ValidJsonModule\n\n # This will cause protojson to reload with the default json module\n # instead of simplejson.\n reload(protojson)\n self.assertEquals('json', protojson.json.name)", "def load_from_json(self, json_fp: str):\n # TODO:\n pass", "def load(filename):\n\n try:\n with open(filename) as data:\n return json.load(data)\n except:\n return None", "def load(cls, path):\n\n with open(path) as f:\n d = json.load(f, object_pairs_hook=OrderedDict)\n return cls.from_definition(d)", "def load(cls, path):\n\n with open(path) as f:\n d = json.load(f, object_pairs_hook=OrderedDict)\n return cls.from_definition(d)", "def load(self, path):\n with open(path, \"rt\") as open_file:\n data = json.load(open_file)\n return data", "def from_json(path: str):\n with open(path) as f:\n return json.load(f)", "def _load(self):\n if self.file_path.exists():\n with open(self.file_path) as fid:\n self.data = json.load(fid)", "def LoadJson(path):\n #NOTE(g): Import is done here, instead of the top of the file, to not require this module if it is not used\n import json\n \n fp = None\n try:\n fp = open(path)\n \n data = yaml.load(fp)\n \n finally:\n if fp:\n fp.close()\n \n return data", "def load(cls, data):\n 
cls.check_for_schema()\n models = None\n if type(data) is list:\n models = cls.json_to_models(data)\n # if len(models) > 0 and issubclass(models[0].__class__, PillowtalkBase):\n # # [m._add_relationships() for m in models]\n elif type(data) is dict:\n models = cls.json_to_model(data)\n else:\n raise PillowtalkError(\"Data not recognized. Supply a dict or list: \\\"{0}\\\"\".format(data))\n return models", "def _load_from_json(self, data):\n if \"errors\" in data:\n # TODO: handle responses with more than one error\n data = data[\"errors\"][0]\n self.code = data[\"code\"]\n if \"message\" in data:\n self.message = data[\"message\"]\n else:\n self.message = data[\"detail\"]", "def from_json_file(cls, json_file, check_params=False):\n try:\n with Params._open_file(json_file, \"r\") as reader:\n text = reader.read()\n return cls.from_json_string(text, check_params=check_params)\n except Exception as err:\n print(\"Failed to read {} instance from: {}\".format(cls.__name__, json_file), err)\n return None", "def loadJSON(jsonData):\n\n if hasattr(jsonData, 'read'):\n loadedjson = json.load(jsonData)\n elif isinstance(jsonData, str):\n if os.path.exists(jsonData):\n with open(jsonData) as jsonFile:\n loadedjson = json.load(jsonFile)\n else:\n try:\n loadedjson = json.loads(jsonData)\n except JSONDecodeError as e:\n raise ValueError(f\" {str(e)}: Got {jsonData}, either bad format of file does not exist\")\n\n elif isinstance(jsonData, dict):\n loadedjson = jsonData\n else:\n err = f\"workflow type: {type(jsonData)} is unknonw. Must be str, file-like or dict. \"\n raise ValueError(err)\n\n\n return loadedjson", "def get_pyobj_from_json(str_or_path):\n try:\n # see if treating str_or_path as a path works\n fp = codecs.open(str_or_path, mode=\"r\", encoding=\"utf-8\")\n doc = json.load(fp, encoding=\"utf-8\")\n except:\n # if it doesn't work load the text\n doc = json.loads(str_or_path)\n return doc", "def load_json(load_path):\n # read from path\n with open(load_path) as json_file:\n data = json.load(json_file)\n return data", "def from_json(cls, path):\n\n if not isinstance(path, Path):\n path = Path(path)\n\n directory, prefix = path.parent, path.stem\n\n with open(path) as fp:\n params = json.load(fp)\n model = cls(params['elements'],\n params['r_cut'],\n params['gp']['sigma'],\n params['gp']['theta'],\n params['gp']['noise'])\n\n gp_filename = params['gp']['filename']\n try:\n model.gp.load(directory / gp_filename)\n except:\n warnings.warn(\"The many-body GP file is missing\")\n pass\n\n return model", "def json_decode(self, data, **kwargs):\n kwargs.pop('object_hook', None)\n json.loads(data, object_hook = self._dict_to_obj, **kwargs)", "def load(obj, dto=None, decode=None):\n assert isinstance(obj, (six.string_types, bytes))\n assert dto is None or isinstance(dto, tuple)\n assert decode is None or isinstance(decode, six.string_types)\n # ensure object is standard json before reusing the api_client deserializer\n # safe_load from ruamel.yaml as it doesn't accidentally convert str\n # to unicode in py2. 
It also manages both json and yaml equally well\n # Good explanation: https://stackoverflow.com/a/16373377/4717963\n # Safe Load also helps prevent code injection\n if decode:\n if decode == 'base64':\n prep_obj = base64.b64decode(obj)\n else:\n raise ValueError(\"Load's decode option only supports base64\")\n else:\n prep_obj = obj\n loaded_obj = ruamel.yaml.safe_load(prep_obj)\n if dto:\n assert dto[0] in ['cloudbreak']\n assert isinstance(dto[1], six.string_types)\n obj_as_json = dump(loaded_obj)\n response = Response()\n response.data = obj_as_json\n api_clients = {\n 'cloudbreak': config.cb_config.api_client,\n }\n api_client = api_clients[dto[0]]\n return api_client.deserialize(\n response=response,\n response_type=dto[1]\n )\n return loaded_obj", "def load_from_json_file(filename):\n if type(filename) is not str:\n return\n\n with open(filename, mode=\"r\") as file:\n return json.loads(file.read())", "def load_from_json_file(filename):\n with open(filename, \"r\", encoding=\"utf-8\") as f:\n return(json.loads(f.read()))", "def load_from_json(file):\n with open(file, 'r') as f:\n return json.load(f)", "def load(self):\n basepath = os.path.dirname(os.path.abspath(__file__))\n filename = os.sep.join([basepath, c.FOLDER_JSON, c.FILE_GAME_VERSIONS])\n Handler.ALL_VERS_DATA = {} # reset known data; do not retain defunct information\n with open(filename, \"r\") as f:\n data = json.loads( f.read() )\n self.update(data)\n self._updated = False\n #for v,record in iteritems(Handler.ALL_VERS_DATA):\n # print(type(v), v)\n #for k,v in iteritems(record): ", "def load_json(filename):\n with open(filename) as file:\n obj = json.load(file)\n return obj", "def load_json(path: Path) -> Any:\n with path.open() as f:\n return json.load(f)", "def load_from_json_file(filename):\n with open(filename, mode=\"r\", encoding=\"utf-8\") as a_file:\n return json.loads(a_file.read())", "def load(cls, yaml_or_json):\n try:\n result = yaml.safe_load_all(yaml_or_json)\n except:\n try:\n result = json.loads(yaml_or_json)\n if isinstance(result, dict):\n result = (result for _ in range(1))\n except:\n result = None\n\n return result", "def from_json_file(cls, json_file):\n with tf.io.gfile.GFile(json_file, \"r\") as reader:\n text = reader.read()\n return cls(**json.loads(text))", "def from_json(cls, path):\n\n if not isinstance(path, Path):\n path = Path(path)\n\n directory, prefix = path.parent, path.stem\n\n with open(path) as fp:\n params = json.load(fp)\n model = cls(params['element'],\n params['r_cut'],\n params['gp']['sigma'],\n params['gp']['theta'],\n params['gp']['noise'])\n\n gp_filename = params['gp']['filename']\n try:\n model.gp.load(directory / gp_filename)\n except:\n warnings.warn(\"The many-body GP file is missing\")\n pass\n\n return model", "def from_JSON(cls, filename):\n with open(os.path.expanduser(filename), encoding='utf-8') as f:\n return json.load(f, object_hook=class_hook)", "def test_load_json():\n schema = pa.schema([\n pa.field(\"foo\", pa.int32()),\n pa.field(\"bar\", pa.int64())\n ])\n\n path = \"{}/tests/fixtures/simple_json.txt\".format(os.getcwd())\n\n converted_data = client.load_json(path, schema)\n assert converted_data.to_pydict() == {'foo': [1, 10], 'bar': [2, 20]}", "def deserialize(self, obj):\n try:\n return json.loads(obj.decode('utf-8'))\n except (JSONDecodeError, TypeError, UnicodeDecodeError):\n raise DeserializationError", "def load_json_str(self, json_text: str):\n\n self.data = json.loads(json_text)", "def create_from_pb2(cls, pb2_obj: _DetectionProto) -> 
'Detection':\n categories = []\n keypoints = []\n\n for idx, score in enumerate(pb2_obj.score):\n categories.append(\n category_module.Category(\n score=score,\n index=pb2_obj.label_id[idx]\n if idx < len(pb2_obj.label_id)\n else None,\n category_name=pb2_obj.label[idx]\n if idx < len(pb2_obj.label)\n else None,\n display_name=pb2_obj.display_name[idx]\n if idx < len(pb2_obj.display_name)\n else None,\n )\n )\n\n if pb2_obj.location_data.relative_keypoints:\n for idx, elem in enumerate(pb2_obj.location_data.relative_keypoints):\n keypoints.append(\n keypoint_module.NormalizedKeypoint(\n x=elem.x,\n y=elem.y,\n label=elem.keypoint_label,\n score=elem.score,\n )\n )\n\n return Detection(\n bounding_box=bounding_box_module.BoundingBox.create_from_pb2(\n pb2_obj.location_data.bounding_box\n ),\n categories=categories,\n keypoints=keypoints,\n )", "def from_json(cls, json_string=None, filename=None, encoding='utf-8', errors='strict', **kwargs):\n bx_args = {}\n for arg in kwargs.copy():\n if arg in BOX_PARAMETERS:\n bx_args[arg] = kwargs.pop(arg)\n data = _from_json(json_string, filename=filename, encoding=encoding, errors=errors, **kwargs)\n if not isinstance(data, dict):\n raise BoxError('json data not returned as a dictionary, but rather a {0}'.format(type(data).__name__))\n return cls(data, **bx_args)", "def from_json_file(cls, json_file):\n with tf.io.gfile.GFile(json_file, \"r\") as reader:\n text = reader.read()\n return cls.from_dict(json.loads(text))", "def load(self, filename):\n _json = self.read_json(filename, byteify=True)\n _json = self._byteify(_json, ignore_dicts=True)\n if not _json:\n return None\n _dict = {k : self._parse_value(v) for k, v in _json.items()}\n return _dict", "def _load_json(self, kind, source, **kwargs):\n if source is None:\n raise exceptions.invalid_json_map[kind](f\"Cannot load {kind} - no data source specified.\")\n\n # Decode the json string and deserialize to objects.\n try:\n data = load_json(source, **kwargs)\n except FileNotFoundError as e:\n raise exceptions.file_not_found_map[kind](e)\n\n except jsonlib.decoder.JSONDecodeError as e:\n raise exceptions.invalid_json_map[kind](e)\n\n return data", "def read_json(file_or_path):\n try:\n with (open(file_or_path, 'r') if isinstance(file_or_path, (str, bytes)) else file_or_path) as f:\n obj = json.load(f)\n except IOError:\n obj = json.loads(file_or_path)\n return obj", "def load_json(filepath: str):\n with open(filepath, \"r\", encoding=\"utf8\") as f:\n return json.loads(f.read())", "def testLoadProtojsonWithInvalidJsonModule(self):\n sys.modules['json'] = InvalidJsonModule\n sys.modules['simplejson'] = ValidJsonModule\n\n # Ignore bad module and default back to simplejson.\n reload(protojson)\n self.assertEquals('simplejson', protojson.json.name)", "def load_from_json_file(filename):\n with open(filename, 'r', encoding='utf8') as f:\n return json.load(f)", "def _proto2object(\n proto: GetGroupsMessage_PB,\n ) -> \"GetGroupsMessage\":\n\n return GetGroupsMessage(\n msg_id=_deserialize(blob=proto.msg_id),\n address=_deserialize(blob=proto.address),\n content=json.loads(proto.content),\n reply_to=_deserialize(blob=proto.reply_to),\n )", "def load_from_json_file(filename):\n with open(filename, \"r\") as my_file:\n return json.loads(my_file.read())", "def deserialize(self, blob):\n return json.loads(blob)", "def load_json(content):\n from ujson import loads\n return loads(content)", "def __load_json(self, path):\n try:\n with Path(path).open('r') as f:\n return json.load(f)\n except ValueError as ve:\n 
six.raise_from(ValueError(\"error while loading the fixture %s\" % path), ve)", "def load_json(s: str) -> Any:\n try:\n return json.loads(s)\n except json.JSONDecodeError as e:\n raise RuntimeError(f'Failed to parse \"{s}\" as json.')", "def read_json(self, *args, **kwargs):\n with self.open('rb') as f:\n return json.load(f, *args, **kwargs)", "def load(cls, json_str):\n \n game_state = json.loads(json_str)\n return cls(game_state)", "def parse_pbobject(source, pb_class):\n if isinstance(source, str):\n return open_pbobject(source, pb_class)\n elif isinstance(source, bytes):\n pb_object = pb_class()\n pb_object.ParseFromString(source)\n return pb_object\n else:\n logging.error(f'cannot parse type {type(source)}')", "def from_json(cls, data):\n\n try:\n # Read a json string and convert it to dictionnary.\n return loads(data)\n\n except decoder.JSONDecodeError: # pragma: no cover\n # In case the decoder return an error,\n # we return and empty dictionnary.\n return {}", "def decode(self, bytestring, **options):\n try:\n return json.loads(\n bytestring.decode(\"utf-8\"), object_pairs_hook=collections.OrderedDict\n )\n except ValueError as exc:\n raise ParseError(\"Malformed JSON. %s\" % exc)", "def loadJSONFile(filename):\n\twith open(filename, 'r') as f:\n\t\treturn json.loads(f.read())", "def fromJSON(self, path='') -> dict:\n try:\n return(importJSON(path))\n except Exception as error:\n print(f\"Error: self.fromJSON({path}) -> {error}\")", "def load_bco(options):\n\n # Declare source of BioCompute Object\n print('\\nRemote BCO supplied: ', url_valid(options.bco), \\\n '\\t Local BCO supplied: ', os.path.exists(options.bco))\n\n if url_valid(options.bco):\n try:\n bco_dict = json.loads(requests.get(options.bco).content)\n print('Remote BioCompute loaded as ', bco_dict['provenance_domain']['name'])\n\n except ValueError: # includes simplejson.decoder.JSONDecodeError\n sys.exit('Loading remote JSON has failed \\U0001F61E\\nExiting')\n\n elif os.path.exists(options.bco):\n print(options.bco)\n try:\n with open(options.bco, 'r') as data:\n bco_dict = json.load(data)\n print('Local BioCompute loaded as ', bco_dict['provenance_domain']['name'])\n\n except ValueError: # includes simplejson.decoder.JSONDecodeError\n sys.exit(\"Importing local JSON has failed \\U0001F61E\\nExiting\")\n\n # If options.bco is not a valid FILE or URI program will exit\n else:\n print('BioCompute loading FAILED \\n')\n sys.exit(\"Please provide a valid URI or PATH\")\n\n return bco_dict", "def _proto2object(\n proto: GetGroupsResponse_PB,\n ) -> \"GetGroupsResponse\":\n\n return GetGroupsResponse(\n msg_id=_deserialize(blob=proto.msg_id),\n address=_deserialize(blob=proto.address),\n status_code=proto.status_code,\n content=json.loads(proto.content),\n )", "def load_from_json_file(filename):\n with open(filename, 'r') as jFile:\n fString = jFile.read()\n fObj = json.loads(fString)\n return fObj", "def loader(data: Union[str, dict], _: FileInfo) -> Optional[dict]:\n if isinstance(data, str):\n if fmt != 'json-ld':\n g = Graph()\n g.parse(data=data, format=fmt)\n data = pyld_jsonld_from_rdflib_graph(g)\n\n if not isinstance(data, dict):\n # TODO: Add a context processor to the source w/ CONTEXTS_PARAM_TYPE\n # TODO: figure out what to do base options below\n # TODO: determine whether jsonld.frame can handle something other than string input\n data_as_dict = jsonld.frame(data, contexts)\n else:\n data_as_dict = data\n typ = data_as_dict.pop('@type', None)\n # TODO: remove this when we get the Biolinkml issue fixed\n if 
not typ:\n typ = data_as_dict.pop('type', None)\n if typ and typ != target_class.class_name:\n # TODO: connect this up with the logging facility or warning?\n print(f\"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}\")\n return json_clean(data_as_dict)", "def load(cls, data: TextIO) -> \"OpenAPI\":\n return cls(yaml.safe_load(data))", "def import_json(self):\n with open(self.json_file_path, 'r') as json_file:\n self.json = json.load(json_file)\n self.logger.debug('Json loaded for cbg {}.'.format(self.cbg))\n self.non_empty = 'businesses' in self.json\n return None", "def load_json_from_file(path):\n \n json_file = open(path, \"r\")\n json_string = json_file.read()\n json_file.close()\n return json.loads(json_string)", "def testLoadProtojsonWithInvalidJsonModuleAndNoSimplejson(self):\n sys.modules['json'] = InvalidJsonModule\n\n # Bad module without simplejson back raises errors.\n self.assertRaisesWithRegexpMatch(\n ImportError,\n 'json library \"json\" is not compatible with ProtoRPC',\n reload,\n protojson)", "def load_json(filename_or_dict):\n\tif isinstance(filename_or_dict, str):\n\t\tinput_file = open(filename_or_dict, encoding='utf-8')\n\t\tjson_dict = json.loads(input_file.read())\n\t\tinput_file.close()\n\t\treturn json_dict\n\treturn filename_or_dict", "def _decode(self, data: bytes):\n\n return json.loads(data.decode('utf-8'))", "def load_json(path):\n with open(path) as data_file:\n return json.load(data_file)", "def load_json_or_yaml(file_path):\n # handle json doc\n if isinstance(file_path, dict):\n return file_path\n # handle url\n elif file_path.startswith(\"http\"):\n with requests.get(file_path) as url:\n # check if http requests returns a success status code\n if url.status_code != 200:\n raise ValueError(f\"Invalid URL [{url.status_code}]: {file_path} !\")\n else:\n _data = url.content\n # handle file path\n else:\n try:\n with open(file_path) as f:\n _data = f.read()\n except FileNotFoundError:\n raise ValueError(\"Invalid File Path!\")\n try:\n if isinstance(_data, bytes):\n _data = _data.decode(\"utf-8\")\n data = json.loads(_data)\n # except ValueError: # for py<3.5\n except json.JSONDecodeError: # for py>=3.5\n try:\n data = yaml.load(_data, Loader=yaml.SafeLoader)\n except (yaml.scanner.ScannerError, yaml.parser.ParserError):\n raise ValueError(\"Not a valid JSON or YAML format.\")\n return data", "def json_load(file_path):\n\n with open(file_path) as f:\n return json_loads(f.read())", "def json_loads(self, string: str) -> object:\n return json.loads(string)", "def load_json(jsonfile):\n with open(jsonfile) as f:\n return json.load(f)", "def load_json(jsonfile):\n with open(jsonfile) as f:\n return json.load(f)", "def load_from_json_file(filename):\n with open(filename) as f:\n return json.load(f)", "def load_json(file_path):\n try:\n with open(file_path, \"r\", encoding=\"utf-8\") as f:\n data = json.load(f)\n except json.JSONDecodeError as e:\n raise ValueError(f\"Invalid JSON format in file {file_path}\") from e\n except FileNotFoundError as e:\n raise ValueError(f\"File not found: {file_path}\") from e\n return data", "def __init__(self, json_str: object = None, json_file_path: object = None) -> None:\n self.data = None\n if json_str is None and json_file_path is None:\n # raise Exception(\"Invalid file path or json string. 
Please provide valid file path for json data or provide json string\")\n print(\"No valid json file has been loaded\")\n if json_str is None:\n with open(json_file_path) as file:\n self.data = json.load(file)\n else:\n self.data = json.loads(json_str)\n # if self.data is not None:", "def load_json(filepath: str):\n with open(filepath, encoding=\"utf-8\") as f:\n return json.load(f)" ]
[ "0.57719094", "0.57548875", "0.5741168", "0.568899", "0.5675841", "0.55961937", "0.5520938", "0.55098826", "0.55086243", "0.5459598", "0.54020417", "0.5388325", "0.5387577", "0.5386068", "0.5349237", "0.533662", "0.53339887", "0.53334725", "0.5324513", "0.5323394", "0.53115505", "0.5281063", "0.52729297", "0.52476245", "0.5238931", "0.522012", "0.5214781", "0.5202667", "0.5200718", "0.51952773", "0.51952773", "0.5181145", "0.51785564", "0.51647115", "0.5158004", "0.5147031", "0.514642", "0.514614", "0.5138599", "0.51311123", "0.51298714", "0.5129429", "0.51279074", "0.51277435", "0.51218164", "0.5120561", "0.5113471", "0.5101858", "0.5101597", "0.50985277", "0.5082259", "0.5080818", "0.50717133", "0.5070782", "0.50695413", "0.50691825", "0.50580865", "0.505541", "0.50467545", "0.5045158", "0.5040344", "0.5036993", "0.50303036", "0.50261205", "0.50181067", "0.5017907", "0.5015807", "0.50100875", "0.500929", "0.5008947", "0.5005763", "0.50033647", "0.5000189", "0.49969247", "0.49947745", "0.49891356", "0.49884117", "0.4987662", "0.49850878", "0.49834862", "0.4982958", "0.4981659", "0.49805915", "0.49716896", "0.49692795", "0.49692518", "0.49641833", "0.4960376", "0.49487743", "0.49467897", "0.49449676", "0.49420252", "0.49419206", "0.49416643", "0.49402243", "0.49402243", "0.4938525", "0.49329942", "0.49308872", "0.49284646" ]
0.5887734
0
Like open_pbobject but source can be a path or a bytestring
def parse_pbobject(source, pb_class): if isinstance(source, str): return open_pbobject(source, pb_class) elif isinstance(source, bytes): pb_object = pb_class() pb_object.ParseFromString(source) return pb_object else: logging.error(f'cannot parse type {type(source)}')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def source(self) -> str | Path:\n ...", "def open_pds(source):\n\t# if isinstance(source, file):\n\t# \treturn source\n\tif hasattr(source, \"read\"):\n\t\t# sys.stderr.write(\"Identified a file-like object by read() method existence\\n\")\n\t\treturn source\n\n\ttry:\n\t\t# For universal newlines -- i.e. newlines are automatically converted to \"\\n\", use mode \"U\".\n\t\t# For preserved newlines -- e.g. \"\\r\", \"\\r\\n\", \"\\n\", use mode \"rb\".\n\t\t# PDS style newlines are \"\\r\\n\", however, http://pds.jpl.nasa.gov/documents/qs/sample_image.lbl uses \"\\n\".\n\t\t# Check if hasattr(open, 'newlines') to verify that universal newline support is enabeled.\n\t\tf = open(source, \"rb\")\n\t\treturn f\n\texcept (IOError, OSError):\n\t\t# sys.stderr.write(\"Could not open source\\n\")\n\t\traise\n\telse:\n\t\t# sys.stderr.write(\"Opened source\\n\")\n\t\t# Re-raise to catch something hairy.\n\t\traise\n\tfinally:\n\t\tpass\n\t\t# sys.stderr.write(\"Closing previously opened file\\n\")\n\t\t# f.close()\n\t\t\n\tif isinstance(source, str):\n\t\ttry:\n\t\t\timport cStringIO as StringIO\n\t\texcept ImportError:\n\t\t\timport StringIO\n\t\telse:\n\t\t\t# sys.stderr.write(\"Making a file-like object from string source\\n\")\n\t\t\treturn StringIO.StringIO(str(source))\n\t\t\t\n\t# try:\n\t# \timport urllib\n\t# \tf = urllib.urlopen(source)\n\t# \treturn f\n\t# except (IOError, OSError):\n\t# \tpass\n\t# else:\n\t# \t# Re-raise to catch something hairy.\n\t# \traise\n\t# finally:\n\t# \tpass", "def load_stream(source):\n raise NotImplementedError(\"not implemented yet\")", "def test_prepare_source(source):\n assert isinstance(PseudoPotentialData.prepare_source(source), io.BytesIO)\n\n if isinstance(source, io.BytesIO):\n # If we pass a bytestream, we should get the exact same back\n assert PseudoPotentialData.prepare_source(source) is source", "def make(self, source):\n if isinstance(source, str):\n return copy(self.get(source))\n elif self.PB_CLASS and isinstance(source, self.PB_CLASS):\n item = copy(self.get(source.name))\n item._pb = source\n return item\n else:\n return copy(source)", "def _source_path_reader(self, src, encoding=\"utf-8\"):\n if src is None:\n return src\n if isinstance(src, dict) and \"content\" in src:\n with tempfile.NamedTemporaryFile(mode=\"w\", encoding=encoding, delete=False) as fp:\n fp.write(src[\"content\"])\n return fp.name\n elif isinstance(src, dict) and \"file\" in src:\n if os.path.exists(src[\"file\"]) is False:\n raise FileNotFound(src)\n return src[\"file\"]\n else:\n raise InvalidParameter(\"The parameter is invalid.\")", "def Sourceify(path):\n return path", "def test_simple_source_constructor():\n TESTPATH = \"/usr/local/share/testfile.mp3\"\n test01 = Source(path=TESTPATH)\n debug(test01)\n assert(test01.path == TESTPATH)\n assert(test01.fname == \"testfile.mp3\")\n assert(test01.root == \"testfile\")\n assert(test01.ext == \".mp3\")\n assert(test01.isValidExtension(\".mp3\") is True)", "def __init__(self, source):\n self._source = source", "def __init__(self, source):\n self.source = source", "def __init__(self, source):\n self.source = source", "def __init__(self, source):\r\n self.source = source", "def load(source_file):\n return loads(source_file.read())", "def source(request, filepath_pseudos):\n filepath_pseudo = pathlib.Path(filepath_pseudos()) / 'Ar.upf'\n\n if request.param is str:\n return str(filepath_pseudo)\n\n if request.param is pathlib.Path:\n return filepath_pseudo\n\n return io.BytesIO(filepath_pseudo.read_bytes())", "def 
get_source(self):", "def __init__(self, source):\n self.source = source\n try:\n self._stream = open(source, \"r\" + self.mode)\n except TypeError: # not a path, assume we received a stream\n if self.mode == \"t\":\n if source.read(0) != \"\":\n raise StreamModeError(\n f\"{self.fmt} files must be opened in text mode.\"\n ) from None\n elif self.mode == \"b\":\n if source.read(0) != b\"\":\n raise StreamModeError(\n f\"{self.fmt} files must be opened in binary mode.\"\n ) from None\n else:\n raise ValueError(f\"Unknown mode '{self.mode}'\") from None\n self._stream = source\n self._read_header(self._stream)", "def get_source(cls, *args, **kwargs): # real signature unknown\n pass", "def get_source(cls, *args, **kwargs): # real signature unknown\n pass", "def get_source(cls, *args, **kwargs): # real signature unknown\n pass", "def getSource():", "def getsource(object):\r\n lines, lnum = getsourcelines(object)\r\n return string.join(lines, '')", "def read_raw(self, name, source, test_data=''):\n self.m.path.assert_absolute(source)\n step_test_data = lambda: self.test_api.read_raw(test_data)\n result = self._run(name, ['copy', source, self.m.raw_io.output()],\n step_test_data=step_test_data)\n return result.raw_io.output", "def load(source, **kwargs):\n with open(source, \"rb\") as f:\n return torch.load(f, **kwargs)", "def build_from_source(obj):\n if (obj.method == 'robot'):\n print(\"TODO: build obo and owl\")\n elif (obj.method == 'jenkins-archive'):\n print(\"TODO: download and unzip\")\n elif (obj.method == 'github-archive'):\n print(\"TODO: download and unzip\")\n elif (obj.method == 'svn-co'):\n print(\"TODO: run svn\")\n else:\n print(\"UNKNOWN METHOD:\"+obj.method)", "def getsource(object):\n lines, lnum = getsourcelines(object)\n return string.join(lines, '')", "def _openSource(self, source, params=None):\n with self._lastOpenSourceLock:\n if (hasattr(self, '_lastOpenSource') and\n self._lastOpenSource['source'] == source and\n self._lastOpenSource['params'] == params):\n return self._lastOpenSource['ts']\n if not len(large_image.tilesource.AvailableTileSources):\n large_image.tilesource.loadTileSources()\n if ('sourceName' not in source or\n source['sourceName'] not in large_image.tilesource.AvailableTileSources):\n openFunc = large_image.open\n else:\n openFunc = large_image.tilesource.AvailableTileSources[source['sourceName']]\n origParams = params\n if params is None:\n params = source.get('params', {})\n ts = openFunc(source['path'], **params)\n with self._lastOpenSourceLock:\n self._lastOpenSource = {\n 'source': source,\n 'params': origParams,\n 'ts': ts,\n }\n return ts", "def get_source(self, source, driver_name=None):\n if not driver_name:\n driver_name = self.driver_name\n driver = ogr.GetDriverByName(driver_name)\n return driver.Open(source, 0)", "def BootstrapSource (name, source, filename):\n source = binascii.b2a_base64 (zlib.compress (source.encode ('utf-8'))).strip ().decode ('utf-8')\n return source_payload.format (name = name, filename = filename, source = '\\\\\\n'.join (textwrap.wrap (source, 78)))", "def open_pbobject(path, pb_class):\n assert path.endswith(\".json\"), 'File extension for {} needs to be json.'.format(path)\n if path.startswith('s3://'):\n return open_remote_pb_object(path, pb_class)\n assert os.path.exists(path), f'Path not found: {path}'\n with open(path, 'r', encoding='UTF-8') as json_file:\n pb_object = Parse(json_file.read(), pb_class())\n return pb_object", "async def source(ctx):\n await ctx.send(\"The source can be found here: \" +\n 
\"https://github.com/FrederikBolding/CryptoBot\")", "def magic_psource(self, parameter_s=''):\n self._inspect('psource',parameter_s)", "def pack(filename: Union[str, Path], source_dir: Union[str, Path]) -> None:\n raise NotImplemented", "def preprocess_python_source(self, module, source):\n\n return source", "def _get_source(self, fullmodname):\n submodname, is_package, relpath = self._get_info(fullmodname)\n fullpath = self.path_entry + relpath\n source = self.datablocks[relpath]\n if hasattr(source, \"decode\"):\n source = source.decode(\"UTF-8\")\n source = source.replace('\\r\\n', '\\n')\n source = source.replace('\\r', '\\n')\n return submodname, is_package, fullpath, source", "def __init__(self, source: str, source_path: str):\n self.source = source\n self.source_path = source_path\n # current character\n self.curr_ch = None\n # pos looks at the position of the current character\n self.pos = Pos(-1, 0, 0)\n # next character\n self.next_ch = source[0] if len(source) > 0 else None\n # src_pos looks at the position of the next character\n self.src_pos = Pos(0, 0, 0)\n # this gets a character into the curr_ch slot\n self._adv()", "def _load(self, source):\n\n sock = toolbox.openAnything(source)\n xmldoc = minidom.parse(sock).documentElement\n sock.close()\n return xmldoc", "def get_source():\n if len(sys.argv) > 1:\n return open(sys.argv[1])\n else:\n return sys.stdin", "def Open(self, file_object):", "def put_source(file_path: str, source: str, sha256sum: str) -> str:\n return g.ledger.file.set_source(file_path, source, sha256sum)", "def parse_file(self, source):\n # If this is a file-like object, we should be able to read it.\n try:\n raw_data = source.read()\n except AttributeError:\n # This raises FileNotFoundError if the file doesn't exist.\n with open(source) as source_obj:\n raw_data = source_obj.read()\n\n # Parse the data in string format.\n return self.parse_string(raw_data)", "def open_source_datastore(self, connection_string, *args, **kwargs):\n return self.open_datastore(connection_string, self.source_inspectors, *args, **kwargs)", "def __init__(self, source, *args, **kwargs):\n super(self.__class__, self).__init__()\n self._source = source\n self.provides = source.provides", "def source():\n\n source = models.Source(name=u\"Joe's Funerals.com\", url=u\"http://www.joesfunerals.com\")\n return source", "def source(self) -> str | Path:\n return self._source", "def give_source(self):\n has_src, src_sobj = self.get_sobj().ReferencedObject()\n if has_src:\n return self.__class__(self._std, self._bld, src_sobj.GetID())", "def load_source(source: Union[str, dict, TextIO],\n loader: Callable[[Union[str, Dict], FileInfo], Optional[Dict]],\n target_class: Type[YAMLRoot],\n accept_header: Optional[str] = \"text/plain, application/yaml;q=0.9\",\n metadata: Optional[FileInfo] = None) -> Optional[YAMLRoot]:\n\n # Makes coding easier down the line if we've got this, even if it is strictly internal\n if metadata is None:\n metadata = FileInfo()\n if not isinstance(source, dict):\n data = hbread(source, metadata, metadata.base_path, accept_header)\n else:\n data = source\n data_as_dict = loader(data, metadata)\n return target_class(**data_as_dict) if data_as_dict is not None else None", "def load_data_source(data_source):\n source_module = __import__('source_'+data_source)\n get_source = getattr(source_module, 'get_source')\n return get_source()", "def read(self, source, sourcename=None, postcheck=True, strict=True):\n if isinstance(source, str):\n with open(source, mode=\"r\") as stream:\n 
return self.readTextStream(\n stream,\n sourcename or source,\n postcheck=postcheck,\n strict=strict,\n )\n elif isinstance(source, pathlib.Path):\n with source.open(mode=\"r\") as stream:\n return self.readTextStream(\n stream,\n sourcename or str(source),\n postcheck=postcheck,\n strict=strict,\n )\n elif isinstance(source, io.BufferedIOBase):\n return self.readTextStream(\n io.TextIOWrapper(source),\n sourcename,\n postcheck=postcheck,\n strict=strict,\n )\n elif not isinstance(source, io.TextIOBase):\n raise TypeError(\n \"Source must be file name (str or pathlib.Path) or \"\n \"readable stream of text data. Got {}\".format(type(source))\n )\n return self.readTextStream(\n source, sourcename, postcheck=postcheck, strict=strict\n )", "def _read_source(self, size: int) -> bytes:\n raise NotImplementedError() # pragma: no cover", "def __openSourceFile(self, act):\n file = act.data()\n if file:\n self.openSourceFile(file)", "def getsourcefile(object):\r\n filename = getfile(object)\r\n if string.lower(filename[-4:]) in ('.pyc', '.pyo'):\r\n filename = filename[:-4] + '.py'\r\n for suffix, mode, kind in imp.get_suffixes():\r\n if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:\r\n # Looks like a binary file. We want to only return a text file.\r\n return None\r\n if os.path.exists(filename):\r\n return filename\r\n # only return a non-existent filename if the module has a PEP 302 loader\r\n if hasattr(getmodule(object, filename), '__loader__'):\r\n return filename\r\n # or it is in the linecache\r\n if filename in linecache.cache:\r\n return filename", "def open_jpi_source_read(file_name):\n\t# change directory\n\tra_to_jpid()\n\t# open the file\n\tsource = open(file_name, \"r\")\n\t# return to starting direcotyr\n\tjpid_to_ra()\n\t# return the open file\n\treturn source", "def SphinxDummySourceClass(source: Any, *args: Any, **kwargs: Any) -> Any:\n return source", "def read(self, source):\n raise NotImplementedError( 'Needs implementation' )", "def _source(source: str, strict_source: bool) -> Store:\n sourced: Dict[str, str] = {}\n\n sourced.update(_parse(source))\n\n if strict_source:\n _assert_envs_exist(set(sourced.keys()))\n\n sourced.update(_preload_specific_vars(set(sourced.keys())))\n\n return sourced", "def read_file(path, source):\n if source == 'srim':\n return read_srim(path)\n elif source == 'astar':\n return read_astar(path)\n else:\n raise ValueError('Unknown data source {}'.format(source))", "def source(self):\n return some.dap.source(py.path.local(self.co_filename))", "def _loadGraphicsObject(source):\n\t#print gSources\n\tif gSources.has_key(source):\n\t\treturn gSources[source]\n\t\n\tsourceDict = {}\n\texecfile(pyengine.resource.fullPath(\"data/\"+source+\".py\"), globals(), sourceDict)\n\timageFile = pyengine.resource.fullPath(sourceDict['image'])\n\tsourceDict['surf'] = pygame.image.load(imageFile).convert()\n\tgSources[source] = sourceDict\n\treturn sourceDict", "def _StageSource(self, source, gcs_staging_dir_bucket,\n gcs_staging_dir_object):\n\n suffix = '.tgz'\n if source.startswith('gs://') or os.path.isfile(source):\n _, suffix = os.path.splitext(source)\n\n source_object = 'source/{stamp}-{uuid}{suffix}'.format(\n stamp=times.GetTimeStampFromDateTime(times.Now()),\n uuid=uuid.uuid4().hex,\n suffix=suffix,\n )\n\n if gcs_staging_dir_object:\n source_object = gcs_staging_dir_object + '/' + source_object\n\n gcs_source_staging = resources.REGISTRY.Create(\n collection='storage.objects',\n bucket=gcs_staging_dir_bucket,\n object=source_object)\n\n 
gcs_client = storage_api.StorageClient()\n if source.startswith('gs://'):\n gcs_source = resources.REGISTRY.Parse(\n source, collection='storage.objects')\n staged_source = gcs_client.Rewrite(gcs_source, gcs_source_staging)\n else:\n if not os.path.exists(source):\n raise c_exceptions.BadFileException(\n 'could not find source [{src}]'.format(src=source))\n elif os.path.isdir(source):\n source_snapshot = snapshot.Snapshot(source)\n size_str = resource_transform.TransformSize(\n source_snapshot.uncompressed_size)\n log.status.Print(\n 'Creating temporary tarball archive of {num_files} file(s)'\n ' totalling {size} before compression.'.format(\n num_files=len(source_snapshot.files), size=size_str))\n staged_source = source_snapshot.CopyTarballToGCS(\n gcs_client, gcs_source_staging)\n elif os.path.isfile(source):\n unused_root, ext = os.path.splitext(source)\n if ext not in _ALLOWED_SOURCE_EXT:\n raise c_exceptions.BadFileException(\n 'Local file [{src}] is none of '.format(src=source) +\n ', '.join(_ALLOWED_SOURCE_EXT))\n log.status.Print('Uploading local file [{src}] to '\n '[gs://{bucket}/{object}].'.format(\n src=source,\n bucket=gcs_source_staging.bucket,\n object=gcs_source_staging.object,\n ))\n staged_source = gcs_client.CopyFileToGCS(source,\n gcs_source_staging)\n\n return staged_source", "def source(self, source):\n\n self._close()\n self._source = source\n\n self.src = rasterio.open(source)\n\n idx = getattr(self, 'indexes', None)\n if idx is None:\n self.indexes = list(range(1, self.src.count+1))", "def main(source):\n pass", "def copy_to(raw_data, obj):\n\n shutil.copyfileobj(raw_data, obj)", "def svn_fs_copied_from(*args):\r\n return _fs.svn_fs_copied_from(*args)", "def source(self):\n return some.dap.source(\"<string>\")", "def open_from(self, f: BinaryIO):\n raise NotImplementedError", "async def source(ctx, command: Option(str, \"The command to view the source code for\", required=False)):\n source_url = 'https://github.com/Pycord-Development/robocord'\n branch = 'main'\n view = discord.ui.View()\n if command is None:\n url = source_url\n label = \"Source code for entire bot\"\n else:\n command_split = command.split()\n index = 0\n obj = discord.utils.get(bot.application_commands.values(), name=command_split[index])\n while isinstance(obj, SlashCommandGroup):\n if index + 1 > len(command_split):\n return await ctx.respond(\"Error: Command is a group. 
You must choose a subcommand from it.\")\n obj = discord.utils.get(obj.subcommands, name=command_split[index])\n if not isinstance(obj, SlashCommand):\n return await ctx.respond(\"Error: Command could not be found\")\n # noinspection PyUnresolvedReferences\n src = obj.callback.__code__\n filename = src.co_filename\n lines, firstlineno = inspect.getsourcelines(src)\n location = os.path.relpath(filename).replace('\\\\', '/')\n\n url = f'{source_url}/blob/{branch}/{location}#L{firstlineno}-L{firstlineno + len(lines) - 1}'\n content = await discord.ext.commands.clean_content(escape_markdown=True).convert(ctx, command)\n label = f'Source code for command \"{content}\"'\n view.add_item(discord.ui.Button(label=\"View Code\", url=url))\n await ctx.respond(label, view=view)", "def to_str(source: Union[str, bytes, IO[bytes]]) -> str:\n if isinstance(source, str):\n return source\n elif isinstance(source, bytes):\n # XXX: Assume it's UTF-8 encoded!\n return source.decode('UTF-8')\n else:\n raise NotImplementedError", "def test_get_file_object(self):\n pass", "def import_(\n self,\n source: \"File\",\n tag: Optional[str] = None,\n ) -> \"Container\":\n _args = [\n Arg(\"source\", source),\n Arg(\"tag\", tag, None),\n ]\n _ctx = self._select(\"import\", _args)\n return Container(_ctx)", "def get_source(self, key, files):\n raise NotImplementedError", "def __init__(self, filenum, source):\n self.source_body = {\n 'filenum': filenum,\n 'source': source\n }", "def new(name, source):", "def get_source (self, name):\n containment = self.containments.get (name)\n if containment is None:\n raise ImportError ('No such module: \\'{}\\''.format (name))\n return (containment [0] if sys.version_info [0] > 2 else\n containment [0].encode ('utf-8'))", "def _convert_str_to_file(source, dirname):\n filename = dirname / 'source.c'\n with filename.open('w') as f:\n f.write(str(source))\n return filename", "def get_source_unicode(obj):\n return inspect.getsource(obj).decode(get_encoding(obj))", "def extract_serializable(target, source, env):\n target = map(str,target)\n source = map(str,source)\n files = \"\"\n for f in source:\n\tfiles += \" \" + f\n\n serializable = map(str.strip, os.popen(\"sed -nre s/'.*DECLARE_KSERIALIZABLE_OPS\\(([A-Za-z_][A-Za-z0-9_]*)\\).*/\\\\1_serializable_ops/p'\" + files).readlines())\n f = file(target[0], \"w\")\n for s in serializable:\n\tf.write(\"extern struct kserializable_ops %s;\\n\" % s)\n\n f.write(\"\\n\")\n f.write(\"const struct kserializable_ops *%s[] = {\\n\" % os.path.splitext(os.path.basename(f.name))[0])\n \n for s in serializable:\n\tf.write(\" &%s,\\n\" % s)\n\n f.write(\" (void *)0\\n\")\n f.write(\"};\\n\")\n f.close()\n\n return None", "def get_kernel_string(kernel_source, params=None):\n #logging.debug('get_kernel_string called with %s', str(kernel_source))\n logging.debug('get_kernel_string called')\n\n kernel_string = None\n if callable(kernel_source):\n kernel_string = kernel_source(params)\n elif isinstance(kernel_source, str):\n if looks_like_a_filename(kernel_source):\n kernel_string = read_file(kernel_source) or kernel_source\n else:\n kernel_string = kernel_source\n else:\n raise TypeError(\"Error kernel_source is not a string nor a callable function\")\n return kernel_string", "def push(target):\n if target is None:\n target = getcwd()\n\n target = path.abspath(target)\n\n dot_chunk = load_chunk(target)\n src = dot_chunk[\"src\"]\n source = load_source(src)\n\n copy(target, source)", "def do_inspect_with_source(self, arg):\n self._do_inspect(arg, 
with_source=True)", "def usrp_source_make(*args):\n return _uhd_swig.usrp_source_make(*args)", "def __init__(self, source):\n # DOC {{{\n # }}}\n\n # CODE {{{\n # make sure source has readline() method {{{\n if ((hasattr(source, 'readline') == 0) or\n (callable(source.readline) == 0)):\n raise AttributeError(\"Source must have callable readline method.\")\n # }}}\n\n # remember what the source is\n self.source = source\n # }}}", "def __init__(self, source):\n if isinstance(source, str):\n self.line_iter = iter(source.splitlines())\n elif isinstance(source, io.TextIOBase):\n self.line_iter = source\n else:\n raise TypeError('source must be either a string or a text file')\n self.line_iter = enumerate(self.line_iter)\n self.source = source", "async def source(self, ctx, *, command: str = None):\n\n source_url = 'https://github.com/Discord-Bots-Italia/public-bot-py'\n branch = 'master'\n\n if command is None:\n return await ctx.send(source_url)\n\n else:\n obj = self.bot.get_command(command.replace('.', ' '))\n if obj is None:\n return await ctx.send('Could not find command.')\n\n # since we found the command we're looking for, presumably anyway, let's\n # try to access the code itself\n src = obj.callback.__code__\n module = obj.callback.__module__\n filename = src.co_filename\n\n lines, firstlineno = inspect.getsourcelines(src)\n location = os.path.relpath(filename).replace('\\\\', '/')\n\n final_url = f'<{source_url}/blob/{branch}/{location}#L{firstlineno}-L{firstlineno + len(lines) - 1}>'\n await ctx.send(final_url)", "def gen_from_source(source_id, *source_args, **source_kwargs):\n pass", "def fromSerpent(\n cls, source, sourcename=None, postcheck=True, strict=True, names=None,\n ):\n return super().fromSerpent(\n source,\n sourcename=sourcename,\n postcheck=postcheck,\n strict=strict,\n names=names,\n )", "def get_code(self, fullname):\n\t\tsource_path = self.get_filename(fullname)\n\t\tsource_mtime = None\n\t\ttry:\n\t\t\tbytecode_path = cache_from_source(source_path)\n\t\texcept NotImplementedError:\n\t\t\tbytecode_path = None\n\t\telse:\n\t\t\ttry:\n\t\t\t\tst = self.path_stats(source_path)\n\t\t\texcept NotImplementedError:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tsource_mtime = int(st['mtime'])\n\t\t\t\ttry:\n\t\t\t\t\tdata = self.get_data(bytecode_path)\n\t\t\t\texcept IOError:\n\t\t\t\t\tpass\n\t\t\t\telse:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tbytes_data = self._bytes_from_bytecode(fullname, data,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t bytecode_path,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t st)\n\t\t\t\t\texcept (ImportError, EOFError):\n\t\t\t\t\t\tpass\n\t\t\t\t\telse:\n\t\t\t\t\t\t_verbose_message('{} matches {}', bytecode_path,\n\t\t\t\t\t\t\t\t\t\tsource_path)\n\t\t\t\t\t\tfound = marshal.loads(bytes_data)\n\t\t\t\t\t\tif isinstance(found, _code_type):\n\t\t\t\t\t\t\t_imp._fix_co_filename(found, source_path)\n\t\t\t\t\t\t\t_verbose_message('code object from {}',\n\t\t\t\t\t\t\t\t\t\t\tbytecode_path)\n\t\t\t\t\t\t\treturn found\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tmsg = \"Non-code object in {}\"\n\t\t\t\t\t\t\traise ImportError(msg.format(bytecode_path),\n\t\t\t\t\t\t\t\t\t\t\t name=fullname, path=bytecode_path)\n\t\tsource_bytes = self.get_data(source_path)\n\t\tcode_object = self.source_to_code(source_bytes, source_path)\n\t\t_verbose_message('code object from {}', source_path)\n\t\tif (not sys.dont_write_bytecode and bytecode_path is not None and\n\t\t\tsource_mtime is not None):\n\t\t\tdata = 
bytearray(_MAGIC_BYTES)\n\t\t\tdata.extend(_w_long(source_mtime))\n\t\t\tdata.extend(_w_long(len(source_bytes)))\n\t\t\tdata.extend(marshal.dumps(code_object))\n\t\t\ttry:\n\t\t\t\tself._cache_bytecode(source_path, bytecode_path, data)\n\t\t\t\t_verbose_message('wrote {!r}', bytecode_path)\n\t\t\texcept NotImplementedError:\n\t\t\t\tpass\n\t\treturn code_object", "def _resolveSourcePath(self, sources, source):\n source = copy.deepcopy(source)\n if source['path'] != '__none__':\n sourcePath = Path(source['path'])\n source['path'] = self._basePath / sourcePath\n if not source['path'].is_file():\n altpath = self._basePath.parent / sourcePath / sourcePath.name\n if altpath.is_file():\n source['path'] = altpath\n if not source['path'].is_file():\n raise TileSourceFileNotFoundError(str(source['path']))\n sources.append(source)", "def __init__(self, source_path):\n \n self._source_path = source_path\n self._stable_dir = None\n self.stable_path = None\n self.ignore_next_exit = False", "def make_source(args, stdin=STDIN):\n\n infile = args.infile\n if infile is None:\n infile = stdin\n\n return hxl.input.data(infile, make_input_options(args))", "def mock_source():\n return Mock(spec=FrameIngestorSource)", "def __init__(self, source, *args, **kwargs):\n super(self.__class__, self).__init__()", "def load_source_with_environment(source, component_name, environment=None):\n environment = environment or {}\n exec(source, environment)\n return environment[component_name]", "def __init__(self, source, factory, key = None):\n # TODO: This class current has more methods than ICachableSource. We either \n # need to update the interface, or create a new one for the extra methods\n self._key = key\n self.source = source\n self.factory = factory\n self._files = list()\n self._csv_dictreader_list = list()\n \n if isinstance(source, str):\n if os.path.isfile(source):\n _file = open(source,'rb')\n self._files.append(_file)\n self._csv_dictreader_list.append(DictReader(_file))\n elif os.path.isdir(source):\n for _entry in os.listdir(source):\n _file = open(_entry,'rb')\n self._files.append(_file)\n self._csv_dictreader_list.append(DictReader(_file))\n else:\n raise ValueError(\"expected string source parameter to reference a valid file or directory: \" + str(source))\n elif isinstance(source, DictReader):\n self._csv_dictreader_list.append(source)\n else:\n self._csv_dictreader_list.append(DictReader(source))", "def _read_source(self):\n \n if self.fileType == FTPythonCompiled or \\\n self.fileType == FTCompiledModule:\n return None\n \n filename = Filename(self.filename)\n filename.setExtension('py')\n try:\n file = open(filename, 'rU')\n except IOError:\n return None\n return file.read()", "def source_format(self):\n return '{}{}'.format(\n self.source_path,\n self.sub_base_pod_path)", "def get_source(self):\n\t\treturn self.source.get_source()", "def getData(self, data_source):\r\n if isinstance(data_source, str):\r\n try:\r\n return eval(data_source)\r\n except (NameError, SyntaxError):\r\n try:\r\n data_f = open(data_source, 'U')\r\n data = data_f.read()\r\n data_f.close()\r\n try:\r\n return eval(data)\r\n except (NameError, SyntaxError, TypeError):\r\n pass\r\n return data\r\n except (IOError, NameError, TypeError):\r\n pass\r\n # if we got here, either we didn't get a string or we couldn't read\r\n # the data source into any other kind of object\r\n return data_source", "def load_file(self, src: str) -> bytes:\n if re.match(\"https?://\", src):\n content = self.load_file_from_url(src)\n else:\n content = 
self.load_file_from_folders(src)\n return content", "def svn_client_copy_source_t_path_get(svn_client_copy_source_t_self): # real signature unknown; restored from __doc__\n return \"\"", "def get_source_unicode(obj):\n return inspect.getsource(obj)" ]
[ "0.6619778", "0.6433512", "0.62496525", "0.61425763", "0.59727526", "0.58300316", "0.57773453", "0.57492805", "0.5733119", "0.5724798", "0.5724798", "0.5724405", "0.57121646", "0.5667463", "0.5616308", "0.55945593", "0.55895805", "0.55895805", "0.55895805", "0.55640024", "0.5558145", "0.5548597", "0.55305743", "0.5526982", "0.55262035", "0.55226374", "0.5479248", "0.5472544", "0.54600084", "0.5425108", "0.54068196", "0.5394045", "0.53904796", "0.5379556", "0.5371281", "0.53282833", "0.53183335", "0.52956814", "0.52846444", "0.52813256", "0.52793986", "0.5273174", "0.5254541", "0.52544653", "0.52391183", "0.5237928", "0.52260226", "0.52119946", "0.5210358", "0.52056706", "0.52049357", "0.5187477", "0.518304", "0.5167858", "0.51600355", "0.51442605", "0.51386094", "0.5135417", "0.5130696", "0.5125241", "0.5123303", "0.5097813", "0.5093827", "0.5090808", "0.50896347", "0.50744075", "0.50741327", "0.5069679", "0.50677156", "0.5058338", "0.5054881", "0.5054685", "0.5045398", "0.5040931", "0.50370085", "0.5025397", "0.50238854", "0.50222486", "0.50209486", "0.5015884", "0.50157684", "0.5012956", "0.50113934", "0.5005873", "0.4998958", "0.49986446", "0.49839464", "0.49737567", "0.4959657", "0.4959059", "0.49586833", "0.49575222", "0.49504787", "0.4949104", "0.49474767", "0.49430978", "0.49388298", "0.4936056", "0.493469", "0.4930323" ]
0.6567848
1
Load JSON as a protobuf (pb2) object from S3 remote
def open_remote_pb_object(s3_object_uri, pb_class): if s3_object_uri.startswith('s3://'): bucket_name, s3_base_path = convert_uri_to_bucket_path(s3_object_uri) else: raise ValueError("Expected path to S3 bucket but got {}".format(s3_object_uri)) pb_object = Parse(get_string_from_s3_file(bucket_name, s3_base_path), pb_class()) return pb_object
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __retrieve_from_bucket(fname):\n blob = BUCKET.blob(fname)\n json_data = json.loads(blob.download_as_string())\n return json_data", "def load(self, bucket, key):\n\n bucket = self._build_bucket_resource(bucket)\n\n with io.BytesIO() as stream:\n bucket.download_fileobj(key, stream)\n stream.seek(0)\n\n wrapper = io.TextIOWrapper(stream, encoding='utf-8')\n # Preserve the original order\n return json.load(wrapper, object_pairs_hook=collections.OrderedDict)", "def open_pbobject(path, pb_class):\n assert path.endswith(\".json\"), 'File extension for {} needs to be json.'.format(path)\n if path.startswith('s3://'):\n return open_remote_pb_object(path, pb_class)\n assert os.path.exists(path), f'Path not found: {path}'\n with open(path, 'r', encoding='UTF-8') as json_file:\n pb_object = Parse(json_file.read(), pb_class())\n return pb_object", "def load_pickle_from_s3(bucket, path):\n pkl = get_from_s3(bucket, path)\n try:\n return pickle.loads(pkl, encoding='utf-8') # python3\n except TypeError:\n return pickle.loads(pkl) # python2", "def load_pickle(self, bucket, key):\n\n with BytesIO() as obj_buffer:\n self._s3.Bucket(bucket).download_fileobj(key, obj_buffer)\n obj_buffer.seek(0)\n obj = pickle.load(obj_buffer)\n\n return obj", "def load_from_s3(filename):\n s3 = boto3.client('s3')\n obj = s3.get_object(Bucket=BUCKET_NAME, Key=S3_PATH.format(filename))\n return obj['Body'].read().decode()", "def load_archives_from_s3(self):\n s3_bucket = S3Backend(self.conf).bucket\n try:\n k = Key(s3_bucket)\n k.key = self.backup_key\n\n return json.loads(k.get_contents_as_string())\n except S3ResponseError, exc:\n log.error(exc)\n return {}", "def load_file_aws(file_path, aws_credentials):\n bucket_engine = S3Bucket(*aws_credentials)\n return json.loads(\n gzip.decompress(\n bucket_engine.read(file_path)\n ).decode('utf-8')\n )", "def read_jsonl_from_s3(s3_path, encoding='utf-8', compressed=False) :\n bucket, key = s3_path_to_bucket_key(s3_path)\n obj = s3_resource.Object(bucket, key)\n text = obj.get()['Body'].read()\n \n if compressed:\n split_text = gzip.decompress(text).decode(encoding).split('\\n')\n else:\n split_text = text.decode(encoding).split('\\n')\n \n data = []\n for t in split_text:\n data.append(json.loads(t))\n \n return data", "def deserialize(self, blob):\n return json.loads(blob)", "def _remoteloadjson(path: str) -> JSONType:\n return json.loads(request.urlopen(path).read())", "def load_s3_njson(bucket, prefix, key_list, honorary_list):\n # Get list of files in bucket and with prefix:\n s3_file_list = list_s3_files(bucket, prefix)\n \n # Load data from all files:\n structured_data = []\n for s3_file in s3_file_list:\n structured_data = structured_data + s3_file_to_dict_list(bucket, s3_file, key_list, honorary_list)\n \n return structured_data", "def get_pickle_from_s3(path):\n return load_pickle_from_s3(*load_bucket_and_path(path))", "def get_google_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n googl_filename = \"GOOGL.json\"\n complete_path = os.path.join(prefix, googl_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def get_file_from_s3(bucket_name, file_name, json_parse=True):\n s3_client = boto3.client('s3')\n s3_file = s3_client.get_object(Bucket=bucket_name, Key=file_name)\n try:\n file_contents = s3_file['Body'].read()\n if json_parse:\n file_contents = json.loads(file_contents)\n except Exception as 
exc:\n LOGGER.error('Encountered error reading s3 file')\n raise exc\n return file_contents", "def _json_to_obj(cls, serialized_str):\n json_dict = json.loads(serialized_str)\n if 'metadata' in json_dict.keys():\n metadata_dict = json_dict['metadata']\n return Metadata(metadata_dict)", "def get_amazon_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n amzn_filename = \"AMZN.json\"\n complete_path = os.path.join(prefix, amzn_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def get_gzipped_s3_objects_from_dict(session, event):\n return get_s3_objects_from_dict(\n session, event, default_unzip_s3_object_handler_function\n )", "def json_to_dict(s3_user):\n try:\n s3_config = s3_user.get_object(Bucket=\"hermes-sharedservices-data\", Key=\"Lambdas/start-stop/config.json\")['Body'].read()\n logger.info('Fetching config file..')\n return ast.literal_eval(s3_config)\n except Exception as error:\n logger.info(\"Reading the config from S3 failed with the following error : {}\".format(error))", "def _get_s3_object(self, s3_path):\n bucket_name, key = S3Util.get_bucket_and_key(s3_path)\n return self.s3_resource.Object(bucket_name, key)", "def read_data_from_s3(self, name, loc):\n try:\n filename = loc + name + \".json\"\n logger.info('Retrieving the data from the S3 file %s' % filename)\n return self._retrieve_dict(filename)\n except Exception as e:\n logger.error(e)\n return False", "def default_unzip_s3_object_handler_function(response):\n bytestream = BytesIO(response[\"Body\"].read())\n raw_object = GzipFile(None, \"rb\", fileobj=bytestream).read()\n try:\n # decode if allowed\n return_object = raw_object.decode(\"utf-8\")\n except AttributeError:\n return_object = raw_object\n return json.loads(return_object)", "def load(self):\n with io.open(self.filename, encoding='utf-8') as f:\n self.load_from_dict(json.loads(f.read()))", "def load_from_s3(self, bucket, prefix=None):\r\n n = 0\r\n if prefix:\r\n prefix = '%s/' % prefix\r\n else:\r\n prefix = '%s/' % self.id[1:]\r\n rs = bucket.list(prefix=prefix)\r\n for key in rs:\r\n n += 1\r\n m = self.new_message(key.get_contents_as_string())\r\n self.write(m)\r\n return n", "def json_loader(filename):\n\n with open(filename, \"r\", encoding=\"UTF-8\") as source:\n data = json.load(source, object_hook=object_decode)\n return data", "def FromBytes (cls, data):\n return cls (json.loads (zlib.decompress (data).decode ('utf-8')))", "def _read_s3_url(cls, s3_url):\n\n parsed_url = urllib.parse.urlparse(s3_url)\n return cls.s3.get_object(Bucket=parsed_url.netloc,\n Key=parsed_url.path.lstrip(\"/\"))[\"Body\"].read()", "def from_json(cls, req_session: Session, base_url: str, source_dict: Dict[Any, Any]) -> 'StorageObject':\n return StorageObject(req_session=req_session,\n base_url=base_url,\n instance_id=source_dict['instanceId'],\n name=source_dict['name'])", "def _localloadjson(path: str) -> JSONType:\n with open(path, encoding=\"utf-8\") as fh:\n return json.load(fh)", "def from_json(cls, b):\n return cls.from_dict(json.loads(b))", "def read(self, stream):\n ret = json.load(stream)\n self.validate(ret)\n self.stringify(ret)\n return (ret, self.make_order(ret))", "def s3_read_data(self):\n\n self.k.open()\n self.k.read()", "def get_gzipped_s3_objects_from_sns_msg_of_dict(session, event):\n objects = []\n if _is_s3_notif(event):\n return get_gzipped_s3_objects_from_dict(session, event)\n for 
record in event.get(\"Records\", []):\n message = record.get(\"Sns\", {}).get(\"Message\")\n objects.extend(get_gzipped_s3_objects_from_dict(session, json.loads(message)))\n return objects", "def get_ibm_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n ibm_filename = \"IBM.json\"\n complete_path = os.path.join(prefix, ibm_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def load_model(self, filename):\n model_object = self.s3_resource.Object(self.bucket_name, self.models_path + str(filename)).get()['Body'].read()\n model = pickle.loads(model_object)\n return model", "def read_json_to_object(fn):\n\n with open(fn, \"r\") as fid:\n obj = json.load(fid, object_hook=lambda d: SimpleNamespace(**d))\n return obj", "def get_bytes(bucket: str, key: str) -> bytes:\n logger.debug(f'Reading from s3://{bucket}/{key}')\n response = client().get_object(Bucket=bucket, Key=key)\n return response['Body'].read()", "def deserialize(self, blob):\n pass", "def from_s3(cls, bucket_name, mos_file_key):\n xml = s3.get_file_contents(bucket_name, mos_file_key)\n return cls.from_string(xml)", "def get_from_s3(s3_client, s3_url):\n url = urlparse(s3_url)\n\n # Split the bucket from the key\n bucket_name = urllib2.unquote(url.netloc).decode('utf8')\n key_name = urllib2.unquote(url.path[1:]).decode('utf8')\n\n # We're done parsing; start doing some S3 ops\n bucket = s3_client.get_bucket(bucket_name, validate=False)\n key = bucket.get_key(key_name)\n return key.get_contents_as_string()", "def JSONtoObject(fileName):\n # TODO: ensure file exists first!!\n \n with open(fileName) as json_data:\n d = json.load(json_data)\n \n return d\n #return json.loads(d, object_hook=_json_object_hook)", "def save_file_aws(obj, file_path, aws_credentials):\n bucket_engine = S3Bucket(*aws_credentials)\n data = gzip.compress(json.dumps(obj).encode('utf-8'))\n bucket_engine.write(file_path, data)", "def __load(self):\n try:\n response = self.s3_client.get_object(\n Bucket=self.AWS_BUCKET_NAME, Key=\"tweets.csv\"\n )\n except self.s3_client.exceptions.NoSuchKey as e:\n logging.error(\n \"No Such Key: S3 bucket \"\n f\"{self.AWS_BUCKET}/{self.OBJ_FILENAME}\"\n )\n raise e\n except self.s3_client.exceptions.InvalidObjectState as e:\n logging.error(\n \"Invalid Object State: S3 bucket \"\n f\"{self.AWS_BUCKET}/{self.OBJ_FILENAME}\"\n )\n raise e\n\n status = response.get(\"ResponseMetadata\", {}).get(\"HTTPStatusCode\")\n\n if status == 200:\n tweets = pd.read_csv(response.get(\"Body\"), dtype=self.DTYPES)\n elif status == 403:\n logging.error(\n \"Access Denied: S3 bucket \"\n f\"{self.AWS_BUCKET}/{self.OBJ_FILENAME}\"\n )\n raise PermissionError(\"Access Denied\")\n elif status == 404:\n logging.error(\n \"No Such Key: S3 bucket \"\n f\"{self.AWS_BUCKET}/{self.OBJ_FILENAME}\"\n )\n raise self.s3_client.exceptions.NoSuchKey(\"No Such Key\")\n else:\n logging.warning(f\"Status: {status}\")\n raise RuntimeError(\"Unable to open resource\")\n return tweets", "def get_s3_object(bucket, key_name, local_file):\n\n tracer.put_metadata('object', f's3://{bucket}/{key_name}')\n\n try:\n s3_resource.Bucket(bucket).download_file(key_name, local_file)\n result = 'ok'\n tracer.put_annotation('OBJECT_DOWNLOAD', 'SUCCESS')\n except botocore.exceptions.ClientError as e:\n tracer.put_annotation('OBJECT_DOWNLOAD', 'FAILURE')\n if e.response['Error']['Code'] == '404':\n result = f'Error: 
s3://{bucket}/{key_name} does not exist'\n else:\n result = f'Error: {str(e)}'\n\n return(result)", "def _load_object(self, cid):\n object_data = unixfs_pb2.Data()\n object_data.ParseFromString(self.client.object.data(\n cid,\n **self.client_request_kwargs,\n ))\n\n self.cid_type_cache[cid] = object_data.Type\n self.path_size_cache[cid] = object_data.filesize\n self.block_cache[cid] = object_data.Data\n self.subblock_sizes_cache[cid] = object_data.blocksizes\n\n return object_data", "def json_loads(self, string: str) -> object:\n return json.loads(string)", "def get_apple_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n aapl_filename = \"AAPL.json\"\n complete_path = os.path.join(prefix, aapl_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def load_data_s3(filename):\n \n global s3_client\n\n if s3_client is None:\n logger.debug('Creating new S3 client.')\n s3_client = boto3.client('s3') \n\n try:\n logger.debug('Loading batch to S3.')\n response = s3_client.upload_file('/tmp/'+filename, os.environ['BUCKET_NAME'], str(os.environ['BUCKET_PATH']) \n + '/' + filename)\n\n except Exception as ex:\n logger.error('Exception in loading data to s3 message: {}'.format(ex))\n send_sns_alert(str(ex))\n raise", "def deserialize_object(d):\n pass", "def _readobj(self, sock):\n buff = b\"\"\n while True:\n n_to_read = max(2048, len(buff))\n chunk = sock.recv(n_to_read)\n buff += chunk\n if len(chunk) != n_to_read:\n print(\"Got: {}\", buff)\n return json.loads(buff)", "def load(fp, *args, **kwargs): \n state = json.load(fp, *args, **kwargs)\n return unserialize(state)", "def _load_json(self, kind, source, **kwargs):\n if source is None:\n raise exceptions.invalid_json_map[kind](f\"Cannot load {kind} - no data source specified.\")\n\n # Decode the json string and deserialize to objects.\n try:\n data = load_json(source, **kwargs)\n except FileNotFoundError as e:\n raise exceptions.file_not_found_map[kind](e)\n\n except jsonlib.decoder.JSONDecodeError as e:\n raise exceptions.invalid_json_map[kind](e)\n\n return data", "def load(self, s):\n self._filename = s\n # self._isLoaded = True\n with open(s, 'r') as f:\n self._dict = json.load(f)", "def from_JSON(cls, filename):\n with open(os.path.expanduser(filename), encoding='utf-8') as f:\n return json.load(f, object_hook=class_hook)", "def _load(self, json_str, filepath):\n # pylint: disable=protected-access\n return self.json_o._load(json_str, filepath)", "def get_metadata_body(self):\n key = self.build_s3_key('datapackage.json')\n return self.get_s3_object(key)", "def json_loads(s):\n return json.loads(s, cls=DataDecoder)", "def download_json_metadata_from_s3(bucket_name, prefix=\"\", num_threads=20):\n\n # simple method for threads to pull from a queue and download JSON files\n def download_object(queue):\n while True:\n obj = queue.get()\n if obj is None:\n break\n obj.Object().download_file(obj.key.replace(prefix, ''))\n queue.task_done()\n\n # create a directory to store downloaded metadata\n cwd = Path.cwd()\n data_dir = cwd / 'data'\n json_dir = data_dir / 'json'\n # try:\n os.makedirs(json_dir, exist_ok=True)\n # except FileExistsError:\n # shutil.rmtree(json_dir)\n # os.makedirs(json_dir)\n os.chdir(json_dir)\n\n # create a queue for objects that need to be downloaded\n # and spawn threads to download them concurrently\n download_queue = Queue(maxsize=0)\n workers = []\n for 
worker in range(num_threads):\n worker = Thread(target=download_object, args=(download_queue, ))\n worker.setDaemon(True)\n worker.start()\n workers.append(worker)\n\n # loop through the files in the bucket and filter for JSON metadata\n # files for only labeled images; add them to the queue\n s3 = boto3.resource(\"s3\")\n bucket = s3.Bucket(bucket_name)\n for obj in bucket.objects.filter(Prefix=prefix):\n if obj.key.endswith(\"meta.json\"):\n download_queue.put(obj)\n\n # wait for the queue to be empty, then join all threads\n download_queue.join()\n for _ in range(num_threads):\n download_queue.put(None)\n for worker in workers:\n worker.join()\n\n os.chdir(cwd)", "def load_json_obj(path: str) -> RAW_CFG:\n with fsspec.open(path) as json_file:\n return json.load(json_file)", "def load_json(content):\n from ujson import loads\n return loads(content)", "def load_json(json_str):\n return _api_internal._load_json(json_str)", "def loads(self, profile_name: Optional[str] = \"default\", **kwargs):\n bytes_pickle = self._decode_pickle(self.pickle_object)\n self.remote_object = cloudpickle.loads(bytes_pickle)\n self.remote_object.loads(profile_name, **kwargs)", "def unpack(self, obj):\n if obj is None:\n return\n try:\n return json.loads(obj)\n except Exception:\n return obj", "def read_object(self, bucket_name, object_name, offset=0, size=0):\n\n data, done = h3lib.read_object(self._handle, bucket_name, object_name, offset, size, self._user_id)\n if data is None:\n data = b''\n return H3Bytes(data, done=done)", "def get_object(self, key):\n r = self.s3client.get_object(Bucket = self.s3_bucket, Key = key)\n data = r['Body'].read()\n return data", "async def getStorJSONObj(app, key, bucket=None):\n\n client = _getStorageClient(app)\n if not bucket:\n bucket = app['bucket_name']\n if key[0] == '/':\n key = key[1:] # no leading slash\n log.info(f\"getStorJSONObj({bucket})/{key}\")\n\n data = await client.get_object(key, bucket=bucket)\n\n try:\n json_dict = json.loads(data.decode('utf8'))\n except UnicodeDecodeError:\n log.error(f\"Error loading JSON at key: {key}\")\n raise HTTPInternalServerError()\n\n log.debug(f\"storage key {key} returned: {json_dict}\")\n return json_dict", "def _load_model(self, loc):\n\n # If not a string, return input\n if not (isinstance(loc, str) or isinstance(loc, unicode)):\n return loc\n\n # If location is in S3, copy to local, then unpickle \n to_delete = False\n if \"s3\" in loc:\n tmp_loc = \"{0}/tmp_file_{1}.obj\".format(tmpdir, random.randint(1,1000))\n s3 = boto3.client('s3')\n bucket = loc.split(\"/\")[2]\n key = \"/\".join(loc.split(\"/\")[3:])\n with open(tmp_loc, \"wb\") as data:\n s3.download_fileobj(bucket, key, data)\n loc = tmp_loc\n to_delete = True\n with open(loc, \"rb\") as f:\n model = pickle.load(f)\n if to_delete:\n os.remove(tmp_loc)\n return model", "def convert_json_to_object(file_content):\n object = json.loads(file_content)\n print(object)\n return object", "def loads(cls, raw: bytes) -> 'Tag':\n meta = json.loads(raw.decode('utf-8'))\n return cls(\n training=cls.Training(\n timestamp=cls._strptime(meta['training']['timestamp']), ordinal=meta['training']['ordinal']\n ),\n tuning=cls.Tuning(timestamp=cls._strptime(meta['tuning']['timestamp']), score=meta['tuning']['score']),\n states=(uuid.UUID(s) for s in meta['states']),\n )", "def from_json(cls, s):\n\n d = json.loads(s, object_pairs_hook=OrderedDict)\n return cls.from_definition(d)", "def from_json(cls, s):\n\n d = json.loads(s, object_pairs_hook=OrderedDict)\n return 
cls.from_definition(d)", "def load(cls, path):\n\n with open(path) as f:\n d = json.load(f, object_pairs_hook=OrderedDict)\n return cls.from_definition(d)", "def load(cls, path):\n\n with open(path) as f:\n d = json.load(f, object_pairs_hook=OrderedDict)\n return cls.from_definition(d)", "def load_from_json_file(filename):\n with open(filename, 'r') as f:\n obj = json.loads(f.read())\n return obj", "def _to_jsonrpc_obj(self, jsonrpcstr):\n return jsonrpc.JsonRpcData.parse(jsonrpcstr)", "def parse_pbobject(source, pb_class):\n if isinstance(source, str):\n return open_pbobject(source, pb_class)\n elif isinstance(source, bytes):\n pb_object = pb_class()\n pb_object.ParseFromString(source)\n return pb_object\n else:\n logging.error(f'cannot parse type {type(source)}')", "def load(obj, dto=None, decode=None):\n assert isinstance(obj, (six.string_types, bytes))\n assert dto is None or isinstance(dto, tuple)\n assert decode is None or isinstance(decode, six.string_types)\n # ensure object is standard json before reusing the api_client deserializer\n # safe_load from ruamel.yaml as it doesn't accidentally convert str\n # to unicode in py2. It also manages both json and yaml equally well\n # Good explanation: https://stackoverflow.com/a/16373377/4717963\n # Safe Load also helps prevent code injection\n if decode:\n if decode == 'base64':\n prep_obj = base64.b64decode(obj)\n else:\n raise ValueError(\"Load's decode option only supports base64\")\n else:\n prep_obj = obj\n loaded_obj = ruamel.yaml.safe_load(prep_obj)\n if dto:\n assert dto[0] in ['cloudbreak']\n assert isinstance(dto[1], six.string_types)\n obj_as_json = dump(loaded_obj)\n response = Response()\n response.data = obj_as_json\n api_clients = {\n 'cloudbreak': config.cb_config.api_client,\n }\n api_client = api_clients[dto[0]]\n return api_client.deserialize(\n response=response,\n response_type=dto[1]\n )\n return loaded_obj", "def _retrieve_blob(self, object_key):\n return self.s3_resource.Object(self.CVE_BUCKET, object_key).get()['Body'].read()", "def from_json(cls, s, **kwargs):\n return loads(s, cls, **kwargs)", "def get_s3_object(self, remote_s3_url):\n try:\n _file = tempfile.mkstemp()[1]\n parsed_s3_path = remote_s3_url.split(\"/\", 3) # s3://bucket-name/key\n remote_bucket = parsed_s3_path[2] # Bucket name\n remote_key = parsed_s3_path[3] # Key\n self.download_file(remote_bucket, remote_key, _file)\n return _file\n except Exception as e:\n message = {'FILE': __file__.split('/')[-1],\n 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e)}\n self.logger.exception(message)\n raise", "def _get_state_file_from_s3(\n self,\n state_file_url: str,\n profile: str = None,\n region: str = None\n ) -> Dict[str, Any]:\n if profile:\n session = boto3.session.Session(profile_name=profile, region_name=region)\n else:\n session = get_boto3_session()\n s3 = session.resource('s3')\n parts = state_file_url[5:].split('/')\n bucket = parts[0]\n filename = \"/\".join(parts[1:])\n key = s3.Object(bucket, filename)\n try:\n state_file = key.get()[\"Body\"].read().decode('utf-8')\n except botocore.exceptions.ClientError as ex:\n if ex.response['Error']['Code'] == 'NoSuchKey':\n raise NoSuchTerraformStateFile(\"Could not find Terraform state file {}\".format(state_file_url))\n raise ex\n return json.loads(state_file)", "def get_s3_objects_from_dict(session, event, object_handler_function):\n\n objects = []\n s3 = session.client(\"s3\")\n # Get the object from the event and show its content type\n for record in event.get(\"Records\", []):\n bucket = 
record[\"s3\"][\"bucket\"][\"name\"]\n unprocessed_key = record[\"s3\"][\"object\"][\"key\"]\n # urllib changes structure and encoding is different\n # between python 2 and 3\n key = (\n urllib.parse.unquote_plus(unprocessed_key)\n # if sys.version_info[0] >= 3\n # else urllib.unquote_plus(unprocessed_key.encode(\"utf-8\"))\n )\n logging.info(\"Bucket: %s. Key: %s\", bucket, key)\n\n # get S3 object and add it to return list\n response = s3.get_object(Bucket=bucket, Key=key)\n objects.append(object_handler_function(response))\n return objects", "async def putStorJSONObj(app, key, json_obj, bucket=None):\n\n client = _getStorageClient(app)\n if not bucket:\n bucket = app['bucket_name']\n if key[0] == '/':\n key = key[1:] # no leading slash\n log.info(f\"putS3JSONObj({bucket}/{key})\")\n data = json.dumps(json_obj)\n data = data.encode('utf8')\n\n rsp = await client.put_object(key, data, bucket=bucket)\n\n return rsp", "def load_file(file_path):\n with gzip.open(file_path, \"rb\") as fp:\n return json.loads(fp.read().decode('utf-8'))", "def _read_json(cls, input_file):\n with open(input_file, 'rb') as f:\n return json.load(f)", "def _read_json(cls, input_file):\n with open(input_file, 'rb') as f:\n return json.load(f)", "def from_json(cls, fname):\n d = read_json(fname)\n return cls.from_dict(d)", "def load_json(self):\n\n self.load_json_str(self.get_json_str())", "def dump_job_data(s3, bucket, key, ecosystem, package, version):\n data = s3.read_object(bucket, key)\n timestamp_str = datetime.datetime.utcnow().strftime(\"%Y-%m-%dT%H:%M:%S.%f\")\n filename = \"s3_data_{e}_{p}_{v}_{t}.json\".format(e=ecosystem,\n p=package,\n v=version,\n t=timestamp_str)\n with open(filename, 'w') as fout:\n json.dump(data, fout)", "def stream_object(self, bucket, key) -> StreamingBody:\n return self.resource.Object(bucket, key).get()['Body']", "def load_object(filename):\n\n with gzip.GzipFile(filename, 'rb') as source: result = source.read()\n ret = pickle.loads(result)\n source.close()\n\n return ret", "def json_load(fp):\n with _iotools.open_file(fp, \"r\") as f:\n return json.load(f, cls=DataDecoder)", "def get_s3_object_text(s3_resource: boto3.session.Session.resource, bucket_name: str, key: str) -> str:\n obj = s3_resource.Object(bucket_name, key)\n return obj.get()['Body'].read().decode('utf-8')", "def DeserializeJson(self, json_string, object_to_serialize):\n\n\n object_to_serialize.__dict__ = json.loads(str(json_string))\n return object_to_serialize", "def test_s3_gets_object_content(self):\n mock_s3 = Mock()\n mock_s3_object = Mock()\n s3_response = {'Body': mock_s3_object}\n mock_s3_object.read.return_value = \"file content\"\n mock_s3.get_object.return_value = s3_response\n\n s3_bucket = S3Bucket('bucket_name', s3_client=mock_s3)\n assert s3_bucket.get_content('/file.text') == \\\n 'file content'", "def test_get_object(self):\n err = None\n try:\n response = self.bos.put_object_from_string(self.BUCKET,\n self.KEY,\n \"This is a string.\",\n user_metadata={\"private\": \"private\"})\n except BceServerError as e:\n err = e\n finally:\n self.assertIsNone(err)\n self.check_headers(response)\n\n err = None\n try:\n response = self.bos.get_object(self.BUCKET, self.KEY)\n except BceServerError as e:\n err = e\n finally:\n self.assertIsNone(err)\n\n self.check_headers(response)\n self.assertEqual(response.metadata.etag, '13562b471182311b6eea8d241103e8f0')\n self.assertDictEqual(response.metadata.user_metadata, {u\"private\":u\"private\"})\n data = BytesIO(response.data.read())\n response.data.close()", 
"def deserialize(self, obj):\n try:\n return json.loads(obj.decode('utf-8'))\n except (JSONDecodeError, TypeError, UnicodeDecodeError):\n raise DeserializationError", "def read_json(self, key):\n return json.loads(self.get_object(key))", "def read_s3_file(date):\n \"\"\" history from S3 \"\"\"\n bucket = os.getenv(\"SPOTIFY_BUCKET_NAME\")\n path = os.getenv(\"SPOTIFY_BUCKET_PATH\")\n s3 = boto3.resource('s3')\n try:\n s3.Object(bucket, \"%s/%s.json\" % (path, date)).load()\n except botocore.exceptions.ClientError as e:\n logger.info(\"No existing history file found for %s, %s\" %\n (date, e.response['Error']['Code']))\n if e.response['Error']['Code'] == '404':\n return []\n else:\n logger.warning(\"Unexpected error code returned!\")\n return []\n else:\n logger.info(\"Reading history file for %s\" % date)\n content_object = s3.Object(bucket, \"%s/%s.json\" % (path, date))\n file_content = content_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def parse_s3_url(url):\n parsed_url = urlparse(url)\n if parsed_url.scheme != \"s3\":\n raise ValueError(\"S3 URLs must start with 's3://'\")\n\n bucket = parsed_url.netloc.split(\".\")[0]\n key = parsed_url.path.lstrip(\"/\")\n\n return {\"bucket\": bucket, \"key\": key}" ]
[ "0.6704851", "0.663122", "0.66172427", "0.6463626", "0.63138694", "0.624817", "0.61768293", "0.6076889", "0.603407", "0.60289794", "0.59423906", "0.5903331", "0.5902657", "0.5846186", "0.583783", "0.58322567", "0.571561", "0.56856984", "0.5669956", "0.5647885", "0.56409043", "0.56393653", "0.56380546", "0.5607729", "0.55952793", "0.55811137", "0.55808735", "0.55729955", "0.55618525", "0.5545636", "0.5539539", "0.5533486", "0.5526646", "0.5523905", "0.54425275", "0.5441247", "0.5437407", "0.5421666", "0.5404121", "0.5367744", "0.5364392", "0.53478414", "0.534163", "0.5338842", "0.5335586", "0.5328524", "0.5327045", "0.53147614", "0.53002954", "0.5298474", "0.5294928", "0.5290291", "0.5282055", "0.52814376", "0.5265576", "0.5262817", "0.52616626", "0.526072", "0.5252128", "0.5243683", "0.52416784", "0.5225811", "0.52176815", "0.5209655", "0.5208893", "0.5208415", "0.5207977", "0.52076465", "0.5205112", "0.51990736", "0.51990736", "0.51984024", "0.51984024", "0.5191176", "0.51868427", "0.5183427", "0.5180422", "0.5179656", "0.5174421", "0.5173198", "0.5166777", "0.51639986", "0.515504", "0.5152839", "0.5142584", "0.5142584", "0.5128449", "0.5118301", "0.51033324", "0.5100406", "0.509798", "0.508724", "0.5086371", "0.50835246", "0.5082714", "0.5075794", "0.5070413", "0.50679094", "0.50669324", "0.506481" ]
0.62069356
6
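A minimal usage sketch for the open_remote_pb_object function shown above; the S3 URI and the my_schema_pb2 module are hypothetical stand-ins (not values taken from this document), and the call assumes the companion helpers convert_uri_to_bucket_path and get_string_from_s3_file from the same codebase are importable.

    # hypothetical example: parse a JSON annotation stored on S3 into a protobuf message
    import my_schema_pb2  # assumed: a generated protobuf (pb2) module available on the path

    annotation = open_remote_pb_object(
        's3://example-bucket/annotations/frame_0001.json',  # assumed bucket/key
        my_schema_pb2.Annotation,                           # assumed message class
    )
    print(annotation)  # populated my_schema_pb2.Annotation instance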
Save protobuf (pb2) object to JSON file with our standard indent, key ordering, and other settings. Any calls to save protobuf objects to JSON in this repository should be through this function.
def save_pbobject_as_json(pb_object, save_path): if os.path.isdir(save_path): save_path = os.path.join(save_path, generate_uid_from_pbobject(pb_object) + ".json") assert save_path.endswith(".json"), 'File extension for {} needs to be json.'.format(save_path) with open(save_path, "w", encoding='UTF-8') as _f: json.dump( MessageToDict(pb_object, including_default_value_fields=True, preserving_proto_field_name=True), _f, indent=2, sort_keys=True ) return save_path
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _save(self):\n with open(self.file_path, 'w') as fid:\n json.dump(self.data, fid, indent=4, sort_keys=True)", "def save(self):\n with open(self.file_path, 'w', encoding=Config.ENCODING) as file:\n json.dump(self.data, file, indent=2, ensure_ascii=False)", "def save(self):\n d1 = {}\n with open(self.__file_path, mode=\"w\") as f:\n for k, v in self.__objects.items():\n d1[k] = v.to_dict()\n json.dump(d1, f)", "def save_data(file_to_save, object_to_serialize):\r\n with open(file_to_save, \"w\", encoding=\"utf-8\") as f:\r\n f.write(json.dumps(object_to_serialize, indent=2, ensure_ascii=False))", "def save_to_json_file(my_obj, filename):\n with open(filename, \"w\", encoding=\"utf-8\") as opening:\n json.dump(my_obj, opening)", "def save(self):\n\n\t\tdirectory = os.path.dirname(self.path)\n\n\t\tif not os.path.exists(directory):\n\t\t\tos.makedirs(directory)\n\n\t\twith open(self.path, \"w\") as f:\n\t\t\tf.write(\n\t\t\t\tjson.dumps(\n\t\t\t\t\tself.dump(),\n\t\t\t\t\tindent=4,\n\t\t\t\t\tsort_keys=True\n\t\t\t\t)\n\t\t\t)", "def save_to_json_file(my_obj, filename):\n with open(filename, mode=\"w\", encoding=\"utf-8\") as writer:\n json.dump(my_obj, writer)", "def save_to_json_file(my_obj, filename):\n with open(filename, 'w', encoding='utf-8') as file:\n return file.write(json.dumps(my_obj))", "def save(self):\n a_copy = FileStorage.__objects\n obj_dict = {obj: a_copy[obj].to_dict() for obj in a_copy.keys()}\n with open(FileStorage.__file_path, \"w\") as f:\n json.dump(obj_dict, f)", "def save_to_json_file(my_obj, filename):\n import json\n with open(filename, mode='w', encoding='utf-8') as f:\n json.dump(my_obj, f)", "def write(self, _filepath=None):\n _json_txt = json.dumps(self.json_dict, indent=2)\n self._write_json_text(_json_txt, _filepath)", "def save(self, config_file: typing.TextIO):\n json.dump(self.to_dict(), config_file, indent=4)", "def save(self) -> None:\n with open(dict_path, 'w', encoding='utf-8') as dictionary_file:\n json.dump(self.data, dictionary_file, indent=2, separators=(',', ':'), ensure_ascii=False)", "def save(self):\n with open(FileStorage.__file_path, 'w') as saves:\n copy_dict = {key: self.__objects[key].to_dict()\n for key in self.__objects}\n json.dump(copy_dict, saves)", "def save_to_json_file(my_obj, filename):\n with open(filename, 'w') as file:\n json.dump(my_obj, file)", "def save(self):\n\n with open(FileStorage.__file_path, \"w\") as file:\n dictionary = {}\n for a, b in FileStorage.__objects.items():\n dictionary[a] = b.to_dict()\n ink = json.dumps(dictionary)\n file.write(ink)", "def save(self, json_path):\n with open(json_path, 'w') as f:\n json.dump(self.__dict__, f, indent = 4)", "def save_to_json(filename, struct):\n with open(filename, 'w') as outfile:\n json.dump(struct, outfile, sort_keys=True, indent=4)", "def save(self, filepath):\n with open(filepath, 'w') as f:\n json.dump(self, f, indent=2)", "def save_to_json_file(my_obj, filename):\n import json\n with open(filename, 'w') as file:\n json.dump(my_obj, file)", "def save_to_json_file(my_obj, filename):\n with open(filename, 'w+') as json_file:\n json.dump(my_obj, json_file)", "def save(self, json_path):\n with open(json_path, 'w') as f:\n json.dump(self.__dict__, f, indent=4)", "def save(self, json_path):\n with open(json_path, 'w') as f:\n json.dump(self.__dict__, f, indent=4)", "def save(self, json_path):\n with open(json_path, 'w') as f:\n json.dump(self.__dict__, f, indent=4)", "def save(self, filename):\n content = self.to_dict()\n with open(filename, 'w') as f:\n 
json.dump(content, f)", "def _write_json(self):\n with open(self._file_path, 'w') as f:\n json.dump(self._content, f, indent=4, separators=None,\n encoding='utf-8', sort_keys=False)", "def save(self, path: str):\n with open(path, 'w', encoding='utf-8') as f:\n f.write(self.to_json())", "def saveToFile(self, filename: str):\n with open(filename, 'w') as file:\n serialized = self.serialize()\n file.write(json.dumps(serialized, indent=4))\n self.print('saving to ', filename, ' was successful')\n\n self.has_been_modified = False", "def save_to_json_file(my_obj, filename):\n with open(filename, \"w\") as myfile:\n return myfile.write(json.dumps(my_obj))", "def save(self, filename):\n with open(filename, \"w\") as f:\n m = {\n \"order\": self.order,\n \"pad\": self.pad,\n \"records\": {str(k): v for k, v in self.records.items()}\n }\n json.dump(m, f)", "def save_to_json_file(my_obj, filename):\n with open(filename, mode='w') as file:\n file.write(json.dumps(my_obj))", "def save_to_json_file(my_obj, filename):\n import json\n with open(filename, 'w', encoding='utf-8') as f:\n obj = json.dumps(my_obj)\n f.write(obj)", "def write(self, obj):\n\n with open(os.path.join(os.getcwd(), 'windsor.json'), 'w') as buf:\n buf.write(self.schema.dumps(obj, indent=4))", "def save_to_json_file(my_obj, filename):\n with open(filename, \"w\") as f:\n j = json.dumps(my_obj)\n f.write(j)\n f.close()", "def save(self):\n with open(self.__file_path, \"w\", encoding=\"UTF-8\") as file:\n parsed_dict = {\n key: value.to_dict()\n for key, value in self.__objects.items()\n }\n save_data(parsed_dict, file)", "def SaveToJSON(self):\n import json\n\n f = open(f\"/Cache/{self.symbol}.JSON\", \"w\")\n j = {\"name\": self.name, \"symbol\": self.symbol}\n\n f.write(\"{\\\"name\\\":\\\"\" + str(self.name) + \"\\\", \")\n f.write(json.dumps(j))\n f.close()\n\n print(\"Warning: SaveToJSON not fully implemented.\")", "def save_to_json_file(my_obj, filename):\n with open(filename, 'w') as json_file:\n written = json_file.write(json.dumps(my_obj))\n return written", "def save_json(self, file):\n with open(file, 'w', encoding='utf8') as f:\n json.dump(self, f, ensure_ascii=False)", "def save_json(self, file: Union[str, TextIO]) -> None:\n if hasattr(file, 'write'):\n file_ctx = nullcontext(file)\n else:\n file_ctx = open(file, 'w')\n\n with file_ctx as fp:\n for d in self:\n json.dump(d.dict(), fp)\n fp.write('\\n')", "def saveParamsJSON(self, saveAs):\n try:\n if saveAs or self.associatedFile == None:\n fc.setFileFilter(PlayerFilter())\n if returnVal == JFileChooser.APPROVE_OPTION and fc.getSelectedFile() != None:\n if toFile.__name__.contains(\".\"):\n self.associatedFile = File(toFile.getParentFile(), toFile.__name__.substring(0, toFile.__name__.lastIndexOf(\".\")) + \".player\")\n else:\n self.associatedFile = File(toFile.getParentFile(), toFile.__name__ + \".player\")\n self.associatedFileField.setText(self.associatedFile.getPath())\n else:\n return\n bw.write(self.params.__str__())\n bw.close()\n self.savedParams = self.params.__str__()\n self.syncJSONtoUI()\n except IOException as ie:\n ie.printStackTrace()", "def write(self):\r\n\r\n with open(self.filename + \".json\", mode='w') as json_file:\r\n json.dump(self.data, json_file, separators=(',', ':'))", "def write_json(self, obj, mode='wb', **kwargs):\n with self.open(mode) as f:\n return json.dump(obj, f, **kwargs)", "def save(self, file):\n\n import json\n\n if hasattr(file, 'write'):\n f = file\n else:\n f = open(file, 'w')\n output = json.dumps(self, default=lambda v: 
v.__dict__)\n f.write(output)\n f.close()", "def save(self):\n\n toStore = {\n key: obj.to_dict()\n for key, obj in FileStorage.__objects.items()\n }\n with open(FileStorage.__file_path, 'wt') as file:\n json.dump(toStore, file)", "def save(self, data, outpath):\n with open(path, \"wt\") as open_file:\n json.dump(data, open_file, indent=4)", "def Save(self, filename: str):\n data_object = {\n \"input_layer_count\" : self.input_layer_size,\n \"hidden_layer_count\" : self.hidden_layer_size,\n \"output_layer_count\" : self.output_layer_size,\n\n \"hidden_layer_biases\" : self.hidden_layer_biases.tolist(),\n \"output_layer_biases\" : self.output_layer_biases.tolist(),\n\n \"input_to_hidden_weights\" : self.input_to_hidden_weights.tolist(),\n \"hidden_to_output_weights\" : self.hidden_to_output_weights.tolist()\n }\n\n with open(filename, \"w\") as f:\n json.dump(data_object, f)", "def save(self):\n f = open(self.file.name, 'w')\n json.dump(self.data,\n f,\n indent=4)\n f.close()\n return True", "def save(self):\n with open(self._config, 'w') as f:\n json.dump(self.data, f, indent=2, sort_keys=True)", "def save_json(self, name, object_dict: Union[dict, list]):\r\n with open_(self._path_for_json(name), \"w+\") as f:\r\n json.dump(object_dict, f, indent=4)", "def save(self, filepath: str):\n with open(filepath, 'w') as fp:\n json.dump(\n self.dict(),\n fp,\n )", "def save_json(obj,path,indent=4,sort_keys=False,mkdirs=True): \n if mkdirs:\n ensure_dir(path)\n with open(path,'w') as file:\n json.dump(obj,file,indent=indent,sort_keys=sort_keys)", "def serialize(self):\n keys = [\n 'uid',\n 'commit_sha',\n 'timestamp',\n 'filename',\n 'comment',\n 'train_data',\n 'val_data',\n 'test_data',\n 'model_files',\n 'custom_data',\n ]\n data = {key: self.__dict__[key] for key in keys}\n with open(os.path.join(self.root_path, self._data_file), 'w') as file:\n json.dump(data, file)", "def write_to_json(self):\r\n logging.info('Writing records to JSON')\r\n with open(self.backup, 'w') as fp:\r\n json.dump(self.record, fp)\r\n logging.info(\"Finished writing records to JSON\")", "def to_json(self, path):\n with open(path, \"w\") as dump:\n json.dump(self.__dict__, dump)", "def save(self):\r\n with open(self.filename, 'w') as f:\r\n if self.pretty:\r\n json.dump(self.__config, f, sort_keys=False,\r\n indent=4, separators=(',', ': '))\r\n else:\r\n json.dump(self.__config, f)", "def save_model(self, model_file):\n m = {'b':self.b,\n 'w':self.w.tolist()}\n\n with open(model_file, 'w') as f:\n json.dump(m, f)", "def save(self, file):\n with open(file, \"w+\") as f:\n json.dump(self.dict(), f)", "def save_file(obj, file_path):\n with gzip.open(file_path, \"wb\") as fp:\n fp.write(json.dumps(obj).encode('utf-8'))", "def save(self, path):\n if not is_dry():\n with open(path, 'w') as f:\n json.dump(self.to_dict(), f, indent=4)\n return path", "def to_json(obj: ConfiguredBaseModel, file: str):\n if file:\n with open(file, \"w\") as f:\n f.write(obj.json(indent=4))\n console.print(f\"\\nOutput written to {file}\\n\")\n else:\n print_json(obj.json(indent=4))", "def save_data(self):\n try:\n with open('blockchain-{}.txt'.format(self.node_id), mode='w') as f:\n # save the block object like a dictionary\n saveable_chain = [block.__dict__ for block in \n [Block(block_el.index, block_el.previous_hash, [tx.__dict__ for tx in block_el.transactions] , block_el.proof, block_el.timestamp) \n for block_el in self.__blockchain]]\n f.write(json.dumps(saveable_chain))\n f.write('\\n')\n saveable_transactions = [tx.__dict__ for tx in 
self.__open_transactions]\n f.write(json.dumps(saveable_transactions))\n # save the connected nodes\n f.write('\\n')\n f.write(json.dumps(list(self.__peer_nodes))) \n except IOError:\n print('Saving failed')", "def save_data(self):\n try:\n with open('blockchain-{}.txt'.format(self.node_id), mode='w') as f:\n saveable_chain = [block.__dict__ for block in [Block(block_el.index, block_el.previous_hash, \n [tx.__dict__ for tx in block_el.transactions], \n [tx.__dict__ for tx in block_el.chipsactions],\n [tx.__dict__ for tx in block_el.messsactions],\n block_el.proof, block_el.timestamp) for block_el in self.__chain]]\n f.write(json.dumps(saveable_chain))\n f.write('\\n')\n saveable_tx = [tx.__dict__ for tx in self.__open_transactions]\n f.write(json.dumps(saveable_tx))\n f.write('\\n')\n saveable_chip = [tx.__dict__ for tx in self.__open_chipsactions]\n f.write(json.dumps(saveable_chip))\n f.write('\\n')\n saveable_chip = [tx.__dict__ for tx in self.__open_messsactions]\n f.write(json.dumps(saveable_chip))\n f.write('\\n')\n f.write(json.dumps(list(self.__peer_nodes)))\n except IOError:\n print('Saving failed!')", "def save(self):\n if not self.fileKey:\n log.error(\"attempted to save a closed wallet\")\n return\n encrypted = self.fileKey.encrypt(tinyjson.dump(self).encode()).hex()\n w = tinyjson.dump({\n \"keyparams\": self.fileKey.params(),\n \"wallet\": encrypted,\n })\n helpers.saveFile(self.path, w)", "def render_saved_game_proto(saved_game_proto, output_dir, prefix='', json_only=False):\n saved_game = proto_to_dict(saved_game_proto)\n if json_only:\n os.makedirs(os.path.join(output_dir, 'json'), exist_ok=True)\n output_path = os.path.join(output_dir, 'json', prefix + '_' + saved_game['id'] + '.json')\n with open(output_path, 'w') as file:\n file.write(json.dumps(saved_game))\n print('Saved JSON for {}'.format(saved_game['id']))\n else:\n render_saved_game(saved_game, output_dir, prefix)", "def to_json_file(self, path):\n with open(path, 'w') as f:\n f.write(self.to_json())", "def write_complex_json(filepath, obj):\n\n with open(filepath, 'w', encoding='utf-8') as file_obj:\n json.dump(obj, file_obj, cls=ExtendedEncoder, ensure_ascii=False, indent=2)", "def write(self):\n self.json_o.write()", "def save_json(dict_obj, path, name):\n if 'txt' not in name:\n name += '.json'\n with open(os.path.join(path, name), 'w') as json_file:\n json.dump(dict_obj, json_file)", "def save(self, path_or_file, strict=True, fmt='auto'):\n\n self.validate(strict=strict)\n\n with _open(path_or_file, mode='w', fmt=fmt) as fdesc:\n json.dump(self.__json__, fdesc, indent=2)", "def write_json(obj_to_write: Any, filename: str):\n \n with open(filename, 'w') as json_file:\n json.dump(obj_to_write, json_file, indent=4)", "def save_json_file(save_path, save_data):\n with open(save_path, 'w') as fp:\n json.dump(save_data, fp, sort_keys = True, indent = 4)", "def write_json(obj, fpath):\n mkdir_if_missing(osp.dirname(fpath))\n with open(fpath, 'w', encoding='utf-8') as f:\n json.dump(obj, f, indent=4, separators=(',', ': '), ensure_ascii=False) # 添加中文支持", "def write_savefile(state: PhysicsState, file: Path):\n if file.suffix.lower() != '.json':\n # Ensure a .json suffix.\n file = file.parent / (file.name + '.json')\n log.info(f'Saving to savefile {file.resolve()}')\n\n savefile_json_dict = google.protobuf.json_format.MessageToDict(\n state.as_proto(),\n including_default_value_fields=False,\n preserving_proto_field_name=True,\n use_integers_for_enums=False,\n )\n\n for i, component in 
enumerate(savefile_json_dict['engineering']['components']):\n component['name'] = strings.COMPONENT_NAMES[i]\n\n with open(file, 'w') as outfile:\n json.dump(savefile_json_dict, outfile, indent=2)\n\n return file", "def save_(self):\n if not self._edited:\n return\n data = {'history': self.dump()}\n with open(os.path.join(os.path.dirname(self.arch_handler.dicomdir_path), self.SAVE_NAME), \"w\") as outfile:\n json.dump(data, outfile)\n self._edited = False", "def saveData(self):\n file_location = self.json_File_Location.replace(\".json\", \"_Update.json\")\n json_file = open(file_location, \"w+\")\n json_file.write(json.dumps(self.data, indent=4, separators=(', ', ' : ')))\n json_file.close()", "def serialize_file(cls, obj, file_path='./data.json'):\n file_stream = open(file_path, 'wb')\n json.dump(obj, file_stream, cls=CustomTypeEncoder)\n file_stream.flush()\n file_stream.close()", "def to_file(self, file_name: str) -> None:\n\n with open(file_name, 'w') as fi:\n json.dump(self.to_dict(), fi, indent=1)", "def to_proto(self):\n filename_tensor = array_ops.placeholder(\n shape=[], dtype=dtypes.string, name=\"saver_filename\")\n save_tensor = self._traced_save(filename_tensor)\n restore_op = self._traced_restore(filename_tensor).op\n return saver_pb2.SaverDef(\n filename_tensor_name=filename_tensor.name,\n save_tensor_name=save_tensor.name,\n restore_op_name=restore_op.name,\n version=saver_pb2.SaverDef.V2)", "def save(self):\n if not self._fully_loaded:\n self._load()\n with open(self._path, \"w+\", encoding=self._encoding) as h:\n h.write(\"<!-- | \")\n h.write(json.dumps(self._pack()))\n h.write(\" | -->\")\n h.write(NEWLINE)\n h.write(\"<!-- [[FRONT]] -->\")\n h.write(NEWLINE)\n h.write(self._front)\n h.write(NEWLINE)\n h.write(\"<!-- [[BACK]] -->\")\n h.write(NEWLINE)\n h.write(self._back)\n h.write(NEWLINE)", "def write(self, _filepath=None):\n if _filepath is None:\n _filepath = self.filepath\n _json_txt = json.dumps(self.json_dict, indent=2).splitlines()\n # json.dumps() puts a space bwetween :{ rF2 doesn't\n # So strip it out to make it easier to compare before and after\n _whitespace_removed = []\n for _line in _json_txt:\n _line = _line.replace(': {', ':{', 1)\n\n # For some reason rF2 escapes / in values\n _colon = _line.find(':')\n if _colon:\n _line = _line[:_colon] + _line[_colon:].replace('/', r'\\/')\n _whitespace_removed.append(_line)\n _json_txt = '\\n'.join(_whitespace_removed)\n\n super()._write_json_text(_json_txt, _filepath)", "def to_json(self, fpath):\n import json\n with open(fpath, 'w') as fp:\n json.dump(self.to_dict(), fp)", "def save_to_file(self, filename: str) -> None:\n channels = {\n 'channels': {\n channel: [self._command_entry_to_json(command) for command in commands]\n for (channel, commands) in self.history.items()\n }\n }\n\n # if we can't save it, exit early\n try:\n channel_json = json.dumps(channels)\n with open(filename, 'w') as f:\n f.write(channel_json)\n except:\n return None\n\n self.needs_save = False", "def to_json(self):\n # store the BOM itself on disc (it can get to large to transport as a string)\n entries = BOM.to_json(self)\n instance_id = uuid.uuid4().hex\n target = os.path.join(CADDOK.TMPDIR, u\"bom_%s.json\" % instance_id)\n with open(target, \"w\") as f:\n json.dump(entries, f, cls=ReplaceDatetimeEncoder)\n\n # only send the metadata of the bom\n result = {\n 'context': self._context.to_json(),\n 'use_kernel_operations': self._use_kernel_operations,\n 'id': self.id,\n 'instance_id': instance_id,\n 'assemblyIsTemporary': 
self.assemblyIsTemporary,\n 'assemblyIsModified': self._assemblyIsModified,\n 'user_attributes': filter_dict_serializable(self._user_attributes),\n 'is_selected': self.is_selected,\n }\n return result", "def dump_json(object, filename):\n import json\n\n filename = filename if filename.endswith('.json') else (filename + '.json')\n\n with open(filename, 'w') as f:\n json.dump(object, f, indent=4)", "def save(self, data):\n try:\n with open(self.__filepath, 'w') as file:\n text = jsonpickle.encode(data)\n file.write(text)\n except IOError as e:\n print(e)", "def write(self, obj):\n try:\n self.obj.write(json.dumps(obj))\n setattr(self, \"write\", self.delimited_write)\n except:\n self.bad_obj(obj)", "def _save(self, filename=None):\n if filename is None: # pragma: no cover\n filename = self.filename\n with open(filename, \"w\", encoding=\"utf8\") as notifications_file:\n json.dump([self.notifications, self.users], notifications_file)", "def save_class(self):\n with open(self.savefile, \"w\") as f:\n data = {\"name\": self.name, \"host\": self.host, \"port\": self.port}\n json.dump(data, f)", "def save_data(self):\n try:\n with open(\"blockchain.txt\", mode=\"w\") as f:\n dict_chain = []\n for block in self.__chain:\n temp = Block(\n block.index,\n block.previous_hash,\n [tx.__dict__ for tx in block.transfers],\n block.proof,\n block.timestamp,\n )\n dict_chain.append(temp.__dict__)\n f.write(json.dumps(dict_chain))\n f.write(\"\\n\")\n dict_open_transfers = [tx.__dict__ for tx in self.__open_transfers]\n f.write(json.dumps(dict_open_transfers))\n f.write(\"\\n\")\n f.write(json.dumps(list(self.__peer_nodes)))\n except IOError:\n print(\"Saving Data failed!\")", "def save_data_file(self):\n with open(self.files['data'], 'w') as outfile:\n outfile.write(self.to_json())\n outfile.close()", "def _stringify_proto(obj):\n return obj.SerializeToString()", "def save(self):\r\n if not self.filename:\r\n raise IOError(errors['NoConfigFileYet'])\r\n self.onSave()\r\n stuff = dict()\r\n for thing in ['aliases', 'triggers']:\r\n stuff[thing] = [] # Populate with (args, kwargs) pairs.\r\n if self.config.get('saving', thing):\r\n for c, o in getattr(self, thing).iteritems():\r\n stuff[thing].append(o.serialise())\r\n stuff['variables'] = dict()\r\n if self.config.get('saving', 'variables'):\r\n for v in self.variables:\r\n if hasattr(self, v):\r\n var = getattr(self, v)\r\n if type(var) in self.basicTypes:\r\n stuff['variables'][v] = var\r\n stuff['config'] = self.config.get_dump()\r\n with open(self.filename, 'w') as f:\r\n json.dump(stuff, f, indent = 1, sort_keys = True) # Finally write the completed dictionary.\r", "def output_json(file_name, domain_object, output_dir):\n\n with open(path.join(output_dir, file_name + '.json'), 'w', encoding='utf-8', errors='replace') as out_file:\n json_object = jsonpickle.encode(domain_object)\n out_file.write(json_object)\n out_file.write('\\n')", "def save(self, filepath):\n writer = json.dump if Config.isjson(filepath) else yaml.dump\n with open(filepath, 'w') as f:\n writer(dict(self), f)", "def SaveJSON(self, filename):\n data = {\n 'files': self._files,\n 'ebuilds': self._ebuilds,\n }\n json.dump(data, open(filename, 'w'))", "def writeToJson(inputObj,fileLoc):\n myFile = open(fileLoc,'w')\n json.dump(inputObj, myFile, sort_keys=True, indent=4, separators=(',', ': '))", "def saveFile(self, filename=\"UQModelTest.json\"):\n sd = self.saveDict()\n with open(filename, \"w\") as f:\n json.dump(sd, f, indent=2)", "def _object2proto(self) -> Metadata_PB:\n return 
Metadata_PB(\n name=self.name, id=serialize(self.id), node=serialize(self.node)\n )", "def save(self, settings=None):\n json_string = json.dumps(self.variables)\n with open(self.filepath, 'w', encoding='utf-8') as fh:\n fh.write(json_string)", "def to_json(self, path_to_json: str):\n with open(path_to_json, \"w\") as _f:\n json.dump(self.data, _f)" ]
[ "0.64825857", "0.63305324", "0.62367463", "0.61235774", "0.61045235", "0.6103396", "0.60486585", "0.59887636", "0.5981807", "0.5977813", "0.59710145", "0.5964725", "0.5957643", "0.5951203", "0.5950735", "0.59460485", "0.59422106", "0.5939334", "0.592283", "0.59114516", "0.5911261", "0.59078157", "0.59078157", "0.59078157", "0.589008", "0.5878089", "0.5871514", "0.5866423", "0.5866291", "0.5859703", "0.5857534", "0.5851814", "0.5809771", "0.5799871", "0.57873416", "0.57707566", "0.5752018", "0.5718812", "0.57159877", "0.5712392", "0.5704109", "0.5700596", "0.56963766", "0.56904817", "0.56882304", "0.568491", "0.56786454", "0.5670266", "0.5669114", "0.5664973", "0.56621027", "0.5659653", "0.5646343", "0.5639277", "0.5634384", "0.5633736", "0.563332", "0.56276804", "0.5626478", "0.5620498", "0.56170225", "0.5611967", "0.56093466", "0.5602366", "0.55988353", "0.55968493", "0.55922043", "0.5560084", "0.55552197", "0.5546275", "0.55321413", "0.55316967", "0.55289793", "0.5520412", "0.55159277", "0.5514437", "0.55064774", "0.55007905", "0.54994226", "0.54923344", "0.54915124", "0.54784226", "0.54783326", "0.5475043", "0.54712546", "0.54600805", "0.5448859", "0.54479647", "0.5445335", "0.54334223", "0.54299676", "0.54294175", "0.54215264", "0.5407296", "0.5398718", "0.5396997", "0.53884476", "0.5382073", "0.5380632", "0.5380309" ]
0.72667795
0
Open ontology objects, first attempt to open V2 before trying V1.
def open_ontology_pbobject(ontology_file): try: ontology = parse_pbobject(ontology_file, OntologyV2Pb2) if ontology is not None: logging.info('Successfully loaded Ontology V2 spec.') return ontology except Exception: logging.error('Failed to load ontology file with V2 spec, trying V1 spec.') try: ontology = parse_pbobject(ontology_file, OntologyV1Pb2) if ontology is not None: logging.info('Successfully loaded Ontology V1 spec.') return ontology except Exception: if isinstance(ontology_file, str): logging.error('Failed to load ontology file ' + ontology_file + ' with V1 spec also, returning None.') else: logging.error('Failed to load ontology file with V1 spec also, returning None.')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open(self):\n\n self._key_generator = KeyGenerator()\n\n # A map from LOD to LODHistory instance for all LODs that have\n # been referenced so far:\n self._lod_histories = {}\n\n # This corresponds to the 'nodes' table in a Subversion fs. (We\n # don't need a 'representations' or 'strings' table because we\n # only track file existence, not file contents.)\n self._node_db = _NodeDatabase()\n\n # Start at revision 0 without a root node.\n self._youngest = 0", "def open_feature_ontology_pbobject(ontology_file):\n try:\n ontology = open_pbobject(ontology_file, FeatureOntologyPb2)\n if ontology is not None:\n logging.info('Successfully loaded FeatureOntology spec.')\n return ontology\n except Exception:\n logging.error('Failed to load ontology file' + ontology_file + '.')", "def test1_loading(self):\n\t\tprint \"\\nTEST 1: Loading ontologies from %s folder.\\n=================\" % DATA_FOLDER\n\t\t\n\t\tfor f in os.listdir(DATA_FOLDER):\n\t\t\tif not f.startswith('.'):\n\t\t\t\tprint \"Loading... >\", f\t\t\n\t\t\t\t\n\t\t\t\to = ontospy.Ontology(DATA_FOLDER + f)\n\t\t\t\t\n\t\t\t\tself.assertEqual(type(o), ontospy.Ontology)\n\t\t\t\tprint \"Success.\"", "def Open(self, file_object):", "def open(self) -> None:", "def open(self) -> None:", "def open(self) -> None:", "def os_open_graph( self, ):\r\n pass", "def get_ontology(base_iri='emmo-inferred.owl', verbose=False, name=None):\n\n if (not base_iri.endswith('/')) and (not base_iri.endswith('#')):\n base_iri = '%s#'%base_iri\n if base_iri in default_world.ontologies:\n onto = default_world.ontologies[base_iri]\n else:\n onto = MyOntology(default_world, base_iri, name=name)\n onto._verbose = verbose\n return onto", "def open(self, version, force=False, representation=None,\n reference_depth=0, skip_update_check=False):\n version_full_path = os.path.normpath(version.absolute_full_path)\n\n # delete all the comps and open new one\n #comps = self.fusion.GetCompList().values()\n #for comp_ in comps:\n # comp_.Close()\n\n self.fusion.LoadComp(version_full_path.encode())\n\n rfm = RecentFileManager()\n rfm.add(self.name, version.absolute_full_path)\n\n # set the project_directory\n #self.project_directory = os.path.dirname(version.absolute_path)\n\n # TODO: file paths in different OS'es should be replaced with the current one\n # Check if the file paths are starting with a string matching one of\n # the OS'es project_directory path and replace them with a relative one\n # matching the current OS\n\n # replace paths\n #self.replace_external_paths()\n\n # return True to specify everything was ok and an empty list\n # for the versions those needs to be updated\n return empty_reference_resolution()", "def __init__(self, obo_file=OBO_FILE, optional_attrs=None):\n self.optobj = self._init_optional_attrs(optional_attrs) # OboOptionalAttrs or None\n self.format_version = None # e.g., \"1.2\" of \"format-version:\" line\n self.data_version = None # e.g., \"releases/2016-07-07\" from \"data-version:\" line\n self.typedefs = {}\n\n # True if obo file exists or if a link to an obo file exists.\n print(\"obo_file:\")\n print(obo_file)\n if os.path.isfile(obo_file):\n self.obo_file = obo_file\n # GOTerm attributes that are necessary for any operations:\n else:\n raise Exception(\"COULD NOT READ({OBO})\\n\"\n \"download obo file first\\n \"\n \"[http://geneontology.org/ontology/\"\n \"go-basic.obo]\".format(OBO=obo_file))", "def addOntologyToObject(self, obj):\n i = -1\n for item in obj.ontologyItems.items:\n i = i + 1\n ana = vsdModels.ObjectOntology(\n 
type=vsdModels.OntologyItem(**item).type,\n position=i,\n ontologyItem=vsdModels.APIBase(selfUrl=vsdModels.OntologyItem(**item).selfUrl),\n object=vsdModels.APIBase(selfUrl=obj.selfUrl)\n )\n print(ana.to_struct())\n self.postRequest(\n 'object-ontologies/{0}'.format(\n vsdModels.OntologyItem(**item).type\n ),\n data=ana.to_struct())", "def open(self):\n raise NotImplementedError", "def open(self):\n raise NotImplementedError", "def open(self) -> None:\n\n raise NotImplementedError", "def open( self ):\n pass", "def on_ontology_parse(self, ctx):\n return None", "def importAovs(self):\n\t\tLayersInfo = pickle.load( open( self.aovsPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tfor ao in LayersInfo.keys():\n\t\t\taov.create( ao, LayersInfo[ao]['name'], LayersInfo[ao]['type'], LayersInfo[ao]['enabled'] )\n\t\tmc.refresh( su = 0 )", "def load_gene_ontology(self, file_path):\n\t\tpass", "def test_import_wc2(self):\r\n tree = self.wc2_tree\r\n root = tree.getroot()\r\n assert importer.put_objects(root) == True", "def open_input_files(self):\n self.dictionaryFile = open(self.dictionaryFile, 'r', encoding=self.encoding)\n\n if self.annotationFile :\n self.annotationFile = open(self.annotationFile, 'r', encoding=self.encoding)\n elif self.annotationFile is None:\n try:\n self.annotationFile = open(os.path.join(self.dictionaryPath, self.dictionaryName + '.ann'), 'r', encoding=self.encoding)\n except FileNotFoundError:\n if self.verbose >= 2:\n sys.stdout.write (\"Warning: annotation file is not found.\\n\")\n\n if self.abbreviationsFile :\n self.abbreviationsFile = open(self.abbreviationsFile, 'r', encoding=self.encoding)\n elif self.abbreviationsFile is None:\n try:\n self.abbreviationsFile = open(os.path.join(self.dictionaryPath, self.dictionaryName + '_abrv.dsl'), 'r', encoding=self.encoding)\n except FileNotFoundError:\n if self.verbose >= 2:\n sys.stdout.write (\"Warning: abbreviations file is not found.\\n\")", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self) -> None:\n pass", "def open(self):\n self.solenoid.set(self.OPEN)", "def open_workbooks(self):\n try:\n self.wb_alm = load_workbook(self.fn_alm)\n self.wb_defect = load_workbook(self.fn_defect)\n self.wb_enhancement = load_workbook(self.fn_enhancement)\n self.wb_incident = load_workbook(self.fn_incident)\n self.wb_destination = load_workbook(self.fn_destination)\n\n self.wb_alm.iso_dates = True\n self.wb_defect.iso_dates = True\n self.wb_enhancement.iso_dates = True\n self.wb_incident.iso_dates = True\n self.wb_destination.iso_dates = True\n except Exception as e:\n self.error(str(e))\n return False\n\n return True", "def open(self):", "def open(self):\n raise NotImplementedError( 'Needs implementation' )", "def _load_objects(self):\n self._get_package()\n\n object_names = [name for name in dir(self._sdk) if name != \"GATDLSession\" and name != \"SDKInfo\" and name.startswith(\"GA\") and not name.endswith(\"Fetcher\")]\n\n for object_name in object_names:\n obj = getattr(self._sdk, object_name)\n self._objects_mapping[obj.rest_name] = object_name", "def open(self) -> None:\n raise NotImplementedError()", "def __open(self, file):\n pkg = pptx.packaging.Package().open(file)\n self.__load(pkg.relationships)\n # unmarshal relationships selectively for now\n for rel in self.__relationships:\n if rel._reltype == RT_OFFICEDOCUMENT:\n self.__presentation = rel._target", "def get_oas(cfg, 
version='3.0'):\n\n if version == '3.0':\n return get_oas_30(cfg)\n else:\n raise RuntimeError('OpenAPI version not supported')", "def test_open():\n z = XPIManager(get_path('xpi/install_rdf_only.xpi'))\n assert z is not None", "def connectOntology(ontology, endpoint=None):\r\n world = World()\r\n world.get_ontology(ontology).load()\r\n graph = world.as_rdflib_graph()\r\n if graph:\r\n return graph\r\n else:\r\n print(\"connection failed\")\r\n return", "def sync_from_ontology(self):\n if not self.ontology:\n raise ValueError(\"No associated ontology.\")\n\n self.annotations = self.ontology.get_annotations(self)\n self.labels = self.ontology.get_labels(self)\n self.comments = self.ontology.get_comments(self)\n self.definitions = self.ontology.get_definitions(self)\n self.triples = self.ontology.get_triples(self)\n self.parents = self._get_parents()\n self.children = self._get_children()", "def open(self):\r\n pass", "def open(self):\r\n pass", "def getOntologyItem(self, resource, oType=0):\n\n if isinstance(resource, int):\n resource = 'ontology/{0}/{1}'.format(resource, oType)\n\n res = self.getRequest(resource)\n onto = vsdModels.Ontology(**res)\n\n return onto", "def export_representations(self):\n\n dbpath, config = self._start()\n self.logger.msg1(\"Loading ontology\")\n obo_path = check_file(config.obo, dbpath, \"obo\")\n self.obo = MinimalObo(obo_path, True)\n self._export_reference_representations()\n self._export_model_representations(config)\n self._end()", "def load_obo_file(self, obo_file, optional_attrs, load_obsolete, prt):\n reader = OBOReader(obo_file, optional_attrs)\n\n # Save alt_ids and their corresponding main GO ID. Add to GODag after populating GO Terms\n alt2rec = {}\n i = 0\n for rec in reader:\n # Save record if:\n # 1) Argument load_obsolete is True OR\n # 2) Argument load_obsolete is False and the GO term is \"live\" (not obsolete)\n if load_obsolete or not rec.is_obsolete:\n self[rec.id] = rec\n for alt in rec.alt_ids:\n alt2rec[alt] = rec\n\n # Save the typedefs and parsed optional_attrs\n # self.optobj = reader.optobj\n self.typedefs = reader.typedefs\n self._populate_terms(reader.optobj)\n self._set_level_depth(reader.optobj)\n\n # Add alt_ids to go2obj\n for goid_alt, rec in alt2rec.items():\n self[goid_alt] = rec\n desc = self._str_desc(reader)\n if prt is not None:\n prt.write(\"{DESC}\\n\".format(DESC=desc))\n return desc", "def get_oas_30(cfg):\n\n paths = {}\n\n # TODO: make openapi multilingual (default language only for now)\n server_locales = l10n.get_locales(cfg)\n locale_ = server_locales[0]\n\n api_rules = get_api_rules(cfg)\n\n osl = get_ogc_schemas_location(cfg['server'])\n OPENAPI_YAML['oapif-1'] = os.path.join(osl, 'ogcapi/features/part1/1.0/openapi/ogcapi-features-1.yaml') # noqa\n OPENAPI_YAML['oapif-2'] = os.path.join(osl, 'ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml') # noqa\n\n LOGGER.debug('setting up server info')\n oas = {\n 'openapi': '3.0.2',\n 'tags': []\n }\n info = {\n 'title': l10n.translate(cfg['metadata']['identification']['title'], locale_), # noqa\n 'description': l10n.translate(cfg['metadata']['identification']['description'], locale_), # noqa\n 'x-keywords': l10n.translate(cfg['metadata']['identification']['keywords'], locale_), # noqa\n 'termsOfService':\n cfg['metadata']['identification']['terms_of_service'],\n 'contact': {\n 'name': cfg['metadata']['provider']['name'],\n 'url': cfg['metadata']['provider']['url'],\n 'email': cfg['metadata']['contact']['email']\n },\n 'license': {\n 'name': 
cfg['metadata']['license']['name'],\n 'url': cfg['metadata']['license']['url']\n },\n 'version': api_rules.api_version\n }\n oas['info'] = info\n\n oas['servers'] = [{\n 'url': get_base_url(cfg),\n 'description': l10n.translate(cfg['metadata']['identification']['description'], locale_) # noqa\n }]\n\n paths['/'] = {\n 'get': {\n 'summary': 'Landing page',\n 'description': 'Landing page',\n 'tags': ['server'],\n 'operationId': 'getLandingPage',\n 'parameters': [\n {'$ref': '#/components/parameters/f'},\n {'$ref': '#/components/parameters/lang'}\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/LandingPage\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n paths['/openapi'] = {\n 'get': {\n 'summary': 'This document',\n 'description': 'This document',\n 'tags': ['server'],\n 'operationId': 'getOpenapi',\n 'parameters': [\n {'$ref': '#/components/parameters/f'},\n {'$ref': '#/components/parameters/lang'},\n {\n 'name': 'ui',\n 'in': 'query',\n 'description': 'UI to render the OpenAPI document',\n 'required': False,\n 'schema': {\n 'type': 'string',\n 'enum': ['swagger', 'redoc'],\n 'default': 'swagger'\n },\n 'style': 'form',\n 'explode': False\n },\n ],\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'}\n }\n }\n }\n\n paths['/conformance'] = {\n 'get': {\n 'summary': 'API conformance definition',\n 'description': 'API conformance definition',\n 'tags': ['server'],\n 'operationId': 'getConformanceDeclaration',\n 'parameters': [\n {'$ref': '#/components/parameters/f'},\n {'$ref': '#/components/parameters/lang'}\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/LandingPage\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n paths['/collections'] = {\n 'get': {\n 'summary': 'Collections',\n 'description': 'Collections',\n 'tags': ['server'],\n 'operationId': 'getCollections',\n 'parameters': [\n {'$ref': '#/components/parameters/f'},\n {'$ref': '#/components/parameters/lang'}\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/LandingPage\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n oas['tags'].append({\n 'name': 'server',\n 'description': l10n.translate(cfg['metadata']['identification']['description'], locale_), # noqa\n 'externalDocs': {\n 'description': 'information',\n 'url': cfg['metadata']['identification']['url']}\n }\n )\n oas['tags'].append({\n 'name': 'stac',\n 'description': 'SpatioTemporal Asset Catalog'\n }\n )\n\n oas['components'] = {\n 'responses': {\n '200': {\n 'description': 'successful operation'\n },\n '204': {\n 'description': 'no content'\n },\n 'default': {\n 'description': 'Unexpected error',\n 'content': gen_media_type_object('application/json', 'oapip', 'schemas/exception.yaml') # noqa\n },\n 'Queryables': {\n 'description': 'successful queryables operation',\n 'content': {\n 'application/json': 
{\n 'schema': {'$ref': '#/components/schemas/queryables'}\n }\n }\n }\n },\n 'parameters': {\n 'f': {\n 'name': 'f',\n 'in': 'query',\n 'description': 'The optional f parameter indicates the output format which the server shall provide as part of the response document. The default format is GeoJSON.', # noqa\n 'required': False,\n 'schema': {\n 'type': 'string',\n 'enum': ['json', 'html', 'jsonld'],\n 'default': 'json'\n },\n 'style': 'form',\n 'explode': False\n },\n 'lang': {\n 'name': 'lang',\n 'in': 'query',\n 'description': 'The optional lang parameter instructs the server return a response in a certain language, if supported. If the language is not among the available values, the Accept-Language header language will be used if it is supported. If the header is missing, the default server language is used. Note that providers may only support a single language (or often no language at all), that can be different from the server language. Language strings can be written in a complex (e.g. \"fr-CA,fr;q=0.9,en-US;q=0.8,en;q=0.7\"), simple (e.g. \"de\") or locale-like (e.g. \"de-CH\" or \"fr_BE\") fashion.', # noqa\n 'required': False,\n 'schema': {\n 'type': 'string',\n 'enum': [l10n.locale2str(sl) for sl in server_locales],\n 'default': l10n.locale2str(locale_)\n }\n },\n 'properties': {\n 'name': 'properties',\n 'in': 'query',\n 'description': 'The properties that should be included for each feature. The parameter value is a comma-separated list of property names.', # noqa\n 'required': False,\n 'style': 'form',\n 'explode': False,\n 'schema': {\n 'type': 'array',\n 'items': {\n 'type': 'string'\n }\n }\n },\n 'skipGeometry': {\n 'name': 'skipGeometry',\n 'in': 'query',\n 'description': 'This option can be used to skip response geometries for each feature.', # noqa\n 'required': False,\n 'style': 'form',\n 'explode': False,\n 'schema': {\n 'type': 'boolean',\n 'default': False\n }\n },\n 'crs': {\n 'name': 'crs',\n 'in': 'query',\n 'description': 'Indicates the coordinate reference system for the results.', # noqa\n 'style': 'form',\n 'required': False,\n 'explode': False,\n 'schema': {\n 'format': 'uri',\n 'type': 'string'\n }\n },\n 'bbox': {\n 'name': 'bbox',\n 'in': 'query',\n 'description': 'Only features that have a geometry that intersects the bounding box are selected.' 
# noqa\n 'The bounding box is provided as four or six numbers, depending on whether the ' # noqa\n 'coordinate reference system includes a vertical axis (height or depth).', # noqa\n 'required': False,\n 'style': 'form',\n 'explode': False,\n 'schema': {\n 'type': 'array',\n 'minItems': 4,\n 'maxItems': 6,\n 'items': {\n 'type': 'number'\n }\n }\n },\n 'bbox-crs': {\n 'name': 'bbox-crs',\n 'in': 'query',\n 'description': 'Indicates the coordinate reference system for the given bbox coordinates.', # noqa\n 'style': 'form',\n 'required': False,\n 'explode': False,\n 'schema': {\n 'format': 'uri',\n 'type': 'string'\n }\n },\n # FIXME: This is not compatible with the bbox-crs definition in\n # OGCAPI Features Part 2!\n # We need to change the mapscript provider and\n # get_collection_map() method in the API!\n # So this is for de map-provider only.\n 'bbox-crs-epsg': {\n 'name': 'bbox-crs',\n 'in': 'query',\n 'description': 'Indicates the EPSG for the given bbox coordinates.', # noqa\n 'required': False,\n 'style': 'form',\n 'explode': False,\n 'schema': {\n 'type': 'integer',\n 'default': 4326\n }\n },\n 'offset': {\n 'name': 'offset',\n 'in': 'query',\n 'description': 'The optional offset parameter indicates the index within the result set from which the server shall begin presenting results in the response document. The first element has an index of 0 (default).', # noqa\n 'required': False,\n 'schema': {\n 'type': 'integer',\n 'minimum': 0,\n 'default': 0\n },\n 'style': 'form',\n 'explode': False\n },\n 'vendorSpecificParameters': {\n 'name': 'vendorSpecificParameters',\n 'in': 'query',\n 'description': 'Additional \"free-form\" parameters that are not explicitly defined', # noqa\n 'schema': {\n 'type': 'object',\n 'additionalProperties': True\n },\n 'style': 'form'\n }\n },\n 'schemas': {\n # TODO: change this schema once OGC will definitively publish it\n 'queryable': {\n 'type': 'object',\n 'required': [\n 'queryable',\n 'type'\n ],\n 'properties': {\n 'queryable': {\n 'description': 'the token that may be used in a CQL predicate', # noqa\n 'type': 'string'\n },\n 'title': {\n 'description': 'a human readable title for the queryable', # noqa\n 'type': 'string'\n },\n 'description': {\n 'description': 'a human-readable narrative describing the queryable', # noqa\n 'type': 'string'\n },\n 'language': {\n 'description': 'the language used for the title and description', # noqa\n 'type': 'string',\n 'default': [\n 'en'\n ]\n },\n 'type': {\n 'description': 'the data type of the queryable', # noqa\n 'type': 'string'\n },\n 'type-ref': {\n 'description': 'a reference to the formal definition of the type', # noqa\n 'type': 'string',\n 'format': 'url'\n }\n }\n },\n 'queryables': {\n 'type': 'object',\n 'required': [\n 'queryables'\n ],\n 'properties': {\n 'queryables': {\n 'type': 'array',\n 'items': {'$ref': '#/components/schemas/queryable'}\n }\n }\n }\n }\n }\n\n items_f = deepcopy(oas['components']['parameters']['f'])\n items_f['schema']['enum'].append('csv')\n items_l = deepcopy(oas['components']['parameters']['lang'])\n\n LOGGER.debug('setting up datasets')\n collections = filter_dict_by_key_value(cfg['resources'],\n 'type', 'collection')\n\n for k, v in collections.items():\n if v.get('visibility', 'default') == 'hidden':\n LOGGER.debug(f'Skipping hidden layer: {k}')\n continue\n name = l10n.translate(k, locale_)\n title = l10n.translate(v['title'], locale_)\n desc = l10n.translate(v['description'], locale_)\n collection_name_path = f'/collections/{k}'\n tag = {\n 'name': name,\n 
'description': desc,\n 'externalDocs': {}\n }\n for link in l10n.translate(v.get('links', []), locale_):\n if link['type'] == 'information':\n tag['externalDocs']['description'] = link['type']\n tag['externalDocs']['url'] = link['url']\n break\n if len(tag['externalDocs']) == 0:\n del tag['externalDocs']\n\n oas['tags'].append(tag)\n\n paths[collection_name_path] = {\n 'get': {\n 'summary': f'Get {title} metadata',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'describe{name.capitalize()}Collection',\n 'parameters': [\n {'$ref': '#/components/parameters/f'},\n {'$ref': '#/components/parameters/lang'}\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/Collection\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n LOGGER.debug('setting up collection endpoints')\n try:\n ptype = None\n\n if filter_providers_by_type(\n collections[k]['providers'], 'feature'):\n ptype = 'feature'\n\n if filter_providers_by_type(\n collections[k]['providers'], 'record'):\n ptype = 'record'\n\n p = load_plugin('provider', get_provider_by_type(\n collections[k]['providers'], ptype))\n\n items_path = f'{collection_name_path}/items'\n\n coll_properties = deepcopy(oas['components']['parameters']['properties']) # noqa\n\n coll_properties['schema']['items']['enum'] = list(p.fields.keys())\n\n paths[items_path] = {\n 'get': {\n 'summary': f'Get {title} items',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}Features',\n 'parameters': [\n items_f,\n items_l,\n {'$ref': '#/components/parameters/bbox'},\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/limit\"}, # noqa\n {'$ref': '#/components/parameters/crs'}, # noqa\n {'$ref': '#/components/parameters/bbox-crs'}, # noqa\n coll_properties,\n {'$ref': '#/components/parameters/vendorSpecificParameters'}, # noqa\n {'$ref': '#/components/parameters/skipGeometry'},\n {'$ref': f\"{OPENAPI_YAML['oapir']}/parameters/sortby.yaml\"}, # noqa\n {'$ref': '#/components/parameters/offset'},\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/Features\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n },\n 'options': {\n 'summary': f'Options for {title} items',\n 'tags': [name],\n 'operationId': f'options{name.capitalize()}Features',\n 'responses': {\n '200': {'description': 'options response'}\n }\n }\n }\n\n if p.editable:\n LOGGER.debug('Provider is editable; adding post')\n\n paths[items_path]['post'] = {\n 'summary': f'Add {title} items',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'add{name.capitalize()}Features',\n 'requestBody': {\n 'description': 'Adds item to collection',\n 'content': {\n 'application/geo+json': {\n 'schema': {}\n }\n },\n 'required': True\n },\n 'responses': {\n '201': {'description': 'Successful creation'},\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n\n try:\n schema_ref = 
p.get_schema(SchemaType.create)\n paths[items_path]['post']['requestBody']['content'][schema_ref[0]] = { # noqa\n 'schema': schema_ref[1]\n }\n except Exception as err:\n LOGGER.debug(err)\n\n if ptype == 'record':\n paths[items_path]['get']['parameters'].append(\n {'$ref': f\"{OPENAPI_YAML['oapir']}/parameters/q.yaml\"})\n if p.fields:\n queryables_path = f'{collection_name_path}/queryables'\n\n paths[queryables_path] = {\n 'get': {\n 'summary': f'Get {title} queryables',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}Queryables',\n 'parameters': [\n items_f,\n items_l\n ],\n 'responses': {\n '200': {'$ref': '#/components/responses/Queryables'}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"}, # noqa\n }\n }\n }\n\n if p.time_field is not None:\n paths[items_path]['get']['parameters'].append(\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime\"}) # noqa\n\n for field, type_ in p.fields.items():\n\n if p.properties and field not in p.properties:\n LOGGER.debug('Provider specified not to advertise property') # noqa\n continue\n\n if field == 'q' and ptype == 'record':\n LOGGER.debug('q parameter already declared, skipping')\n continue\n\n if type_ == 'date':\n schema = {\n 'type': 'string',\n 'format': 'date'\n }\n elif type_ == 'float':\n schema = {\n 'type': 'number',\n 'format': 'float'\n }\n elif type_ == 'long':\n schema = {\n 'type': 'integer',\n 'format': 'int64'\n }\n else:\n schema = type_\n\n path_ = f'{collection_name_path}/items'\n paths[path_]['get']['parameters'].append({\n 'name': field,\n 'in': 'query',\n 'required': False,\n 'schema': schema,\n 'style': 'form',\n 'explode': False\n })\n\n paths[f'{collection_name_path}/items/{{featureId}}'] = {\n 'get': {\n 'summary': f'Get {title} item by id',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}Feature',\n 'parameters': [\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/featureId\"}, # noqa\n {'$ref': '#/components/parameters/crs'}, # noqa\n {'$ref': '#/components/parameters/f'},\n {'$ref': '#/components/parameters/lang'}\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/Feature\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n },\n 'options': {\n 'summary': f'Options for {title} item by id',\n 'tags': [name],\n 'operationId': f'options{name.capitalize()}Feature',\n 'parameters': [\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/featureId\"} # noqa\n ],\n 'responses': {\n '200': {'description': 'options response'}\n }\n }\n }\n\n try:\n schema_ref = p.get_schema()\n paths[f'{collection_name_path}/items/{{featureId}}']['get']['responses']['200'] = { # noqa\n 'content': {\n schema_ref[0]: {\n 'schema': schema_ref[1]\n }\n }\n }\n except Exception as err:\n LOGGER.debug(err)\n\n if p.editable:\n LOGGER.debug('Provider is editable; adding put/delete')\n put_path = f'{collection_name_path}/items/{{featureId}}' # noqa\n paths[put_path]['put'] = { # noqa\n 'summary': f'Update {title} items',\n 'description': 
desc,\n 'tags': [name],\n 'operationId': f'update{name.capitalize()}Features',\n 'parameters': [\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/featureId\"} # noqa\n ],\n 'requestBody': {\n 'description': 'Updates item in collection',\n 'content': {\n 'application/geo+json': {\n 'schema': {}\n }\n },\n 'required': True\n },\n 'responses': {\n '204': {'$ref': '#/components/responses/204'},\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n\n try:\n schema_ref = p.get_schema(SchemaType.replace)\n paths[put_path]['put']['requestBody']['content'][schema_ref[0]] = { # noqa\n 'schema': schema_ref[1]\n }\n except Exception as err:\n LOGGER.debug(err)\n\n paths[f'{collection_name_path}/items/{{featureId}}']['delete'] = { # noqa\n 'summary': f'Delete {title} items',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'delete{name.capitalize()}Features',\n 'parameters': [\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/featureId\"}, # noqa\n ],\n 'responses': {\n '200': {'description': 'Successful delete'},\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n\n except ProviderTypeError:\n LOGGER.debug('collection is not feature based')\n\n LOGGER.debug('setting up coverage endpoints')\n try:\n load_plugin('provider', get_provider_by_type(\n collections[k]['providers'], 'coverage'))\n\n coverage_path = f'{collection_name_path}/coverage'\n\n paths[coverage_path] = {\n 'get': {\n 'summary': f'Get {title} coverage',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}Coverage',\n 'parameters': [\n items_f,\n items_l,\n {'$ref': '#/components/parameters/bbox'},\n {'$ref': '#/components/parameters/bbox-crs'}, # noqa\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/Features\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n coverage_domainset_path = f'{collection_name_path}/coverage/domainset' # noqa\n\n paths[coverage_domainset_path] = {\n 'get': {\n 'summary': f'Get {title} coverage domain set',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}CoverageDomainSet',\n 'parameters': [\n items_f,\n items_l\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oacov']}/schemas/cis_1.1/domainSet.yaml\"}, # noqa\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n coverage_rangetype_path = f'{collection_name_path}/coverage/rangetype' # noqa\n\n paths[coverage_rangetype_path] = {\n 'get': {\n 'summary': f'Get {title} coverage range type',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}CoverageRangeType',\n 'parameters': [\n items_f,\n items_l\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oacov']}/schemas/cis_1.1/rangeType.yaml\"}, # noqa\n '400': 
{'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n except ProviderTypeError:\n LOGGER.debug('collection is not coverage based')\n\n LOGGER.debug('setting up tiles endpoints')\n tile_extension = filter_providers_by_type(\n collections[k]['providers'], 'tile')\n\n if tile_extension:\n tp = load_plugin('provider', tile_extension)\n oas['components']['responses'].update({\n 'Tiles': {\n 'description': 'Retrieves the tiles description for this collection', # noqa\n 'content': {\n 'application/json': {\n 'schema': {\n '$ref': '#/components/schemas/tiles'\n }\n }\n }\n }\n }\n )\n\n oas['components']['schemas'].update({\n 'tilematrixsetlink': {\n 'type': 'object',\n 'required': ['tileMatrixSet'],\n 'properties': {\n 'tileMatrixSet': {\n 'type': 'string'\n },\n 'tileMatrixSetURI': {\n 'type': 'string'\n }\n }\n },\n 'tiles': {\n 'type': 'object',\n 'required': [\n 'tileMatrixSetLinks',\n 'links'\n ],\n 'properties': {\n 'tileMatrixSetLinks': {\n 'type': 'array',\n 'items': {\n '$ref': '#/components/schemas/tilematrixsetlink' # noqa\n }\n },\n 'links': {\n 'type': 'array',\n 'items': {'$ref': f\"{OPENAPI_YAML['oapit']}#/components/schemas/link\"} # noqa\n }\n }\n }\n }\n )\n\n tiles_path = f'{collection_name_path}/tiles'\n\n paths[tiles_path] = {\n 'get': {\n 'summary': f'Fetch a {title} tiles description',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'describe{name.capitalize()}Tiles',\n 'parameters': [\n items_f,\n # items_l TODO: is this useful?\n ],\n 'responses': {\n '200': {'$ref': '#/components/responses/Tiles'},\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n\n tiles_data_path = f'{collection_name_path}/tiles/{{tileMatrixSetId}}/{{tileMatrix}}/{{tileRow}}/{{tileCol}}' # noqa\n\n paths[tiles_data_path] = {\n 'get': {\n 'summary': f'Get a {title} tile',\n 'description': desc,\n 'tags': [name],\n 'operationId': f'get{name.capitalize()}Tiles',\n 'parameters': [\n {'$ref': f\"{OPENAPI_YAML['oat']}#/components/parameters/tileMatrixSetId\"}, # noqa\n {'$ref': f\"{OPENAPI_YAML['oat']}#/components/parameters/tileMatrix\"}, # noqa\n {'$ref': f\"{OPENAPI_YAML['oat']}#/components/parameters/tileRow\"}, # noqa\n {'$ref': f\"{OPENAPI_YAML['oat']}#/components/parameters/tileCol\"}, # noqa\n {\n 'name': 'f',\n 'in': 'query',\n 'description': 'The optional f parameter indicates the output format which the server shall provide as part of the response document.', # noqa\n 'required': False,\n 'schema': {\n 'type': 'string',\n 'enum': [tp.format_type],\n 'default': tp.format_type\n },\n 'style': 'form',\n 'explode': False\n }\n ],\n 'responses': {\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/NotFound\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"} # noqa\n }\n }\n }\n mimetype = tile_extension['format']['mimetype']\n paths[tiles_data_path]['get']['responses']['200'] = {\n 'description': 'successful operation',\n 'content': {\n mimetype: {\n 'schema': {\n 'type': 
'string',\n 'format': 'binary'\n }\n }\n }\n }\n\n LOGGER.debug('setting up edr endpoints')\n edr_extension = filter_providers_by_type(\n collections[k]['providers'], 'edr')\n\n if edr_extension:\n ep = load_plugin('provider', edr_extension)\n\n edr_query_endpoints = []\n\n for qt in ep.get_query_types():\n edr_query_endpoints.append({\n 'path': f'{collection_name_path}/{qt}',\n 'qt': qt,\n 'op_id': f'query{qt.capitalize()}{k.capitalize()}'\n })\n if ep.instances:\n edr_query_endpoints.append({\n 'path': f'{collection_name_path}/instances/{{instanceId}}/{qt}', # noqa\n 'qt': qt,\n 'op_id': f'query{qt.capitalize()}Instance{k.capitalize()}' # noqa\n })\n\n for eqe in edr_query_endpoints:\n paths[eqe['path']] = {\n 'get': {\n 'summary': f\"query {v['description']} by {eqe['qt']}\", # noqa\n 'description': v['description'],\n 'tags': [k],\n 'operationId': eqe['op_id'],\n 'parameters': [\n {'$ref': f\"{OPENAPI_YAML['oaedr']}/parameters/{eqe['qt']}Coords.yaml\"}, # noqa\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime\"}, # noqa\n {'$ref': f\"{OPENAPI_YAML['oaedr']}/parameters/parameter-name.yaml\"}, # noqa\n {'$ref': f\"{OPENAPI_YAML['oaedr']}/parameters/z.yaml\"}, # noqa\n {'$ref': '#/components/parameters/f'}\n ],\n 'responses': {\n '200': {\n 'description': 'Response',\n 'content': {\n 'application/prs.coverage+json': {\n 'schema': {\n '$ref': f\"{OPENAPI_YAML['oaedr']}/schemas/coverageJSON.yaml\" # noqa\n }\n }\n }\n }\n }\n }\n }\n\n LOGGER.debug('setting up maps endpoints')\n map_extension = filter_providers_by_type(\n collections[k]['providers'], 'map')\n\n if map_extension:\n mp = load_plugin('provider', map_extension)\n\n map_f = deepcopy(oas['components']['parameters']['f'])\n map_f['schema']['enum'] = [map_extension['format']['name']]\n map_f['schema']['default'] = map_extension['format']['name']\n\n pth = f'/collections/{k}/map'\n paths[pth] = {\n 'get': {\n 'summary': 'Get map',\n 'description': f\"{v['description']} map\",\n 'tags': [k],\n 'operationId': 'getMap',\n 'parameters': [\n {'$ref': '#/components/parameters/bbox'},\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime\"}, # noqa\n {\n 'name': 'width',\n 'in': 'query',\n 'description': 'Response image width',\n 'required': False,\n 'schema': {\n 'type': 'integer',\n },\n 'style': 'form',\n 'explode': False\n },\n {\n 'name': 'height',\n 'in': 'query',\n 'description': 'Response image height',\n 'required': False,\n 'schema': {\n 'type': 'integer',\n },\n 'style': 'form',\n 'explode': False\n },\n {\n 'name': 'transparent',\n 'in': 'query',\n 'description': 'Background transparency of map (default=true).', # noqa\n 'required': False,\n 'schema': {\n 'type': 'boolean',\n 'default': True,\n },\n 'style': 'form',\n 'explode': False\n },\n {'$ref': '#/components/parameters/bbox-crs-epsg'},\n map_f\n ],\n 'responses': {\n '200': {\n 'description': 'Response',\n 'content': {\n 'application/json': {}\n }\n },\n '400': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError\"}, # noqa\n }\n }\n }\n if mp.time_field is not None:\n paths[pth]['get']['parameters'].append(\n {'$ref': f\"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime\"}) # noqa\n\n LOGGER.debug('setting up STAC')\n stac_collections = filter_dict_by_key_value(cfg['resources'],\n 'type', 'stac-collection')\n if stac_collections:\n paths['/stac'] = {\n 'get': {\n 'summary': 'SpatioTemporal Asset Catalog',\n 
'description': 'SpatioTemporal Asset Catalog',\n 'tags': ['stac'],\n 'operationId': 'getStacCatalog',\n 'parameters': [],\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n 'default': {'$ref': '#/components/responses/default'}\n }\n }\n }\n\n process_manager = get_manager(cfg)\n\n if len(process_manager.processes) > 0:\n paths['/processes'] = {\n 'get': {\n 'summary': 'Processes',\n 'description': 'Processes',\n 'tags': ['server'],\n 'operationId': 'getProcesses',\n 'parameters': [\n {'$ref': '#/components/parameters/f'}\n ],\n 'responses': {\n '200': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/ProcessList.yaml\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'}\n }\n }\n }\n LOGGER.debug('setting up processes')\n\n for k, v in process_manager.processes.items():\n if k.startswith('_'):\n LOGGER.debug(f'Skipping hidden layer: {k}')\n continue\n name = l10n.translate(k, locale_)\n p = process_manager.get_processor(k)\n md_desc = l10n.translate(p.metadata['description'], locale_)\n process_name_path = f'/processes/{name}'\n tag = {\n 'name': name,\n 'description': md_desc, # noqa\n 'externalDocs': {}\n }\n for link in p.metadata.get('links', []):\n if link['type'] == 'information':\n translated_link = l10n.translate(link, locale_)\n tag['externalDocs']['description'] = translated_link[\n 'type']\n tag['externalDocs']['url'] = translated_link['url']\n break\n if len(tag['externalDocs']) == 0:\n del tag['externalDocs']\n\n oas['tags'].append(tag)\n\n paths[process_name_path] = {\n 'get': {\n 'summary': 'Get process metadata',\n 'description': md_desc,\n 'tags': [name],\n 'operationId': f'describe{name.capitalize()}Process',\n 'parameters': [\n {'$ref': '#/components/parameters/f'}\n ],\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n 'default': {'$ref': '#/components/responses/default'}\n }\n }\n }\n\n paths[f'{process_name_path}/execution'] = {\n 'post': {\n 'summary': f\"Process {l10n.translate(p.metadata['title'], locale_)} execution\", # noqa\n 'description': md_desc,\n 'tags': [name],\n 'operationId': f'execute{name.capitalize()}Job',\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n '201': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/ExecuteAsync.yaml\"}, # noqa\n '404': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/NotFound.yaml\"}, # noqa\n '500': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/ServerError.yaml\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'}\n },\n 'requestBody': {\n 'description': 'Mandatory execute request JSON',\n 'required': True,\n 'content': {\n 'application/json': {\n 'schema': {\n '$ref': f\"{OPENAPI_YAML['oapip']}/schemas/execute.yaml\" # noqa\n }\n }\n }\n }\n }\n }\n if 'example' in p.metadata:\n paths[f'{process_name_path}/execution']['post']['requestBody']['content']['application/json']['example'] = p.metadata['example'] # noqa\n\n name_in_path = {\n 'name': 'jobId',\n 'in': 'path',\n 'description': 'job identifier',\n 'required': True,\n 'schema': {\n 'type': 'string'\n }\n }\n\n paths['/jobs'] = {\n 'get': {\n 'summary': 'Retrieve jobs list',\n 'description': 'Retrieve a list of jobs',\n 'tags': ['jobs'],\n 'operationId': 'getJobs',\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n '404': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/NotFound.yaml\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'}\n }\n }\n }\n\n paths['/jobs/{jobId}'] = {\n 'get': {\n 'summary': 'Retrieve job details',\n 'description': 'Retrieve job details',\n 
'tags': ['jobs'],\n 'parameters': [\n name_in_path,\n {'$ref': '#/components/parameters/f'}\n ],\n 'operationId': 'getJob',\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n '404': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/NotFound.yaml\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'} # noqa\n }\n },\n 'delete': {\n 'summary': 'Cancel / delete job',\n 'description': 'Cancel / delete job',\n 'tags': ['jobs'],\n 'parameters': [\n name_in_path\n ],\n 'operationId': 'deleteJob',\n 'responses': {\n '204': {'$ref': '#/components/responses/204'},\n '404': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/NotFound.yaml\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'} # noqa\n }\n },\n }\n\n paths['/jobs/{jobId}/results'] = {\n 'get': {\n 'summary': 'Retrieve job results',\n 'description': 'Retrive job resiults',\n 'tags': ['jobs'],\n 'parameters': [\n name_in_path,\n {'$ref': '#/components/parameters/f'}\n ],\n 'operationId': 'getJobResults',\n 'responses': {\n '200': {'$ref': '#/components/responses/200'},\n '404': {'$ref': f\"{OPENAPI_YAML['oapip']}/responses/NotFound.yaml\"}, # noqa\n 'default': {'$ref': '#/components/responses/default'} # noqa\n }\n }\n }\n\n tag = {\n 'name': 'jobs',\n 'description': 'Process jobs',\n }\n oas['tags'].insert(1, tag)\n\n oas['paths'] = paths\n\n return oas", "def get_go():\n # decompress obo file if it wasn't yet\n if not os.path.exists(OBO_FILE):\n _decompress_obofile()\n # create global variable\n if __GO__[0] is None:\n __GO__[0] = onto.Ontology(OBO_FILE, with_rels=True, include_alt_ids=False)\n return __GO__[0]", "def open_changeset(self):\n pass", "def __read(self):\n from .OpenSearchEngine import OpenSearchEngine\n engine = OpenSearchEngine()\n \n while not self.isStartElement() and not self.atEnd():\n self.readNext()\n \n if (\n self.name() != \"OpenSearchDescription\" or\n self.namespaceUri() != \"http://a9.com/-/spec/opensearch/1.1/\"\n ):\n self.raiseError(QCoreApplication.translate(\n \"OpenSearchReader\",\n \"The file is not an OpenSearch 1.1 file.\"))\n return engine\n \n while not self.atEnd():\n self.readNext()\n \n if not self.isStartElement():\n continue\n \n if self.name() == \"ShortName\":\n engine.setName(self.readElementText())\n \n elif self.name() == \"Description\":\n engine.setDescription(self.readElementText())\n \n elif self.name() == \"Url\":\n type_ = self.attributes().value(\"type\")\n url = self.attributes().value(\"template\")\n method = self.attributes().value(\"method\")\n \n if (\n type_ == \"application/x-suggestions+json\" and\n engine.suggestionsUrlTemplate()\n ):\n continue\n \n if (\n (not type_ or\n type_ == \"text/html\" or\n type_ == \"application/xhtml+xml\") and\n engine.searchUrlTemplate()\n ):\n continue\n \n if not url:\n continue\n \n parameters = []\n \n self.readNext()\n \n while not (self.isEndElement() and self.name() == \"Url\"):\n if (\n not self.isStartElement() or\n (self.name() != \"Param\" and\n self.name() != \"Parameter\")\n ):\n self.readNext()\n continue\n \n key = self.attributes().value(\"name\")\n value = self.attributes().value(\"value\")\n \n if key and value:\n parameters.append((key, value))\n \n while not self.isEndElement():\n self.readNext()\n \n if type_ == \"application/x-suggestions+json\":\n engine.setSuggestionsUrlTemplate(url)\n engine.setSuggestionsParameters(parameters)\n engine.setSuggestionsMethod(method)\n elif (\n not type_ or\n type_ == \"text/html\" or\n type_ == \"application/xhtml+xml\"\n ):\n 
engine.setSearchUrlTemplate(url)\n engine.setSearchParameters(parameters)\n engine.setSearchMethod(method)\n \n elif self.name() == \"Image\":\n engine.setImageUrl(self.readElementText())\n \n if (\n engine.name() and\n engine.description() and\n engine.suggestionsUrlTemplate() and\n engine.searchUrlTemplate() and\n engine.imageUrl()\n ):\n break\n \n return engine", "def add_ontology(metadata):\n metadata = add_surface_ontology(metadata)\n metadata = add_place_ontology(metadata)\n return metadata", "def _open_data_source(self, *args):\n if len(args) != 0:\n # For first call to open (open())\n self.ds_filename = args[0]\n self.ds_tablename = args[1]\n self.ds_file = load_workbook(filename = args[0], use_iterators = True)\n self.ds_table = self.ds_file.get_sheet_by_name(name = args[1])\n else:\n # For reopening the file (reset())\n self.ds_file = load_workbook(filename = self.ds_filename, use_iterators = True)\n self.ds_table = self.ds_file.get_sheet_by_name(name = self.ds_tablename)\n # In any case we need a reader object to iterate over the table content \n self.ds_reader = self.ds_table.iter_rows()", "def __iter__(self):\n # Wait to open file until needed. Automatically close file when done.\n with open(self.obo_file) as fstream:\n rec_curr = None # Stores current GO Term\n typedef_curr = None # Stores current typedef\n for line in fstream:\n # obo lines start with any of: [Term], [Typedef], /^\\S+:/, or /^\\s*/\n if self.data_version is None:\n self._init_obo_version(line)\n if rec_curr is None and line[0:6].lower() == \"[term]\":\n rec_curr = GOTerm()\n if self.optobj:\n self.optobj.init_datamembers(rec_curr)\n elif typedef_curr is None and line[0:9].lower() == \"[typedef]\":\n typedef_curr = TypeDef()\n elif rec_curr is not None or typedef_curr is not None:\n line = line.rstrip() # chomp\n if line:\n self._add_to_obj(rec_curr, typedef_curr, line)\n else:\n if rec_curr is not None:\n yield rec_curr\n rec_curr = None\n elif typedef_curr is not None:\n # Save typedef.\n self.typedefs[typedef_curr.id] = typedef_curr\n typedef_curr = None\n # Return last record, if necessary\n if rec_curr is not None:\n yield rec_curr", "def _load(self, list_of_schema_urls):\n for uri in list_of_schema_urls:\n with urllib.request.urlopen(uri) as url:\n data = {}\n try:\n data = json.loads(url.read().decode())\n except:\n print(\"Failed to read schema from \" + uri)\n self._parser._load_schema(data)\n return self", "def open(self, handle):\n raise NotImplementedError", "def initialize_oauth2_session(self):\n\n def token_updater(token):\n \"\"\"Stores oauth2 token on disk\"\"\"\n try:\n with open(self.OAUTH_TOKEN_PATH, 'w') as f:\n json.dump(token, f)\n except Exception as err:\n log.Error('Could not save the OAuth2 token to %s. This means '\n 'you may need to do the OAuth2 authorization '\n 'process again soon. Original error: %s' % (\n self.OAUTH_TOKEN_PATH, err))\n\n token = None\n try:\n with open(self.OAUTH_TOKEN_PATH) as f:\n token = json.load(f)\n except IOError as err:\n log.Notice('Could not load OAuth2 token. '\n 'Trying to create a new one. 
(original error: %s)' % err)\n\n self.http_client = OAuth2Session(\n self.CLIENT_ID,\n scope=self.OAUTH_SCOPE,\n redirect_uri=self.OAUTH_REDIRECT_URL,\n token=token,\n auto_refresh_kwargs={\n 'client_id': self.CLIENT_ID,\n 'client_secret': self.CLIENT_SECRET,\n },\n auto_refresh_url=self.OAUTH_TOKEN_URL,\n token_updater=token_updater)\n\n if token is not None:\n self.http_client.refresh_token(self.OAUTH_TOKEN_URL)\n\n endpoints_response = self.http_client.get(self.metadata_url +\n 'account/endpoint')\n if endpoints_response.status_code != requests.codes.ok:\n token = None\n\n if token is None:\n if not sys.stdout.isatty() or not sys.stdin.isatty():\n log.FatalError('The OAuth2 token could not be loaded from %s '\n 'and you are not running duplicity '\n 'interactively, so duplicity cannot possibly '\n 'access Amazon Drive.' % self.OAUTH_TOKEN_PATH)\n authorization_url, _ = self.http_client.authorization_url(\n self.OAUTH_AUTHORIZE_URL)\n\n print('')\n print('In order to allow duplicity to access Amazon Drive, please '\n 'open the following URL in a browser and copy the URL of the '\n 'page you see after authorization here:')\n print(authorization_url)\n print('')\n\n redirected_to = (raw_input('URL of the resulting page: ')\n .replace('http://', 'https://', 1)).strip()\n\n token = self.http_client.fetch_token(\n self.OAUTH_TOKEN_URL,\n client_secret=self.CLIENT_SECRET,\n authorization_response=redirected_to)\n\n endpoints_response = self.http_client.get(self.metadata_url +\n 'account/endpoint')\n endpoints_response.raise_for_status()\n token_updater(token)\n\n urls = endpoints_response.json()\n if 'metadataUrl' not in urls or 'contentUrl' not in urls:\n log.FatalError('Could not retrieve endpoint URLs for this account')\n self.metadata_url = urls['metadataUrl']\n self.content_url = urls['contentUrl']", "def open_datastore(self, connection_string, inspectors, *args, **kwargs):\n\n for inspector in inspectors:\n insp = inspector(connection_string, *args, **kwargs)\n data = insp.open()\n if data is not None:\n return data, insp", "def test_twice_dependent_object_import(self):\n pass", "def Open(self):\n return True", "def Open(self):\n return True", "def __init__(self):\n # Define class API\n self.api = API2()\n # Initialize a list of SAVED_OBJECTS (used in get_ancestors)\n self.SAVED_OBJECTS = []\n # Initialize the total number of objects\n self.total = 0\n # Load saved objects from file to continue the last execution\n self.load_saved_objects()", "def _load(self):\n service_manager = helper_util.getServiceManager(self.hostname, self.port,\n self.uno_path,\n self.office_binary_path)\n desktop = service_manager.createInstance(\"com.sun.star.frame.Desktop\")\n uno_url = self.systemPathToFileUrl(self.document_url)\n uno_document = desktop.loadComponentFromURL(uno_url, \"_blank\", 0, ())\n if not uno_document:\n raise AttributeError(\"This document can not be loaded or is empty\")\n if self.refresh:\n # Before converting to expected format, refresh dynamic\n # value inside document.\n dispatcher = service_manager.createInstance(\"com.sun.star.frame.DispatchHelper\")\n for uno_command in ('UpdateFields', 'UpdateAll', 'UpdateInputFields',\n 'UpdateAllLinks', 'UpdateCharts',):\n dispatcher.executeDispatch(uno_document.getCurrentController().getFrame(),\n '.uno:%s' % uno_command, '', 0, ())\n module_manager = service_manager.createInstance(\"com.sun.star.frame.ModuleManager\")\n self.document_type = module_manager.identify(uno_document)\n self.document_loaded = uno_document", "def from_file(path) -> 
ontol.Ontology:\n abs_path = os.path.abspath(os.path.normpath(path))\n\n return __ontology[abs_path]", "def open(self, path: str, **kwargs) -> Iterator[OpenFileType]:\n full_path = self._full_path(path)\n logger.debug(f\"entering fs.open context manager for {full_path}\")\n of = self.fs.open(full_path, **kwargs)\n logger.debug(f\"FSSpecTarget.open yielding {of}\")\n yield of\n logger.debug(\"FSSpecTarget.open yielded\")\n of.close()", "def test_get_interactions(self):\n wp22_rdf_graph = parse_rdf(WP22)\n wp706_rdf_graph = parse_rdf(WP706)\n wp1871_rdf_graph = parse_rdf(WP1871)\n wp2799_rdf_graph = parse_rdf(WP2799)\n\n nodes_wp22 = _get_interactions(wp22_rdf_graph)\n nodes_wp706 = _get_interactions(wp706_rdf_graph)\n nodes_wp1871 = _get_interactions(wp1871_rdf_graph)\n nodes_wp2799 = _get_interactions(wp2799_rdf_graph)\n\n self.assertEqual(len(nodes_wp22), 10)\n self.assertEqual(len(nodes_wp706), 44)\n self.assertEqual(len(nodes_wp1871), 51)\n self.assertEqual(len(nodes_wp2799), 28)", "def _ofind(self,oname):\n\n # the @ in magics isn't really part of the name\n oname = oname.strip()\n if oname.startswith('@'):\n oname = oname[1:]\n\n # Namespaces to search in:\n user_ns = self.shell.user_ns\n user_config_ns = self.shell.user_config_ns\n internal_ns = self.shell.internal_ns\n builtin_ns = __builtin__.__dict__\n\n # Put them in a list. The order is important so that we find things in the\n # same order that Python finds them.\n namespaces = [ ('Interactive',user_ns),\n ('User-defined configuration',user_config_ns),\n ('IPython internal',internal_ns),\n ('Python builtin',builtin_ns)\n ]\n\n # initialize results to 'null'\n found = 0; obj = None; ospace = None; ds = None; ismagic = 0\n\n try:\n for nsname,ns in namespaces:\n try:\n obj = ns[oname]\n except KeyError:\n pass\n else:\n found = 1\n ospace = nsname\n ds = inspect.getdoc(obj)\n raise 'found it'\n except 'found it':\n pass\n\n # try to see if it's magic\n if not found:\n try:\n obj = eval('self.magic_'+oname)\n found = 1\n ospace = 'IPython internal'\n ismagic = 1\n ds = inspect.getdoc(obj)\n except:\n pass\n # Play some games to try and find info about dotted objects\n # and for things like {}.get? or ''.remove? 
to work\n if not found:\n try:\n self.tmp_obj = eval(oname,user_ns)\n found = 1\n except:\n try:\n self.tmp_obj = eval(oname,builtin_ns)\n found = 1\n except:\n pass\n if found:\n ds = inspect.getdoc(self.tmp_obj)\n ospace = 'Currently not defined in user session.'\n obj = self.tmp_obj\n del self.tmp_obj\n return found,obj,ospace,ds,ismagic", "def load_ontology(\n scenarios_dir: pathlib.Path,\n) -> Tuple[Optional[rasaeco.model.Ontology], List[str]]:\n errors = [] # type: List[str]\n\n path_map = dict() # type: MutableMapping[str, pathlib.Path]\n meta_map = dict() # type: MutableMapping[str, rasaeco.meta.Meta]\n\n scenario_pths = sorted(scenarios_dir.glob(\"**/scenario.md\"))\n\n for pth in scenario_pths:\n xml_pth = as_xml_path(scenario_path=pth)\n if not xml_pth.exists():\n errors.append(\n f\"The intermediate XML representation for the scenario {pth} \"\n f\"does not exist: {xml_pth}; \"\n f\"did you render the scenarios to intermediate XML representation \"\n f\"already?\"\n )\n\n if errors:\n return None, errors\n\n for pth in scenario_pths:\n meta, meta_errors = rasaeco.meta.extract_meta(\n text=pth.read_text(encoding=\"utf-8\")\n )\n\n for error in meta_errors:\n errors.append(f\"In file {pth}: {error}\")\n\n if meta_errors:\n continue\n\n assert meta is not None\n\n for i, cubelet in enumerate(meta[\"volumetric\"]):\n ##\n # Verify aspect range\n ##\n\n range_error = rasaeco.model.verify_aspect_range(\n first=cubelet[\"aspect_from\"], last=cubelet[\"aspect_to\"]\n )\n\n if range_error:\n errors.append(\n f\"In file {pth} and cubelet {i + 1}: \"\n f\"Invalid aspect range: {range_error}\"\n )\n\n range_error = rasaeco.model.verify_phase_range(\n first=cubelet[\"phase_from\"], last=cubelet[\"phase_to\"]\n )\n\n if range_error:\n errors.append(\n f\"In file {pth} and cubelet {i + 1}: \"\n f\"Invalid phase range: {range_error}\"\n )\n\n range_error = rasaeco.model.verify_level_range(\n first=cubelet[\"level_from\"], last=cubelet[\"level_to\"]\n )\n\n if range_error:\n errors.append(\n f\"In file {pth} and cubelet {i + 1}: \"\n f\"Invalid level range: {range_error}\"\n )\n\n identifier = pth.parent.relative_to(scenarios_dir).as_posix()\n\n meta_map[identifier] = meta\n path_map[identifier] = pth\n\n scenario_id_set = set(meta_map.keys())\n\n for identifier, meta in meta_map.items():\n for relate_to in meta[\"relations\"]:\n if relate_to[\"target\"] not in scenario_id_set:\n errors.append(\n f\"In file {path_map[identifier]}: \"\n f\"The relation {relate_to['nature']!r} is invalid \"\n f\"as the identifier of the target scenario can not be found: \"\n f\"{relate_to['target']!r}\"\n )\n\n if errors:\n return None, errors\n\n scenarios = [] # type: List[rasaeco.model.Scenario]\n for identifier, meta in meta_map.items():\n volumetric = [] # type: List[rasaeco.model.Cubelet]\n for cubelet in meta[\"volumetric\"]:\n volumetric.append(\n rasaeco.model.Cubelet(\n aspect_range=rasaeco.model.AspectRange(\n first=cubelet[\"aspect_from\"], last=cubelet[\"aspect_to\"]\n ),\n phase_range=rasaeco.model.PhaseRange(\n first=cubelet[\"phase_from\"], last=cubelet[\"phase_to\"]\n ),\n level_range=rasaeco.model.LevelRange(\n first=cubelet[\"level_from\"], last=cubelet[\"level_to\"]\n ),\n )\n )\n\n pth = path_map[identifier]\n definitions, extraction_errors = _extract_definitions(xml_path=as_xml_path(pth))\n if extraction_errors:\n errors.extend(extraction_errors)\n else:\n assert definitions is not None\n\n scenario = rasaeco.model.Scenario(\n identifier=identifier,\n title=meta[\"title\"],\n 
contact=meta[\"contact\"],\n volumetric=volumetric,\n definitions=definitions,\n relative_path=pth.relative_to(scenarios_dir),\n )\n\n scenarios.append(scenario)\n\n relations = [] # type: List[rasaeco.model.Relation]\n for identifier, meta in meta_map.items():\n for relation in meta[\"relations\"]:\n relations.append(\n rasaeco.model.Relation(\n source=identifier,\n target=relation[\"target\"],\n nature=relation[\"nature\"],\n )\n )\n\n ontology = rasaeco.model.Ontology(scenarios=scenarios, relations=relations)\n\n for scenario in ontology.scenarios:\n pth = scenarios_dir / scenario.relative_path\n validation_errors = _validate_references(\n scenario=scenario, ontology=ontology, xml_path=as_xml_path(pth)\n )\n\n for error in validation_errors:\n errors.append(f\"When validating references in {pth}: {error}\")\n\n if errors:\n return None, errors\n\n return ontology, []", "def __init__(self, ontology_path=None, hierarchies_path=None):\r\n\r\n\t\tself.ontology = rdflib.Graph()\r\n\t\tif ontology_path: # custom ontology path\r\n\t\t\t#self.ontology = owlready2.get_ontology(ontology_path).load()\r\n\t\t\tself.ontology.parse(ontology_path)\r\n\t\telse: # default ontology path\r\n\t\t\tself.ontology.parse('./sket/ont_proc/ontology/examode.owl')\r\n\t\tif hierarchies_path: # custom hierarchy relations path\r\n\t\t\tself.hrels = utils.read_hierarchies(hierarchies_path)\r\n\t\telse: # default hierarchy relations path\r\n\t\t\tself.hrels = utils.read_hierarchies('./sket/ont_proc/rules/hierarchy_relations.txt')\r\n\t\tself.disease = {'colon': '0002032', 'lung': '0008903', 'cervix': '0002974', 'celiac': '0005130'}", "def _doOpenTool(self):\n self._cmdOpenTool()", "def doopen(lines, password):\n \n if lines:\n fh = FileHandles()\n try:\n SpssClient.StartClient()\n uialerts = SpssClient.GetUIAlerts()\n SpssClient.SetUIAlerts(False)\n for line in lines:\n line = fh.resolve(line.lstrip()) # file handles are supported for all file types\n ext = os.path.splitext(line)[-1].lower()\n if ext == \".sav\":\n cmd = \"\"\"GET FILE=\"%s\" \"\"\" % line\n if password is not None:\n cmd = cmd + \"\"\"PASSWORD=\"%s\". \"\"\" % password\n spss.Submit(cmd)\n # assign a random dataset name\n spss.Submit(\"\"\"DATASET NAME %s.\"\"\" % _(\"\"\"Dataset\"\"\") + str(random.randint(1000, 100000)))\n print(_(\"\"\"Opened file %s\"\"\") % line)\n elif ext == \".sps\":\n try:\n if password is None:\n SpssClient.OpenSyntaxDoc(line)\n else:\n SpssClient.OpenSyntaxDoc(line, password)\n print(_(\"\"\"Opened file %s\"\"\") % line)\n except:\n print(_(\"\"\"File: %s already open and has changed or could not be opened. Not opened\"\"\") % line)\n elif ext == \".spv\":\n try:\n if password is None:\n SpssClient.OpenOutputDoc(line)\n else:\n SpssClient.OpenOutputDoc(line, password)\n print(_(\"\"\"Opened file %s\"\"\") % line)\n except:\n print(_(\"\"\"File: %s already open and has changed or could not be opened. Not opened\"\"\") % line) \n else:\n raise ValueError(_(\"\"\"File to open has unknown extension: %s\"\"\") % line)\n except:\n print(_(\"\"\"File open failure: %s\"\"\") % line)\n finally:\n SpssClient.SetUIAlerts(uialerts)\n SpssClient.StopClient()", "def update_oids(url: str=typer.Argument(ROOT_SPEC, help=\"URL to OID node specification\",\n metavar='URL')):\n console.print(\"Downloading OID specifications ... 
\", end='')\n specifications, errors = get_specifications(url)\n if errors:\n console.print(RICH_ERROR)\n console.print_error(\"Errors occured during download:\")\n for err_url, error in errors:\n console.print_error(f\"URL: {err_url}\")\n console.print_error(f\"error: {error}\")\n return\n console.print(RICH_OK)\n console.print(\"Parsing OID specifications ... \", end='')\n specifications, errors = parse_specifications(specifications)\n if errors:\n console.print(RICH_ERROR)\n console.print_error(\"Errors detected while parsing OID specifications:\")\n for err_url, error in errors:\n console.print_error(f\"URL: {err_url}\")\n console.print_error(f\"error: {error}\")\n return\n console.print(RICH_OK)\n #\n console.print(\"Updating OID registry ... \", end='')\n try:\n oid_registry.update_from_specifications(specifications)\n except Exception as exc: # pylint: disable=W0703\n console.print(RICH_ERROR)\n console.print_error(exc)\n console.print(RICH_OK)\n directory_scheme.site_oids_toml.write_text(oid_registry.as_toml())", "def open_connection(self, connection):\n pass", "def ontology() -> Ontology:\n return Ontology()", "def open(self):\n raise NotImplementedError(\"Implement this method in child class\")", "def open_links(self):\n if self._is_open:\n raise Exception('Already opened')\n\n try:\n self.parallel_safe(lambda scf: scf.open_link())\n self._is_open = True\n except Exception as e:\n self.close_links()\n raise e", "async def open(self):\n pass", "def _open(args):\n p = Path(args.uri)\n if p.is_file():\n uri = p.resolve().as_uri()\n else:\n # hope the user has provided a valid URI\n uri = args.uri\n\n print(f'opening {uri}')\n args.service.open(uri)", "def open(self):\r\n if not self.filename:\r\n raise ValueError(\"Can only open on-disk databases\")\r\n self.db = anydbm.open(self.filename, \"w\") #raises anydbm.error\r\n try:\r\n if self.db[\"--Reserved--type\"] != self.type:\r\n raise ValueError(\"Not a %s database\" % self.type)\r\n except KeyError:\r\n raise ValueError(\"Not a recognized database\")", "def open(self):\n with self._not_full:\n self._closed = False", "def open_observation(self, mode):\n\n # check if current observation must be closed to open a new one\n if self.observationId:\n response = dialog.MessageDialog(programName,\n \"The current observation will be closed. 
Do you want to continue?\",\n [YES, NO])\n if response == NO:\n return \"\"\n else:\n self.close_observation()\n\n if mode == \"start\":\n result, selectedObs = self.selectObservations(OPEN)\n if mode == VIEW:\n result, selectedObs = self.selectObservations(VIEW)\n\n if selectedObs:\n return self.load_observation(selectedObs[0], mode)\n else:\n return \"\"", "def check_opentextbook( self ):\n ( isbn_dct, open_textbook_lst ) = self.setup_data()\n for (isbn, other_data) in isbn_dct.items():\n for book_dct in open_textbook_lst:\n if isbn == book_dct['ISBN13']:\n isbn_dct[isbn]['opentextbook_url'] = book_dct['Opentextbooks URL']\n else:\n isbn_dct[isbn]['opentextbook_url'] = 'no_match_found'\n jsn = json.dumps( isbn_dct, sort_keys=True, indent=2 )\n log.debug( f'jsn, ```{jsn}```' )\n with open( f'{project_dir}/data/05b_after_opentextbook_check.json', 'w', encoding='utf-8' ) as f:\n f.write( jsn )", "def lint_ontology(\n oi: BasicOntologyInterface, dry_run=False, entities: Iterable[CURIE] = None\n) -> Iterable[ISSUE]:\n for actionable, change in _lint_ontology_dry_run(oi, entities):\n if actionable and not dry_run:\n if isinstance(oi, PatcherInterface):\n oi.apply_patch(change)\n yield actionable, change", "def OpenFile(self,path):\n\t\tself.acad.Documents.Open(path)", "def pick_otus_open_ref(input_fname, output_dir, verbose=None, qiime_opts={}):\n\n output_fname = new_file(\"otu_table.biom\", basedir=output_dir)\n revcomp_fname = new_file(\n \"revcomp.fna\", basedir=os.path.dirname(input_fname))\n\n verbose = settings.workflows.verbose if verbose is None else verbose\n\n default_opts = {\n \"reference_fp\": settings.workflows.sixteen.otu_refseq\n }\n default_opts.update(qiime_opts)\n opts = dict_to_cmd_opts(default_opts)\n\n cmd = (\" pick_open_reference_otus.py\"+\n \" --input_fp={}\"+\n \" --output_dir=\"+output_dir+\n \" -f\"+\n \" \"+opts)\n\n revcomp_cmd = (\"sequence_convert\"+\n \" --format=fasta\"+\n \" --to=fasta \"+\n \" -r\"+\n \" \"+input_fname+\n \" > \"+revcomp_fname)\n\n def run(targets):\n strategies.backup(\n (CmdAction(cmd.format(input_fname),verbose=verbose),\n strategies.Group(\n CmdAction(revcomp_cmd),\n CmdAction(cmd.format(revcomp_fname),verbose=verbose))),\n extra_conditions = [ \n lambda ret, output_fname: os.stat(output_fname).st_size == 0\n ],\n output_fname=output_fname\n )\n\n return {\n \"name\": \"pick_otus_open_ref:\"+input_fname,\n \"actions\": [run],\n \"targets\": [output_fname],\n \"file_dep\": [input_fname],\n }", "def test_objectresource_loadallobjects(self):\n\n home01 = yield self.homeUnderTest(txn=self.theTransactionUnderTest(0), name=\"user01\", create=True)\n self.assertTrue(home01 is not None)\n calendar01 = yield home01.childWithName(\"calendar\")\n yield calendar01.createCalendarObjectWithName(\"1.ics\", Component.fromString(self.caldata1))\n yield self.commitTransaction(0)\n\n home = yield self._remoteHome(self.theTransactionUnderTest(1), \"user01\")\n self.assertTrue(home is not None)\n calendar = yield home.childWithName(\"calendar\")\n objects = yield calendar.objectResources()\n self.assertEqual(len(objects), 1)\n self.assertEqual(objects[0].name(), \"1.ics\")\n yield self.commitTransaction(1)", "def read_relations(db, openfile):\n pass", "def convert_original_to_open_doors(self, step_path: str):\n self.logger.info(\"Converting metadata tables from eFiction to Open Doors structure...\")\n\n self.convert_all_tags()\n\n old_authors = self.sql.read_table_to_dict(self.working_original, \"authors\")\n self.authors = 
self._convert_authors(old_authors)\n\n # Prompt for original db file if we don't already have it in the config\n if not self.config.has_option('Archive', 'language_code'):\n language = input(\"Two-letter Language code of the stories in this archive (default: en - press enter):\\n>> \")\n self.config['Archive']['language_code'] = language\n else:\n language = self.config['Archive']['language_code']\n\n self.convert_stories(language)\n\n database_dump = os.path.join(step_path, f\"{self.working_open_doors}_without_chapters.sql\")\n self.logger.info(f\"Exporting converted tables to {database_dump}...\")\n self.sql.dump_database(self.working_open_doors, database_dump)\n return True", "def OpenFileObject(self, file_object):\n if self._is_open:\n raise IOError('Already open.')\n\n self._exe_file.open_file_object(file_object)\n self._exe_section = self._exe_file.get_section_by_name('.rsrc')\n\n if self._exe_section:\n self._wrc_stream.set_virtual_address(self._exe_section.virtual_address)\n self._wrc_stream.open_file_object(self._exe_section)\n\n self._file_object = file_object\n self._is_open = True", "def open(self, path: str, **kwargs): # don't know how to type hint this\n pass", "def open(self, fullurl, data=None, method=None):\n fullurl = unwrap(toBytes(fullurl))\n # percent encode url, fixing lame server errors for e.g, like space\n # within url paths.\n fullurl = quote(fullurl, safe=\"%/:=&?~#+!$,;'@()*[]|\")\n if self.tempcache and fullurl in self.tempcache:\n filename, headers = self.tempcache[fullurl]\n fp = open(filename, 'rb')\n return addinfourl(fp, headers, fullurl)\n urltype, url = splittype(fullurl)\n if not urltype:\n urltype = 'file'\n if urltype in self.proxies:\n proxy = self.proxies[urltype]\n urltype, proxyhost = splittype(proxy)\n host, selector = splithost(proxyhost)\n url = (host, fullurl) # Signal special case to open_*()\n else:\n proxy = None\n name = 'open_' + urltype\n self.type = urltype\n name = name.replace('-', '_')\n if not hasattr(self, name):\n if proxy:\n return self.open_unknown_proxy(proxy, fullurl, data)\n else:\n return self.open_unknown(fullurl, data)\n try:\n return getattr(self, name)(url, data, method)\n except socket.error, msg:\n raise IOError, ('socket error', msg), sys.exc_info()[2]", "def import_workspace( ws , objects):\n\n if not isinstance( objects, list ):\n objects = [objects,]\n\n ## NOTE getattr is needed to escape python keyword import\n for o in objects:\n getattr( ws, \"import\") ( o )", "def import_workspace( ws , objects):\n\n if not isinstance( objects, list ):\n objects = [objects,]\n\n ## NOTE getattr is needed to escape python keyword import\n for o in objects:\n getattr( ws, \"import\") ( o )", "def open(self):\n _url = f\"{self.connector.base_url}/projects/{self.project_id}/open\"\n\n _response = self.connector.http_call(\"post\", _url)\n\n # Update object\n self._update(_response.json())", "def open_chicago_graph(path=\"chicago.xml\"):\n\t\n\treturn ox.load_graphml(filename=path)", "def open_file(self):\n if not self.loaded:\n self.load()\n\n # call a plugin action to perform the open action\n from cviewer.plugins.cff2.actions.actions import OpenFile", "def iter_genotypes(self):\n # Seeking at the beginning of the file\n self._impute2_file.seek(0)\n\n # Parsing each lines of the IMPUTE2 file\n for i, line in enumerate(self._impute2_file):\n genotypes = self._parse_impute2_line(line)\n\n variant_info = None\n if self.has_index:\n variant_info = self._impute2_index.iloc[i, :]\n self._fix_genotypes_object(genotypes, 
variant_info)\n\n yield genotypes", "def open (self, path, mode):\r\n pass", "def svn_fs_open(*args):\r\n return _fs.svn_fs_open(*args)", "def open( self, filename ):\r\n #http://www.oooforum.org/forum/viewtopic.phtml?t=35344\r\n properties = []\r\n properties.append( OpenOfficeDocument._makeProperty( 'Hidden', True ) ) \r\n properties = tuple( properties )\r\n self.oodocument = self.openoffice.loadComponentFromURL( uno.systemPathToFileUrl( os.path.abspath( filename ) ), \"_blank\", 0, properties )", "def open(self):\n if not self.filename:\n raise ValueError(\"Can only open on-disk databases\")\n self.db = dbm.open(self.filename, \"w\") #raises anydbm.error\n try:\n if self.db[\"--Reserved--type\"] != self.type:\n raise ValueError(\"Not a %s database\" % self.type)\n except KeyError:\n raise ValueError(\"Not a recognized database\")", "def test_fetching_a_single_office(self):\n\n access_token = self.generate_token()\n self.create_office()\n response_data = self.client.get(\n \"api/v2/admin/offices/1\",\n headers ={'content-type': 'application/json',\n 'Authorization': f'Bearer {access_token}' }\n )\n self.assertEqual(response_data.status_code, 200)" ]
[ "0.56189865", "0.5365172", "0.52742535", "0.5039581", "0.50218326", "0.50218326", "0.50218326", "0.49638537", "0.48817602", "0.48285356", "0.47886187", "0.47444418", "0.47354096", "0.47354096", "0.47251382", "0.47234756", "0.4720916", "0.4704863", "0.46936986", "0.4692322", "0.46841657", "0.4680763", "0.4680763", "0.4680763", "0.4680763", "0.4680763", "0.4680763", "0.4680763", "0.4676635", "0.46510923", "0.46472487", "0.46382484", "0.46368307", "0.46331224", "0.46158093", "0.46091717", "0.4595129", "0.45898697", "0.45896813", "0.4563369", "0.45601904", "0.45601904", "0.4550757", "0.45360032", "0.45350462", "0.45343846", "0.45299438", "0.4517056", "0.4498612", "0.44950467", "0.4492376", "0.4481424", "0.44771972", "0.4465253", "0.4465017", "0.4462509", "0.44474217", "0.4438838", "0.4438838", "0.4436469", "0.44335708", "0.44171616", "0.44127417", "0.44116655", "0.4409868", "0.44037274", "0.44031465", "0.44023937", "0.4395387", "0.4389295", "0.43758893", "0.43706766", "0.43664172", "0.43620828", "0.43606734", "0.43581855", "0.43490484", "0.43454134", "0.4338589", "0.43267488", "0.43248242", "0.43139142", "0.43137455", "0.4310855", "0.4299835", "0.4295937", "0.42925513", "0.42833957", "0.42818987", "0.42804793", "0.42804793", "0.42764163", "0.42756692", "0.4274094", "0.42719042", "0.42707223", "0.42670566", "0.4252166", "0.425179", "0.42469916" ]
0.6786061
0
Open feature ontology objects.
def open_feature_ontology_pbobject(ontology_file): try: ontology = open_pbobject(ontology_file, FeatureOntologyPb2) if ontology is not None: logging.info('Successfully loaded FeatureOntology spec.') return ontology except Exception: logging.error('Failed to load ontology file' + ontology_file + '.')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_ontology_db_xrefs(self, feature):\n ontology = collections.defaultdict(dict) # type: dict\n db_xrefs = []\n # these are keys are formatted strangely and require special parsing\n for key in (\"go_process\", \"go_function\", \"go_component\"):\n ontology_event_index = self._create_ontology_event(\"GO\")\n for term in feature.get(key, []):\n sp = term.split(\" - \")\n ontology['GO'][sp[0]] = [ontology_event_index]\n self.ontologies_present['GO'][sp[0]] = self.ont_mappings['GO'].get(sp[0], '')\n\n # CATH terms are not distinct from EC numbers so myst be found by key\n for term in feature.get('cath_funfam', []) + feature.get('cath', []):\n for ref in term.split(','):\n ontology['CATH'][ref] = [self._create_ontology_event(\"CATH\")]\n self.ontologies_present['CATH'][ref] = self.ont_mappings['CATH'].get(ref, '')\n\n search_keys = ['ontology_term', 'db_xref', 'dbxref', 'product_source', 'tigrfam', 'pfam',\n 'cog', 'go', 'po', 'ko']\n ont_terms = [] # type: list\n # flatten out into list of values\n for key in search_keys:\n if key in feature:\n ont_terms += [x for y in feature[key] for x in y.split(',')]\n\n for ref in ont_terms:\n if ref.startswith('GO:'):\n ontology['GO'][ref] = [self._create_ontology_event(\"GO\")]\n self.ontologies_present['GO'][ref] = self.ont_mappings['GO'].get(ref, '')\n elif ref.startswith('PO:'):\n ontology['PO'][ref] = [self._create_ontology_event(\"PO\")]\n self.ontologies_present['PO'][ref] = self.ont_mappings['PO'].get(ref, '')\n elif ref.startswith('KO:'):\n ontology['KO'][ref] = [self._create_ontology_event(\"KO\")]\n self.ontologies_present['KO'][ref] = self.ont_mappings['KO'].get(ref, '')\n elif ref.startswith('COG'):\n ontology['COG'][ref] = [self._create_ontology_event(\"COG\")]\n self.ontologies_present['COG'][ref] = self.ont_mappings['COG'].get(ref, '')\n elif ref.startswith('PF'):\n ontology['PFAM'][ref] = [self._create_ontology_event(\"PFAM\")]\n self.ontologies_present['PFAM'][ref] = self.ont_mappings['PFAM'].get(ref, '')\n elif ref.startswith('TIGR'):\n ontology['TIGRFAM'][ref] = [self._create_ontology_event(\"TIGRFAM\")]\n self.ontologies_present['TIGRFAM'][ref] = self.ont_mappings['TIGRFAM'].get(ref, '')\n elif \":\" not in ref:\n db_xrefs.append(tuple([\"Unknown_Source\", ref]))\n else:\n db_xrefs.append(tuple(ref.split(\":\", 1)))\n return dict(ontology), db_xrefs", "def add_ontology(metadata):\n metadata = add_surface_ontology(metadata)\n metadata = add_place_ontology(metadata)\n return metadata", "def open_ontology_pbobject(ontology_file):\n try:\n ontology = parse_pbobject(ontology_file, OntologyV2Pb2)\n if ontology is not None:\n logging.info('Successfully loaded Ontology V2 spec.')\n return ontology\n except Exception:\n logging.error('Failed to load ontology file with V2 spec, trying V1 spec.')\n try:\n ontology = parse_pbobject(ontology_file, OntologyV1Pb2)\n if ontology is not None:\n logging.info('Successfully loaded Ontology V1 spec.')\n return ontology\n except Exception:\n if isinstance(ontology_file, str):\n logging.error('Failed to load ontology file' + ontology_file + 'with V1 spec also, returning None.')\n else:\n logging.error('Failed to load ontology file with V1 spec also, returning None.')", "def open_shapefile(file_path):\n datasource = ogr.Open(file_path)\n layer = datasource.GetLayerByIndex(0)\n print(\"Opening {}\".format(file_path))\n print(\"Number of features: {}\".format(layer.GetFeatureCount()))\n return datasource", "def open(self):\n\n self._key_generator = KeyGenerator()\n\n # A map from LOD to LODHistory 
instance for all LODs that have\n # been referenced so far:\n self._lod_histories = {}\n\n # This corresponds to the 'nodes' table in a Subversion fs. (We\n # don't need a 'representations' or 'strings' table because we\n # only track file existence, not file contents.)\n self._node_db = _NodeDatabase()\n\n # Start at revision 0 without a root node.\n self._youngest = 0", "def findFeatures(self):\n\t\tpass", "def addOntologyToObject(self, obj):\n i = -1\n for item in obj.ontologyItems.items:\n i = i + 1\n ana = vsdModels.ObjectOntology(\n type=vsdModels.OntologyItem(**item).type,\n position=i,\n ontologyItem=vsdModels.APIBase(selfUrl=vsdModels.OntologyItem(**item).selfUrl),\n object=vsdModels.APIBase(selfUrl=obj.selfUrl)\n )\n print(ana.to_struct())\n self.postRequest(\n 'object-ontologies/{0}'.format(\n vsdModels.OntologyItem(**item).type\n ),\n data=ana.to_struct())", "def test1_loading(self):\n\t\tprint \"\\nTEST 1: Loading ontologies from %s folder.\\n=================\" % DATA_FOLDER\n\t\t\n\t\tfor f in os.listdir(DATA_FOLDER):\n\t\t\tif not f.startswith('.'):\n\t\t\t\tprint \"Loading... >\", f\t\t\n\t\t\t\t\n\t\t\t\to = ontospy.Ontology(DATA_FOLDER + f)\n\t\t\t\t\n\t\t\t\tself.assertEqual(type(o), ontospy.Ontology)\n\t\t\t\tprint \"Success.\"", "def show(data_objects, **options):\n if not is_loaded():\n return data_objects\n\n # (else)\n if not hasattr(data_objects, '__iter__'):\n data_objects = [data_objects]\n\n # print(data_objects)\n scene = pygeojs.scene(**options)\n scene.createLayer('osm')\n\n if not data_objects:\n print('No data objects')\n return scene\n\n # feature_layer = scene.createLayer('feature')\n feature_layer = None\n\n combined_bounds = None\n # Reverse order so that first item ends on top\n for data_object in reversed(data_objects):\n if data_object._getdatatype() == gaia.types.VECTOR:\n # print('Adding vector object')\n # Special handling for vector datasets:\n # First, make a copy of the geopandas frame\n df = geopandas.GeoDataFrame.copy(data_object.get_data())\n\n # Convert to lon-lat if needed\n epsg = data_object.get_epsg()\n if epsg and str(epsg) != '4326':\n print('Converting crs')\n df[df.geometry.name] = df.geometry.to_crs(epsg='4326')\n\n # Strip any z coordinates (force to z = 1)\n df.geometry = df.geometry.scale(zfact=0.0).translate(zoff=1.0)\n # df.to_file('/home/john/temp/df.pandas')\n # print(df)\n # print(df.geometry)\n\n # Calculate bounds\n geopandas_bounds = df.geometry.total_bounds\n xmin, ymin, xmax, ymax = geopandas_bounds\n meta_bounds = [\n [xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]\n ]\n\n # Add map feature\n if feature_layer is None:\n feature_layer = scene.createLayer('feature')\n\n # Use __geo_interface__ to get the geojson\n feature_layer.readGeoJSON(df.__geo_interface__)\n # print(df.__geo_interface__)\n else:\n # Get bounds, in order to compute overall bounds\n meta = data_object.get_metadata()\n # print('meta: {}'.format(meta))\n # print(meta)\n raster_bounds = meta.get('bounds').get('coordinates')[0]\n # print(meta_bounds)\n assert raster_bounds, 'data_object missing bounds'\n\n # meta bounds inconsistent between sources, so compute brute force\n xvals, yvals = zip(*raster_bounds)\n xmin, xmax = min(xvals), max(xvals)\n ymin, ymax = min(yvals), max(yvals)\n meta_bounds = [\n [xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]\n ]\n\n # Bounds format is [xmin, ymin, xmax, ymax]\n bounds = [\n meta_bounds[0][0], meta_bounds[0][1],\n meta_bounds[2][0], meta_bounds[2][1]\n ]\n\n # print(bounds)\n if combined_bounds is 
None:\n combined_bounds = bounds\n else:\n combined_bounds[0] = min(combined_bounds[0], bounds[0])\n combined_bounds[1] = min(combined_bounds[1], bounds[1])\n combined_bounds[2] = max(combined_bounds[2], bounds[2])\n combined_bounds[3] = max(combined_bounds[3], bounds[3])\n\n # print('options:', options)\n rep = options.get('representation')\n if rep == 'outline':\n # Create polygon object\n rect = [\n [bounds[0], bounds[1]],\n [bounds[2], bounds[1]],\n [bounds[2], bounds[3]],\n [bounds[0], bounds[3]],\n [bounds[0], bounds[1]],\n ]\n geojs_polygon = geojson.Polygon([rect])\n properties = {\n 'fillColor': '#fff',\n 'fillOpacity': 0.1,\n 'stroke': True,\n 'strokeColor': '#333',\n 'strokeWidth': 2\n }\n geojson_feature = geojson.Feature(\n geometry=geojs_polygon, properties=properties)\n geojson_collection = geojson.FeatureCollection([geojson_feature])\n # print(geojson_collection)\n\n if feature_layer is None:\n feature_layer = scene.createLayer('feature')\n\n feature_layer.createFeature(\n 'geojson', geojson_collection, **options)\n\n elif data_object.__class__.__name__ == 'GirderDataObject':\n if data_object._getdatatype() == 'raster':\n # Use large-image display\n # Todo - verify that it is installed\n tiles_url = data_object._get_tiles_url()\n # print('tiles_url', tiles_url)\n opacity = 1.0\n if hasattr(data_object, 'opacity'):\n opacity = data_object.opacity\n scene.createLayer(\n 'osm', url=tiles_url, keepLower=False, opacity=opacity)\n else:\n raise GaiaException(\n 'Cannot display GirderDataObject with data type {}'.format(\n data_object._getdatatype()))\n\n elif data_object._getdatatype() == gaia.types.VECTOR:\n pass # vector objects handled above\n else:\n msg = 'Cannot display dataobject, type {}'.format(\n data_object.__class__.__name__)\n raise GaiaException(msg)\n\n # Send custom message to (javascript) client to set zoom & center\n rpc = {'method': 'set_zoom_and_center', 'params': combined_bounds}\n scene.send(rpc)\n return scene", "def add_features(self, obj, annotation):\n if annotation['problem']:\n obj.add(folia.Feature, subset='problem', cls=annotation['problem'])\n if annotation['pos']:\n obj.add(folia.Feature, subset='pos', cls=annotation['pos'])", "def __init__(self, features=None, **kwargs):\n super(FeatureIO, self).__init__(**kwargs)\n self.features = features", "def __init__(self, feature):\n\n super(OsmpFeature, self).__init__(geometry=feature.geometry,\n attributes=feature.attributes)\n #self.id = db_id\n #self.id_field_name = id_field_name\n\n # Memoize for later use.\n self._latlon_coords = None\n self._name = None", "def export_representations(self):\n\n dbpath, config = self._start()\n self.logger.msg1(\"Loading ontology\")\n obo_path = check_file(config.obo, dbpath, \"obo\")\n self.obo = MinimalObo(obo_path, True)\n self._export_reference_representations()\n self._export_model_representations(config)\n self._end()", "def __init__(self):\r\n\t\tself.label = \"Linked Data Location Linkage Exploration\"\r\n\t\tself.description = \"\"\"This Tool enables the users to explore the linkages between locations in wikidata. \r\n\t\tGiven an input feature class, this tool gets all properties whose objects are also locations. \r\n\t\tThe output is another feature class which contains the locations which are linked to the locations of input feature class.\"\"\"\r\n\t\tself.canRunInBackground = False", "def __init__(self, features=None):\n self.features = features", "def feature_to_open511_element(feature):\n\n # Using a hash of the geometry for an ID. 
For proper production use,\n # there'll probably have to be some code in the importer\n # that compares to existing entries in the DB to determine whether\n # this is new or modified...\n geom_hash = hashlib.md5(feature.geom.wkt).hexdigest()\n id = JURISDICTION + ':' + geom_hash\n while id in ids_seen:\n id += 'x'\n ids_seen.add(id)\n\n elem = E.RoadEvent(id=id)\n\n def set_val(tag, val):\n if val not in (None, ''):\n e = etree.Element(tag)\n e.text = unicode(val)\n elem.append(e)\n\n set_val('Title', feature.get('Name').decode('utf8'))\n\n blob = lxml.html.fragment_fromstring(feature.get('Description').decode('utf8'),\n create_parent='content')\n\n description_label = blob.xpath('//strong[text()=\"Description\"]')\n if description_label:\n description_bits = []\n el = description_label[0].getnext()\n while el.tag == 'p':\n description_bits.append(_get_el_text(el))\n el = el.getnext()\n set_val('Description', '\\n\\n'.join(description_bits))\n\n localisation = blob.cssselect('div#localisation p')\n if localisation:\n set_val('AffectedRoads', '\\n\\n'.join(_get_el_text(el) for el in localisation))\n\n try:\n set_val('ExternalURL', blob.cssselect('#avis_residants a, #en_savoir_plus a')[0].get('href'))\n except IndexError:\n pass\n\n facultatif = blob.cssselect('div#itineraire_facult p')\n if facultatif:\n set_val('Detour', '\\n\\n'.join(_get_el_text(el) for el in facultatif))\n\n if blob.cssselect('div#dates strong'):\n try:\n start_date = blob.xpath(u'div[@id=\"dates\"]/strong[text()=\"Date de d\\xe9but\"]')[0].tail\n end_date = blob.xpath(u'div[@id=\"dates\"]/strong[text()=\"Date de fin\"]')[0].tail\n if start_date and end_date:\n set_val('StartDate', _fr_string_to_date(start_date))\n set_val('EndDate', _fr_string_to_date(end_date))\n except IndexError:\n pass\n\n elem.append(E.Geometry(\n geom_to_xml_element(feature.geom)\n ))\n\n return elem", "def getFeatureInfo(self,feature):\n geomRef = feature.GetGeometryRef()\n nameIndex = feature.GetFieldIndex(\"OBJNAM\")\n featureName = \"NO OBJNAM\"\n if(nameIndex != -1 and feature.GetFieldAsString(nameIndex) != \"\" ):\n featureName = feature.GetFieldAsString(nameIndex)\n featureInfo = (featureName, feature.GetFID(), geomRef.GetX(), geomRef.GetY())\n # rospy.loginfo(featureInfo)\n return featureInfo", "def open(self) -> None:", "def open(self) -> None:", "def open(self) -> None:", "def composeWorkplaceOntology():\n\n import ossPyFuncs \n import pandas as pd\n \n #mysql query to extract full table from government organizations\n #certian table columns feature capital letters which cases uproblems\n postgreSql_selectQuery=\"SELECT * FROM us_gov_manual.us_govman_2019 ;\"\n #pass querry and obtain table\n govTable=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n\n #mysql query to obtain academic instutions\n postgreSql_selectQuery=\"SELECT institution FROM hipolabs.universities ;\"\n #pass querry and obtain table\n univTable=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n \n postgreSql_selectQuery=\"SELECT company FROM forbes.fortune2018_us1000;\"\n businesses1=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n \n postgreSql_selectQuery=\"SELECT company FROM forbes.fortune2019_us1000;\"\n businesses2=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n \n postgreSql_selectQuery=\"SELECT company FROM forbes.fortune2020_global2000;\"\n businesses3=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n\n #combine theinsitutions into a vector\n 
combinedSeries=[govTable['AgencyName'],univTable['institution'],businesses1['company'],businesses2['company'],businesses3['company']]\n #turn the multi item vector into a single series\n fullWordbank=pd.concat(combinedSeries)\n #turn that series into a pd dataframe\n wordbankTable=pd.DataFrame(fullWordbank.unique())\n\n return wordbankTable", "def ontology() -> Ontology:\n return Ontology()", "def os_open_graph( self, ):\r\n pass", "def open(self):", "def get_go():\n # decompress obo file if it wasn't yet\n if not os.path.exists(OBO_FILE):\n _decompress_obofile()\n # create global variable\n if __GO__[0] is None:\n __GO__[0] = onto.Ontology(OBO_FILE, with_rels=True, include_alt_ids=False)\n return __GO__[0]", "def Open(self, file_object):", "def load_features(self, features):\n pass\n # self.features = features", "def __init__(self):\r\n self.label = \"Batch OVL to Feature\"\r\n self.description = \"Batch OVL to Feature searches a folder for OVL files from CPOF, C2PC, GCCS or similar system and converts it to a series of Feature Class for Point, Line, and Polygons.\"\r\n self.canRunInBackground = False", "def load_gene_ontology(self, file_path):\n\t\tpass", "def get_features(item, GP):\n contents_url = '%s/contents' % item['url']\n\n # scrape readme\n gf.get_readme_length(contents_url, GP)\n\n # scrape file-by-file stats\n digest_repo(contents_url, GP)\n\n # scrape commit history\n gf.get_repo_commit_history(item, GP)\n\n # scrape stargazers\n GP.n_stars = item['stargazers_count']\n\n # scrape forks\n GP.n_forks = item['forks_count']\n\n return GP", "def OpenFace(openface_features, PID, EXP):\n\n # tidy up data frame:\n filter_col = [col for col in openface_features if col.startswith('AU')]\n filter_col.insert(0,'time')\n filter_col.insert(0,'participant_id')\n filter_col.insert(0,'experiment_id')\n openface_features['participant_id'] = PID\n openface_features['experiment_id'] = EXP\n openface_features = openface_features[filter_col]\n openface_features.columns = openface_features.columns.str.replace('_', '')\n openface_features = openface_features.rename(columns = {'experimentid':'experiment_id'})\n openface_features = openface_features.rename(columns = {'participantid':'participant_id'})\n return openface_features", "def __init__(self):\n \n self.csv_features = {} # Create dictionary to load the CSV features\n self.meta_features = [] # Create list to load the metadata features", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def test_creating_simple_feature():\n # given & when\n feature = Feature(1, \"Feature\", \"I am a feature\", \"foo.feature\", 1, tags=None)\n\n # then\n assert feature.id == 1\n assert feature.keyword == \"Feature\"\n assert feature.sentence == \"I am a feature\"\n assert feature.path == \"foo.feature\"\n assert feature.line == 1\n assert feature.tags == []", "def create_from_feature_list(self, features): \n for f in features:\n featuretype = f.pop('featuretype', None)\n if featuretype is None:\n raise LoopException\n if featuretype == 'strati':\n self.create_and_add_foliation(f)\n # if featuretype == 'fault':\n # self.create_and_add_fault(f)\n if featuretype == 'folded_strati':\n self.create_and_add_folded_foliation(f)", "def get_ontology(base_iri='emmo-inferred.owl', verbose=False, name=None):\n\n if (not base_iri.endswith('/')) and (not base_iri.endswith('#')):\n base_iri = '%s#'%base_iri\n if base_iri in default_world.ontologies:\n onto = default_world.ontologies[base_iri]\n else:\n onto = MyOntology(default_world, base_iri, name=name)\n 
onto._verbose = verbose\n return onto", "def open_idf(self):\n\n self.save()\n\n filepath = self.idfname\n\n import os\n import platform\n import subprocess\n\n if platform.system() == \"Darwin\": # macOS\n subprocess.call((\"open\", filepath))\n elif platform.system() == \"Windows\": # Windows\n os.startfile(filepath)\n else: # linux variants\n subprocess.call((\"xdg-open\", filepath))", "def buildFeatureList():\n with open('./feature_list.txt', 'w')as out:\n res = es.search(index=indexName, doc_type=document,\n body={\n 'query': {\n 'query_string': {\n \"default_field\": \"split\",\n \"query\": \"training\"\n }\n },\n \"size\": indexSize\n })\n ids = [d['_id'] for d in res['hits']['hits']]\n for id in ids:\n text = es.get(index=indexName, doc_type=document, id=id)['_source']['body']\n terms = text.split()\n for term in terms:\n features[term] = term\n count = 0\n for term in features:\n count += 1\n out.write(str(count)+ \" \" + term + '\\n')", "def getFeatures(self, state, action, thisAgent):\n util.raiseNotDefined()", "def setup_features():\n\n core_features = {\"web\": [\"content_directory\", \"controllers\", \"templates\"]}\n\n imported_features = []\n for feature_type, feature_list in core_features.items():\n features_list_names = \", \".join(feature_list)\n print(\n \"** Setting up {0} features {1}\".format(\n info(feature_type), info(features_list_names)\n )\n )\n for feature_name in feature_list:\n script_dir = dirname(abspath(__file__))\n module_fname = join(\n script_dir, \"features\", feature_type, feature_name + \".py\"\n )\n\n feature_dict = {}\n with open(module_fname) as source_file:\n exec(compile(source_file.read(), module_fname, \"exec\"), feature_dict)\n try:\n feature = feature_dict[\"Feature\"]()\n except KeyError:\n print_error(\n \"Feature module '%s' does not provide a Feature class!\"\n % feature_name\n )\n sys.exit(1)\n try:\n feature.setup()\n except: # NOQA: E722\n print_error(\"Failed setting up feature '%s' !\" % feature_name)\n raise\n imported_features.append(feature)\n\n for feature in imported_features:\n if hasattr(feature, \"activate\"):\n feature.activate()", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('-i', '--input-ontology',\n default=config_test.config[\"msh_test_onto\"])\n parser.add_argument('-s', '--signature')\n parser.add_argument('-f', '--format-name', default=None)\n parser.add_argument('-o', '--output-file', default=\"ontology/output.owl\")\n parser.add_argument('-d', '--max-depth', default=10)\n parser.add_argument('-l', '--locality', default='top')\n\n args = parser.parse_args()\n\n g = Graph().parse(args.input_ontology, format=args.format_name)\n resource = entity_mapper.match_entity(args.signature, g)\n ontomodule = extract_module.extract_module(\n [resource], g, locality=args.locality, max_depth=args.max_depth)\n\n with open(args.output_file, \"w\") as f:\n ontomodule.serialize(f)", "def open( self ):\n pass", "def connectOntology(ontology, endpoint=None):\r\n world = World()\r\n world.get_ontology(ontology).load()\r\n graph = world.as_rdflib_graph()\r\n if graph:\r\n return graph\r\n else:\r\n print(\"connection failed\")\r\n return", "def __feature_set__(self):\r\n import numpy as np\r\n import datetime\r\n import time\r\n cols_norm = [col for col in self.columns]\r\n cols_lower = [col.lower() for col in 
self.columns]\r\n fields = []\r\n features = []\r\n date_fields = []\r\n _geom_types = {\r\n arcgis.geometry._types.Point : \"esriGeometryPoint\",\r\n arcgis.geometry._types.Polyline : \"esriGeometryPolyline\",\r\n arcgis.geometry._types.MultiPoint : \"esriGeometryMultipoint\",\r\n arcgis.geometry._types.Polygon : \"esriGeometryPolygon\"\r\n }\r\n if self.sr is None:\r\n sr = {'wkid' : 4326}\r\n else:\r\n sr = self.sr\r\n fs = {\r\n \"objectIdFieldName\" : \"\",\r\n \"globalIdFieldName\" : \"\",\r\n \"displayFieldName\" : \"\",\r\n \"geometryType\" : _geom_types[type(self.geometry[self.geometry.first_valid_index()])],\r\n \"spatialReference\" : sr,\r\n \"fields\" : [],\r\n \"features\" : []\r\n }\r\n if 'objectid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('objectid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('objectid')]\r\n elif 'fid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('fid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('fid')]\r\n elif 'oid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('oid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('oid')]\r\n else:\r\n self['OBJECTID'] = list(range(1, self.shape[0] + 1))\r\n res = self.__feature_set__\r\n del self['OBJECTID']\r\n return res\r\n if 'objectIdFieldName' in fs:\r\n fields.append({\r\n \"name\" : fs['objectIdFieldName'],\r\n \"type\" : \"esriFieldTypeOID\",\r\n \"alias\" : fs['objectIdFieldName']\r\n })\r\n cols_norm.pop(cols_norm.index(fs['objectIdFieldName']))\r\n if 'globalIdFieldName' in fs and len(fs['globalIdFieldName']) > 0:\r\n fields.append({\r\n \"name\" : fs['globalIdFieldName'],\r\n \"type\" : \"esriFieldTypeGlobalID\",\r\n \"alias\" : fs['globalIdFieldName']\r\n })\r\n cols_norm.pop(cols_norm.index(fs['globalIdFieldName']))\r\n elif 'globalIdFieldName' in fs and \\\r\n len(fs['globalIdFieldName']) == 0:\r\n del fs['globalIdFieldName']\r\n if self._geometry_column_name in cols_norm:\r\n cols_norm.pop(cols_norm.index(self._geometry_column_name))\r\n for col in cols_norm:\r\n try:\r\n idx = self[col].first_valid_index()\r\n col_val = self[col].loc[idx]\r\n except:\r\n col_val = \"\"\r\n if isinstance(col_val, (str, np.str)):\r\n l = self[col].str.len().max()\r\n if str(l) == 'nan':\r\n l = 255\r\n\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeString\",\r\n \"length\" : int(l),\r\n \"alias\" : col\r\n })\r\n if fs['displayFieldName'] == \"\":\r\n fs['displayFieldName'] = col\r\n elif isinstance(col_val, (datetime.datetime,\r\n pd.Timestamp,\r\n np.datetime64,\r\n pd.datetime)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeDate\",\r\n \"alias\" : col\r\n })\r\n date_fields.append(col)\r\n elif isinstance(col_val, (np.int32, np.int16, np.int8)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeSmallInteger\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (int, np.int, np.int64)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeInteger\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (float, np.float64)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeDouble\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (np.float32)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeSingle\",\r\n \"alias\" : col\r\n })\r\n fs['fields'] = fields\r\n for row in self.to_dict('records'):\r\n geom = {}\r\n if self._geometry_column_name in row:\r\n geom = 
row[self._geometry_column_name]\r\n del row[self._geometry_column_name]\r\n for f in date_fields:\r\n try:\r\n row[f] = int(row[f].to_pydatetime().timestamp() * 1000)\r\n except:\r\n row[f] = None\r\n features.append(\r\n {\r\n \"geometry\" : dict(geom),\r\n \"attributes\" : row\r\n }\r\n )\r\n del row\r\n del geom\r\n fs['features'] = features\r\n return fs", "def open(self):\r\n pass", "def open(self):\r\n pass", "def __init__(self):\r\n self.label = \"OVL to Feature\"\r\n self.description = \"OVL to Feature converts an OVL file from CPOF, C2PC, GCCS or similar system and converts it to a series of Feature Class for Point, Line, and Polygons.\"\r\n self.canRunInBackground = False", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def open(self):\n pass", "def test_all_features(self):\n to_create = ['looktest1', 'looktest2', 'looktest3']\n for f in to_create:\n Feature(f).activate()\n\n all_features = Feature.all_features()\n self.assertEqual(len(all_features), len(to_create))\n for f in to_create:\n self.assertTrue(f in all_features)", "def feature():\n pass", "def open(self):\n raise NotImplementedError", "def open(self):\n raise NotImplementedError", "def __init__(self, geneId, gtfFeature):\n\n self.geneId = geneId\n self.features = {}", "def add_features(self, fbids):\n if not fbids:\n warnings.warn(\"No fbids provided.\")\n return False\n feats = self.name_synonym_lookup(fbids)\n proc_names = [f._asdict() for f in feats.values()]\n for d in proc_names:\n d['synonyms'] = '|'.join(d['synonyms'])\n statement = \"MERGE (n:Feature:Class { short_form : line.fbid } ) \" \\\n \"SET n.label = line.symbol SET n.synonyms = split(line.synonyms, '|') \" \\\n \"SET n.iri = 'http://flybase.org/reports/' + line.fbid\" # Why not using ni? 
Can kbw have switch to work via csv?\n self.commit_via_csv(statement, proc_names)\n self.addTypes2Neo(fbids)\n return feats", "def updateOpengraphableObjects(event):\n update_opengraphable_objects(event.context,\n event.data.get('content_types', []))", "def idf_object_features_set(set_id):\n # idf for calc features of new docs\n # object-features for learning model\n # doc_index links doc_id and row index in object-features\n # lemma_index links lemmas and column index in object-features\n\n # get lemmas of all docs in set\n docs = db.get_lemmas_freq(set_id)\n\n # document frequency - number of documents with lemma\n doc_freq = {}\n # number (sum of weights) of lemmas in document\n doc_size = {}\n # index of lemma in overall list\n lemma_index = {}\n # lemma counter in overall list\n lemma_counter = 0\n # document index\n doc_index = {}\n # document counter in overall list\n doc_counter = 0\n\n for doc_id in docs:\n # initialize doc_size\n doc_size[doc_id] = 0\n # add document in overall list by giving index\n doc_index[doc_id] = doc_counter\n doc_counter += 1\n # count lemmas of doc\n for lemma in docs[doc_id]:\n # increase number of docs with lemma\n doc_freq[lemma] = doc_freq.get(lemma, 0) + 1\n # increase number of lemmas in document\n doc_size[doc_id] += docs[doc_id][lemma]\n\n # compute idf\n idf = {}\n for lemma in doc_freq:\n idf[lemma] = - math.log(doc_freq[lemma]/doc_counter)\n\n # and lemmas add in overall list by giving index\n for lemma in idf:\n if idf[lemma] != 0:\n lemma_index[lemma] = lemma_counter\n lemma_counter += 1\n\n # initialization objects-features matrix\n object_features = np.zeros((doc_counter, lemma_counter))\n\n # fill objects-features matrix\n for doc_id in docs:\n doc_lemmas = docs[doc_id]\n for lemma in doc_lemmas:\n if lemma_index.get(lemma, -1) != -1:\n object_features[doc_index[doc_id], lemma_index[lemma]] = \\\n doc_lemmas[lemma] / doc_size[doc_id] * idf[lemma]\n\n # check features with 0 for all documents\n feat_max = np.sum(object_features, axis=0)\n # print_lemmas(set_id, [k for k, v in enumerate(feat_max) if v == 0], lemma_index, idf)\n # check documents with 0 for all lemmas\n # print(np.min(np.sum(object_features, axis=1)))\n\n # save to db: idf, indexes and object_features\n db.put_training_set_params(set_id, idf, doc_index, lemma_index, object_features)\n\n # print(idf)\n # print(doc_index)\n # print(lemma_index)\n # print(object_features)", "def __init__(self, data_filename):\n with open(data_filename, 'rb') as data_file:\n loaded_features = pickle.load(data_file)\n self.title_nlp_tfidf_features = loaded_features['title_NLP_TFIDF_features']\n self.other_features = loaded_features['other_features']\n self.category1_features = loaded_features['category1_features']\n self.category2_features = loaded_features['category2_features']\n self.category3_features = loaded_features['category3_features']\n self.material_features = loaded_features['material_features']\n self.who_made_features = loaded_features['whoMade_features']\n self.when_made_features = loaded_features['whenMade_features']\n self.style1_features = loaded_features['style1_features']\n self.style2_features = loaded_features['style2_features']\n self.feature_labels = loaded_features['feature_labels']", "def from_file(path) -> ontol.Ontology:\n abs_path = os.path.abspath(os.path.normpath(path))\n\n return __ontology[abs_path]", "def __init__(self, obo_file=OBO_FILE, optional_attrs=None):\n self.optobj = self._init_optional_attrs(optional_attrs) # OboOptionalAttrs or None\n 
self.format_version = None # e.g., \"1.2\" of \"format-version:\" line\n self.data_version = None # e.g., \"releases/2016-07-07\" from \"data-version:\" line\n self.typedefs = {}\n\n # True if obo file exists or if a link to an obo file exists.\n print(\"obo_file:\")\n print(obo_file)\n if os.path.isfile(obo_file):\n self.obo_file = obo_file\n # GOTerm attributes that are necessary for any operations:\n else:\n raise Exception(\"COULD NOT READ({OBO})\\n\"\n \"download obo file first\\n \"\n \"[http://geneontology.org/ontology/\"\n \"go-basic.obo]\".format(OBO=obo_file))", "def __init__(self, ontology_path=None, hierarchies_path=None):\r\n\r\n\t\tself.ontology = rdflib.Graph()\r\n\t\tif ontology_path: # custom ontology path\r\n\t\t\t#self.ontology = owlready2.get_ontology(ontology_path).load()\r\n\t\t\tself.ontology.parse(ontology_path)\r\n\t\telse: # default ontology path\r\n\t\t\tself.ontology.parse('./sket/ont_proc/ontology/examode.owl')\r\n\t\tif hierarchies_path: # custom hierarchy relations path\r\n\t\t\tself.hrels = utils.read_hierarchies(hierarchies_path)\r\n\t\telse: # default hierarchy relations path\r\n\t\t\tself.hrels = utils.read_hierarchies('./sket/ont_proc/rules/hierarchy_relations.txt')\r\n\t\tself.disease = {'colon': '0002032', 'lung': '0008903', 'cervix': '0002974', 'celiac': '0005130'}", "def assign_openings(self, air_faces):\n boundary_name = generate_unique_name(\"Opening\")\n self.modeler.create_face_list(air_faces, \"boundary_faces\")\n props = {}\n air_faces = self.modeler._convert_list_to_ids(air_faces)\n\n props[\"Faces\"] = air_faces\n props[\"Temperature\"] = \"AmbientTemp\"\n props[\"External Rad. Temperature\"] = \"AmbientRadTemp\"\n props[\"Inlet Type\"] = \"Pressure\"\n props[\"Total Pressure\"] = \"AmbientPressure\"\n bound = BoundaryObject(self, boundary_name, props, \"Opening\")\n if bound.create():\n self.boundaries.append(bound)\n self.logger.glb.info(\"Opening Assigned\")\n return bound\n return None", "def clfFeature(feature, mode):\r\n \r\n feature_path = 'C:\\\\Users\\\\Tom\\\\Documents\\\\Informatiekunde\\\\Thesis\\\\features\\\\' + feature + '.txt'\r\n classlist = ['negative', 'positive']\r\n features = pd.DataFrame()\r\n\r\n for label in classlist:\r\n path = 'C:\\\\Users\\\\Tom\\\\Documents\\\\Informatiekunde\\\\Thesis\\\\data\\\\' + mode + '\\\\' + label + '\\\\'\r\n allFiles = glob.glob(path + \"*.txt\")\r\n for review in allFiles:\r\n title = review.strip('.txt').split('\\\\')[-1]\r\n file = open(review, 'r', encoding='utf8').read().lower()\r\n wordlist = []\r\n featreader = csv.reader(open(feature_path, 'r'), delimiter= '\\n')\r\n for word in featreader:\r\n if word[0] in file:\r\n wordlist.append(word[0])\r\n df = pd.DataFrame({'File': [title], feature.capitalize(): [', '.join(wordlist)]}).set_index('File')\r\n features = features.append(df)\r\n \r\n return features", "def get_features(self, feature_type=\"all\"):\n # if exists(path=\"data.csv\"):\n # return pd.read_csv(\"data.csv\")\n # else:\n # reading through directory\n for file_path in self.list_news_path:\n with open(file_path, 'r') as f:\n\n # open document to read and assign to doc\n doc = json.load(f)\n # skip the empty title or body\n if doc['title'] == \"\" or doc['text'] == \"\":\n pass\n else:\n # to extract all data from news content\n if feature_type == \"all\":\n news = doc['title'] + doc['text']\n\n # preprocesses news content\n words = preprocess(news)\n yield words\n\n # to extract title and text as a pair\n elif feature_type == \"pair\":\n title = 
preprocess(doc[\"title\"])\n body = preprocess(doc['text'])\n yield title, body\n # if not title or not body:\n # pass\n # else:\n # yield title, body\n\n # else you only need either title or body\n else:\n assert feature_type in doc.keys(), \"feature not in the document: \" + file_path\n # without stemming\n # CUSTOM_FILTERS = [lambda x: x.lower(), strip_tags, strip_punctuation, strip_multiple_whitespaces,\n # strip_numeric, remove_stopwords]\n\n feature = doc[feature_type]\n words = preprocess(feature)\n # using alternative preprocessing function\n # words = preprocess_string(words, filters=CUSTOM_FILTERS)\n yield words", "def features(self, state, action, next_state):\n raise NotImplementedError", "def feature(self):\n Feature(run=default_frame, flags=TE)\n Feature(run=load(\"window_functions.tests.rows_frame\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_frame\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_overflow\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_datetime\", \"feature\"), flags=TE)\n Feature(run=load(\"window_functions.tests.range_errors\", \"feature\"), flags=TE)", "def getFeatures(featureInput):\n featureList = []\n for defTerm,candidateSent in featureInput:\n tokens = nltk.word_tokenize(candidateSent)\n features = {}\n POScenter,POSleft,POSright = wordPOS(tokens,defTerm)\n features['Pos of first Article'] = posFirstArticle(tokens)\n## features['Num Punct Marks'] = numPunctuation(tokens)\n features['Subj words Predicate'] = subWordPerdicate(candidateSent,defTerm,tokens)\n features['Word before def term'] = wordBeforeDef(tokens,defTerm)\n features['POS centered word'] = POScenter\n features['POS left word'] = POSleft\n## features['POS right word'] = POSright \n featureList.append(features)\n return featureList", "def test_create_gene_ontology(self):\n\n # Here are mappings for just a few yeast genes.\n\n mapping = {}\n mapping['STE7'] = ['GO:0000187']\n mapping['PBS2'] = ['GO:0000187']\n mapping['NOP8'] = [\n 'GO:0003676', 'GO:0003723', 'GO:0042254', 'GO:0005634', 'GO:0005730'\n ]\n\n # Build the ontology, then see if it looks correct.\n\n root = dc.models.tensorgraph.models.ontology.create_gene_ontology(\n mapping, min_node_features=1)\n assert len(root.feature_ids) == 0\n\n def find_features(node, features):\n features.update(node.feature_ids)\n for child in node.children:\n find_features(child, features)\n\n all_features = set()\n find_features(root, all_features)\n assert len(all_features) == 3\n for key in mapping:\n assert key in all_features", "def open(self):\n raise NotImplementedError( 'Needs implementation' )", "def pick_otus_open_ref(input_fname, output_dir, verbose=None, qiime_opts={}):\n\n output_fname = new_file(\"otu_table.biom\", basedir=output_dir)\n revcomp_fname = new_file(\n \"revcomp.fna\", basedir=os.path.dirname(input_fname))\n\n verbose = settings.workflows.verbose if verbose is None else verbose\n\n default_opts = {\n \"reference_fp\": settings.workflows.sixteen.otu_refseq\n }\n default_opts.update(qiime_opts)\n opts = dict_to_cmd_opts(default_opts)\n\n cmd = (\" pick_open_reference_otus.py\"+\n \" --input_fp={}\"+\n \" --output_dir=\"+output_dir+\n \" -f\"+\n \" \"+opts)\n\n revcomp_cmd = (\"sequence_convert\"+\n \" --format=fasta\"+\n \" --to=fasta \"+\n \" -r\"+\n \" \"+input_fname+\n \" > \"+revcomp_fname)\n\n def run(targets):\n strategies.backup(\n (CmdAction(cmd.format(input_fname),verbose=verbose),\n strategies.Group(\n CmdAction(revcomp_cmd),\n 
CmdAction(cmd.format(revcomp_fname),verbose=verbose))),\n extra_conditions = [ \n lambda ret, output_fname: os.stat(output_fname).st_size == 0\n ],\n output_fname=output_fname\n )\n\n return {\n \"name\": \"pick_otus_open_ref:\"+input_fname,\n \"actions\": [run],\n \"targets\": [output_fname],\n \"file_dep\": [input_fname],\n }", "def wfs_common(request, response, mode, spatial_mode='wfs'):\n\n outputpath = configuration.get_config_value('server', 'outputpath')\n outputurl = configuration.get_config_value('server', 'outputurl')\n\n list_of_files = []\n for one_resource in request.inputs['resource']:\n # Download if not opendap\n # Adding a maximum file size from a server config file would\n # be possible here...\n try:\n nc_file = opendap_or_download(\n one_resource.data,\n auth_tkt_cookie=request.http_request.cookies,\n output_path='/tmp')\n except:\n raise Exception(traceback.format_exc())\n list_of_files.append(nc_file)\n\n if ('typename' in request.inputs) and ('featureids' in request.inputs):\n typename = request.inputs['typename'][0].data\n features = [f.data for f in request.inputs['featureids']]\n if 'geoserver' in request.inputs:\n geoserver = request.inputs['geoserver'][0].data\n else:\n geoserver = configuration.get_config_value('extra', 'geoserver')\n if 'mosaic' in request.inputs:\n mosaic = request.inputs['mosaic'][0].data\n else:\n mosaic = False\n try:\n conn = WebFeatureService(url=geoserver, version='2.0.0')\n resp = conn.getfeature([typename], featureid=features,\n outputFormat='application/json')\n feature = json.loads(resp.read())\n crs_code = owslib.crs.Crs(\n feature['crs']['properties']['name']).code\n crs = ocgis.CoordinateReferenceSystem(epsg=crs_code)\n geom = [\n {'geom': shape(f['geometry']), 'crs': crs,\n 'properties': f['properties']}\n for f in feature['features']]\n except Exception as e:\n msg = ('Failed to fetch features.\\ngeoserver: {0} \\n'\n 'typename: {1}\\nfeatures {2}\\n{3}').format(\n geoserver, typename, features, e)\n raise Exception(msg)\n if mosaic:\n new_geom = geom[0]\n for merge_geom in geom[1:]:\n new_geom['geom'] = new_geom['geom'].union(merge_geom['geom'])\n new_geom['properties'] = {'bbox': feature['bbox']}\n geom = new_geom\n elif spatial_mode == 'bbox':\n geom = [[request.inputs['lon0'][0].data,\n request.inputs['lat0'][0].data,\n request.inputs['lon1'][0].data,\n request.inputs['lat1'][0].data]]\n else:\n geom = [None]\n\n if ('initial_datetime' in request.inputs) and \\\n ('final_datetime' in request.inputs):\n tr = [request.inputs['initial_datetime'][0].data,\n request.inputs['final_datetime'][0].data]\n else:\n tr = None\n\n try:\n output_files = []\n output_urls = []\n mv_dir = tempfile.mkdtemp(dir=outputpath)\n os.chmod(mv_dir, 0755)\n\n for one_file in list_of_files:\n file_name = os.path.basename(one_file)\n if file_name[-3:] == '.nc':\n file_prefix = file_name[:-3]\n else:\n file_prefix = file_name\n ocgis.env.DIR_OUTPUT = tempfile.mkdtemp(dir=os.getcwd())\n ocgis.env.OVERWRITE = True\n nc = netCDF4.Dataset(one_file, 'r')\n var_names = guess_main_variables(nc)\n nc.close()\n rd = ocgis.RequestDataset(one_file, var_names)\n for i, one_geom in enumerate(geom):\n if one_geom is None:\n ocgis_geom = None\n elif spatial_mode == 'bbox':\n ocgis_geom = one_geom\n else:\n ocgis_geom = one_geom['geom']\n if mode == 'averager':\n # Extent errors are ignored\n try:\n # Here with aggregate=True, can't pass the whole\n # one_geom dictionary, is this a sign that this does\n # not support multipolygon?\n ops = ocgis.OcgOperations(\n 
dataset=rd, geom=ocgis_geom,\n spatial_operation='clip', aggregate=True,\n time_range=tr, output_format='nc',\n interpolate_spatial_bounds=True,\n prefix=file_prefix).execute()\n except ExtentError:\n continue\n elif mode == 'subsetter':\n # Extent errors are ignored\n try:\n # Still having problem with the geometry, previously\n # was passing geom=[one_geom]\n ops = ocgis.OcgOperations(\n dataset=rd, geom=ocgis_geom, time_range=tr,\n output_format='nc',\n interpolate_spatial_bounds=True,\n prefix=file_prefix).execute()\n except ExtentError:\n continue\n # Here, the global attribute 'subset_typename' and\n # 'subset_featureid' are added to the NetCDF file to keep\n # track of the feature used.\n if (geom != [None]) and (spatial_mode == 'wfs'):\n with netCDF4.Dataset(ops, 'a') as nc:\n nc.subset_typename = typename\n nc.subset_featureid = features[i]\n\n if (spatial_mode == 'wfs') and \\\n ('featureids' in request.inputs):\n mv_name = '{0}_{1}.nc'.format(\n os.path.basename(ops)[:-3], features[i])\n else:\n mv_name = '{0}_{1}.nc'.format(\n os.path.basename(ops)[:-3], 'subset')\n\n mv_file = os.path.join(mv_dir, mv_name)\n shutil.move(ops, mv_file)\n output_files.append(mv_file)\n shutil.rmtree(ocgis.env.DIR_OUTPUT)\n\n # Cover the case of an online wps server and the offline\n # mode for tests.\n if outputurl == 'file:///tmp':\n disk_file = 'file:///' + mv_file.lstrip('/')\n output_urls.append(disk_file)\n else:\n url_file = os.path.join(\n outputurl, os.path.basename(mv_dir), mv_name)\n output_urls.append(url_file)\n except:\n raise Exception(traceback.format_exc())\n\n # If only ExtentError occured, the output_urls will be empty...\n if not output_urls:\n raise ExtentError(message=\"All ocgis calls returned ExtentError.\")\n\n time_str = time.strftime(\"%Y-%m-%dT%H:%M:%SZ\", time.gmtime())\n output_file_name = \"result_%s_.json\" % (time_str,)\n output_file = os.path.join('/tmp', output_file_name)\n f1 = open(output_file, 'w')\n f1.write(json.dumps(output_urls))\n f1.close()\n response.outputs['output'].file = output_file\n response.outputs['output'].output_format = json_format\n response.update_status(\"done\", 100)\n return response", "def open(self) -> None:\n\n raise NotImplementedError", "def __init__(self, objGeo=None, objPath='', verbose=True, notation='[1 of 1]', *args, **kwargs):\n # containers for file contents\n self.comments = []\n self.v = []\n self.vt = []\n self.vn = []\n self.f = []\n self.mrgb = []\n self.g = []\n self.remainder = []\n # helpers and args\n self.verbose = verbose\n self.notation = notation\n self.objGeo = objGeo\n self.objPath = objPath\n self.objName = os.path.basename(objPath)\n\n # if you pass a list, such as the selection list, only grab the first xform element\n # also applies to newly created geo - maya preselects the primative creation node\n if isinstance(self.objGeo, list):\n for item in self.objGeo:\n if isinstance(item, pm.nt.Transform):\n self.objGeo = item\n break\n lcUtility.Utility.lc_print('You supplied a list. 
Making only first Xform element: {0}'.format(self.objGeo),\n mode='warning')\n\n # decide how to load based on given parameters\n if self.objPath and self.objGeo is None:\n self.get_data_file()\n elif self.objGeo is not None:\n self.get_data_geo()\n else:\n lcUtility.Utility.lc_print('not loading anything')", "def attach_feature_accessors(obj, feats: FeaturesTuple):\n BLACKLIST = (\n 'PixelFormat', # PixelFormats have special access methods.\n )\n\n for feat in feats:\n feat_name = feat.get_name()\n if feat_name not in BLACKLIST:\n setattr(obj, feat_name, feat)", "def Run(self, ontologyFile, deleteIfFound):\n\n # start the import\n self.helpers.Info(Messages().Get(113))\n\n # Check if the schema is empty\n if deleteIfFound == False:\n schemaThingsCount, schemaActionsCount = self.helpers.SchemaCount()\n if schemaThingsCount != 0 or schemaActionsCount != 0:\n self.helpers.Error(Messages().Get(208))\n\n # check if things files is url\n if validators.url(ontologyFile) is True:\n ontologyFile = self.downloadSchemaFiles('./ontology.json', ontologyFile)\n\n # open the thingsfile\n try:\n with open(ontologyFile, 'r') as file:\n ontology = json.load(file)\n except IOError:\n self.helpers.Error(Messages().Get(201) + ontologyFile)\n\n # Set things and actions from ontology file\n if \"actions\" not in ontology:\n self.helpers.Error(Messages().Get(209) + \"actions\")\n elif \"things\" not in ontology:\n self.helpers.Error(Messages().Get(209) + \"things\")\n\n actions = ontology[\"actions\"]\n things = ontology[\"things\"]\n\n # Validate if delete function would work\n if deleteIfFound is True:\n self.helpers.Info(Messages().Get(114))\n\n # check if there is data\n if self.checkIfThereIsData(\"things\") is True \\\n or self.checkIfThereIsData(\"actions\") is True:\n self.helpers.Error(Messages().Get(203))\n\n # Render and create things\n self.helpers.Info(Messages().Get(115) + \"things\")\n self.helpers.CreateConceptClasses(\"things\", things[\"classes\"], deleteIfFound)\n\n # Render and create actions\n self.helpers.Info(Messages().Get(115) + \"actions\")\n self.helpers.CreateConceptClasses(\"actions\", actions[\"classes\"], deleteIfFound)\n\n # Add properties to things (needs to run after CreateConceptClasses()!)\n self.helpers.Info(Messages().Get(116) + \"things\")\n self.helpers.AddPropsToConceptClasses(\"things\", things[\"classes\"], deleteIfFound)\n\n # Add properties to actions (needs to run after CreateConceptClasses()!)\n self.helpers.Info(Messages().Get(116) + \"actions\")\n self.helpers.AddPropsToConceptClasses(\"actions\", actions[\"classes\"], deleteIfFound)\n\n # Validate Things & Actions\n self.helpers.Info(Messages().Get(117))\n if self.helpers.ValidateConceptClasses(things[\"classes\"], actions[\"classes\"]) is True:\n self.helpers.Info(Messages().Get(118))\n exit(0)\n else:\n self.helpers.Error(Messages().Get(204))", "def open(f):\n tree = etree.parse(f)\n root = tree.getroot()\n\n # FIXME Add file identification for design rules, autorouter rules, CAM\n # jobs, ULPs.\n for tag, cls in [('library', types.Library),\n ('schematic', types.Schematic),\n ('board', types.Board)]:\n nodes = root.xpath('drawing/' + tag)\n if len(nodes) == 1:\n node = root.xpath('drawing')[0]\n return cls.from_drawing_xml(node, from_file=f)\n raise NotImplementedError", "def OBQAFacts():\n download_dataset(Collection.ALLEN_AI_OBQA, check_shallow_integrity)\n facts_file = os.path.join(\n OBQA_CACHE_DIR, \"OpenBookQA-V1-Sep2018\",\n \"Data\", \"Main\", \"openbook.txt\"\n )\n with open(facts_file, \"rt\") as 
f:\n for line in f:\n fact = line.strip(string.whitespace + \"\\\"\")\n if len(fact) > 0:\n yield fact", "def buildFeature(self, action):\n return self.findFeature(action)()", "def generateFeatures(self, data):\n pass", "def display_features():\n\n # Parse the URL, check for implicit resources, extract the primary record\n # http://127.0.0.1:8000/eden/gis/display_features&module=pr&resource=person&instance=1&jresource=presence\n ok = 0\n if \"module\" in request.vars:\n res_module = request.vars.module\n ok +=1\n if \"resource\" in request.vars:\n resource = request.vars.resource\n ok +=1\n if \"instance\" in request.vars:\n instance = int(request.vars.instance)\n ok +=1\n if \"jresource\" in request.vars:\n jresource = request.vars.jresource\n ok +=1\n if ok != 4:\n session.error = T(\"Insufficient vars: Need module, resource, jresource, instance\")\n raise HTTP(400, body=s3xrc.xml.json_message(False, 400, session.error))\n\n component, pkey, fkey = s3xrc.model.get_component(res_module, resource, jresource)\n table = db[\"%s_%s\" % (res_module, resource)]\n jtable = db[str(component.table)]\n query = (jtable[fkey] == table[pkey]) & (table.id == instance)\n # Filter out deleted\n deleted = (table.deleted == False)\n query = query & deleted\n # Filter out inaccessible\n query2 = db.gis_location.id == jtable.location_id\n accessible = s3_accessible_query(\"read\", db.gis_location)\n query2 = query2 & accessible\n\n features = db(query).select(db.gis_location.ALL, left = [db.gis_location.on(query2)])\n\n # Calculate an appropriate BBox\n bounds = gis.get_bounds(features=features)\n\n map = gis.show_map(\n feature_queries = [{\"name\" : \"Features\", \"query\" : features, \"active\" : True}],\n bbox = bounds,\n window = True,\n closable = False,\n collapsed = True\n )\n\n return dict(map=map)", "def create(feature, bo=None):\n if feature is None:\n features = BOFeatures(bo)\n return(features)\n \n else:\n \n if feature.is_collection:\n return(feature)\n else:\n features = BOFeatures(bo)\n features.add(feature)\n return(features)", "def getFeatures(gdf):\r\n import json\r\n features = [json.loads(gdf.to_json())['features'][0]['geometry']]\r\n return features", "def _ofind(self,oname):\n\n # the @ in magics isn't really part of the name\n oname = oname.strip()\n if oname.startswith('@'):\n oname = oname[1:]\n\n # Namespaces to search in:\n user_ns = self.shell.user_ns\n user_config_ns = self.shell.user_config_ns\n internal_ns = self.shell.internal_ns\n builtin_ns = __builtin__.__dict__\n\n # Put them in a list. The order is important so that we find things in the\n # same order that Python finds them.\n namespaces = [ ('Interactive',user_ns),\n ('User-defined configuration',user_config_ns),\n ('IPython internal',internal_ns),\n ('Python builtin',builtin_ns)\n ]\n\n # initialize results to 'null'\n found = 0; obj = None; ospace = None; ds = None; ismagic = 0\n\n try:\n for nsname,ns in namespaces:\n try:\n obj = ns[oname]\n except KeyError:\n pass\n else:\n found = 1\n ospace = nsname\n ds = inspect.getdoc(obj)\n raise 'found it'\n except 'found it':\n pass\n\n # try to see if it's magic\n if not found:\n try:\n obj = eval('self.magic_'+oname)\n found = 1\n ospace = 'IPython internal'\n ismagic = 1\n ds = inspect.getdoc(obj)\n except:\n pass\n # Play some games to try and find info about dotted objects\n # and for things like {}.get? or ''.remove? 
to work\n if not found:\n try:\n self.tmp_obj = eval(oname,user_ns)\n found = 1\n except:\n try:\n self.tmp_obj = eval(oname,builtin_ns)\n found = 1\n except:\n pass\n if found:\n ds = inspect.getdoc(self.tmp_obj)\n ospace = 'Currently not defined in user session.'\n obj = self.tmp_obj\n del self.tmp_obj\n return found,obj,ospace,ds,ismagic", "def features(self, features):\n\n self._features = features", "def iter_features(self):\n features = self.features\n if (features is not None):\n yield from features", "def open(self) -> None:\n pass", "def test_read_feature_collection(self):\n fc = self.read_feature()\n assert len(fc.features) == 1\n feature = fc.features[0]\n self.check_feature(feature)", "def orfFinder(data):\n\t\n\tORFile = getORFs(data.seqFile, \n\t\t\t\t\t data.queryName, \n\t\t\t\t\t data.o)\n\tsetattr(data, \"ORFs\", ORFile)", "def open(self) -> None:\n raise NotImplementedError()", "def open_observation(self, mode):\n\n # check if current observation must be closed to open a new one\n if self.observationId:\n response = dialog.MessageDialog(programName,\n \"The current observation will be closed. Do you want to continue?\",\n [YES, NO])\n if response == NO:\n return \"\"\n else:\n self.close_observation()\n\n if mode == \"start\":\n result, selectedObs = self.selectObservations(OPEN)\n if mode == VIEW:\n result, selectedObs = self.selectObservations(VIEW)\n\n if selectedObs:\n return self.load_observation(selectedObs[0], mode)\n else:\n return \"\"", "def __init__(self, feat):\n\n self.feat = feat\n self.shape = shape(feat[\"geometry\"])\n\n self.prop = feat[\"properties\"]\n self.feat[\"properties\"][\"style\"] = {\n \"fill_opacity\": 0.1,\n \"opacity\": 0.1, \n \"color\": \"white\", \n \"weight\": 0.75}\n self.id = self.prop[\"grid_id\"]\n self.level = self.prop[\"grid_level\"]\n \n self.layer = GeoJSON(\n data=self.feat,\n hover_style = {\n \"weight\": 1, \n \"color\": \"white\",\n \"fillColor\": \"white\",\n \"fillOpacity\": 0.3})\n self.layer.on_click(self.toggle)\n self.on = False", "def sgd_features(filepath=None):\n\n if filepath == None:\n filepath=load_sgd_tab()\n\n arabic_to_roman_dict=chromosomename_roman_to_arabic()[0]\n \n with open(filepath) as f:\n lines = f.readlines()\n\n\n feature_list = []\n feature_orf_dict = {}\n feature_ars_dict = {}\n feature_telomere_dict = {}\n feature_ltr_dict = {}\n feature_centromere_dict = {}\n feature_Xelement_dict = {}\n feature_intron_dict = {}\n feature_ncrna_dict = {}\n feature_ncexon_dict = {}\n feature_trna_dict = {}\n feature_snorna_dict = {}\n feature_teg_dict = {}\n feature_5p_utrintron_dict = {}\n feature_mas_dict = {}\n feature_snrna_dict = {}\n feature_rrna_dict = {}\n feature_ets_dict = {}\n feature_its_dict = {}\n feature_oor_dict = {}\n feature_telrna_dict = {}\n \n for line in lines:\n l = line.strip('\\n').split('\\t')\n if not l[1] in feature_list:\n feature_list.append(l[1])\n\n if not l[8].endswith('micron') and not l[8] == '':\n chromosome = arabic_to_roman_dict.get(int(l[8]))\n if l[1] == 'ORF':\n feature_orf_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'ARS':\n feature_ars_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'telomere':\n feature_telomere_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'long_terminal_repeat':\n feature_ltr_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'centromere':\n feature_centromere_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], 
chromosome, l[9],l[10]]\n elif l[1] == 'X_element':\n feature_Xelement_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'intron':\n feature_intron_dict[l[6]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'ncRNA_gene':\n feature_ncrna_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'noncoding_exon':\n feature_ncexon_dict[l[6]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'tRNA_gene':\n feature_trna_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'snoRNA_gene':\n feature_snorna_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'transposable_element_gene':\n feature_teg_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'five_prime_UTR_intron':\n feature_5p_utrintron_dict[l[6]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'matrix_attachment_site':\n feature_mas_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'snRNA_gene':\n feature_snrna_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'rRNA_gene':\n feature_rrna_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'external_transcribed_spacer_region':\n feature_ets_dict[l[6]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'internal_transcribed_spacer_region':\n feature_its_dict[l[6]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'origin_of_replication':\n feature_oor_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n elif l[1] == 'telomerase_RNA_gene':\n feature_telrna_dict[l[3]] = [l[1], l[2], l[4], l[5], l[6], chromosome, l[9],l[10]]\n\n\n \n\n\n genomicregions_list = ['ORF', 'ARS', 'Telomere', 'long_terminal_repeat',\n 'Centromere', 'X_element', 'Intron', 'ncRNA_gene',\n 'Noncoding_exon', 'tRNA_gene', 'snoRNA_gene',\n 'transposable_element_gene', 'five_prime_UTR_intron',\n 'matrix_attachment_site', 'snRNA_gene', 'rRNA_gene',\n 'external_transcribed_spacer_region',\n 'internal_transcribed_spacer_region',\n 'origin_of_replication', 'telomerase_RNA_gene']\n\n\n return(genomicregions_list, feature_orf_dict, feature_ars_dict, feature_telomere_dict,\n feature_ltr_dict, feature_centromere_dict, feature_Xelement_dict, feature_intron_dict,\n feature_ncrna_dict, feature_ncexon_dict, feature_trna_dict,\n feature_snorna_dict, feature_teg_dict, feature_5p_utrintron_dict,\n feature_mas_dict, feature_snrna_dict, feature_rrna_dict,\n feature_ets_dict, feature_its_dict, feature_oor_dict,\n feature_telrna_dict)" ]
[ "0.5661019", "0.55700505", "0.5532482", "0.5513073", "0.5394623", "0.53802025", "0.53731954", "0.53539014", "0.5332702", "0.53029007", "0.53000814", "0.52954465", "0.52883095", "0.5224929", "0.522275", "0.5220456", "0.5214841", "0.5178848", "0.5178848", "0.5178848", "0.51736695", "0.51729804", "0.51715726", "0.51650476", "0.5158309", "0.51329446", "0.5130598", "0.51106197", "0.50905573", "0.5075711", "0.5061657", "0.50588745", "0.5048514", "0.5041661", "0.5031754", "0.5023072", "0.5011325", "0.4991129", "0.49863958", "0.49749327", "0.49609956", "0.49609956", "0.49609956", "0.4959868", "0.49478725", "0.49395138", "0.49206066", "0.4913981", "0.4913981", "0.4912413", "0.49118847", "0.49118847", "0.49118847", "0.49118847", "0.49118847", "0.49118847", "0.49118847", "0.4911248", "0.48997942", "0.48980284", "0.48980284", "0.48892975", "0.48795053", "0.48577806", "0.48553738", "0.48502362", "0.4830345", "0.4829138", "0.48234236", "0.48216057", "0.48139128", "0.48071507", "0.4799815", "0.4799226", "0.47935572", "0.47857383", "0.47816852", "0.47815", "0.4778757", "0.47783226", "0.47766536", "0.47729468", "0.47710782", "0.47650078", "0.47637653", "0.47610947", "0.47445655", "0.4735006", "0.47349757", "0.47343728", "0.4729395", "0.47270724", "0.4720677", "0.47202033", "0.4713823", "0.47065356", "0.47065285", "0.47051376", "0.46919447", "0.4691632" ]
0.7093566
0
Given a pb object, return the deterministic SHA1 hash hexdigest. Used for creating unique IDs.
def generate_uid_from_pbobject(pb_object): json_string = json.dumps( MessageToDict(pb_object, including_default_value_fields=True, preserving_proto_field_name=True), indent=2, sort_keys=True ) out = StringIO() out.write(json_string) uid = hashlib.sha1(out.getvalue().encode('utf-8')).hexdigest() out.close() return uid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def object_sha1(obj):\n\n return hashlib.sha1(json.dumps(obj).encode()).hexdigest()", "def SHA1(self) -> _n_0_t_3[_n_0_t_9]:", "def hexdigest(self):\n return self.hashObject.hexdigest()", "def sha1(self) -> str:\n return self.data.sha1", "def _sha1_hash_json(self, value):\n hash = hashlib.new(\"sha1\")\n binary_value = value.encode(\"ascii\")\n hash.update(binary_value)\n sha1_res = hash.hexdigest()\n return sha1_res", "def _sha1(self):\n return hashlib.sha1(self._blob).hexdigest()", "def hash(self) -> bytes:", "def nice_hash(*args):\n h = sha1()\n for item in args:\n h.update(unicode(item))\n return b32encode(h.digest())", "def hex_sha1_of_bytes(data: bytes) -> Sha1HexDigest:\n return Sha1HexDigest(hashlib.sha1(data).hexdigest())", "def sha1hex(doc):\n doc_id = doc.pop('_id',None)\n doc_rev = doc.get('_rev',None)\n doc_string = str(doc)\n\n if doc_id is not None:\n doc['_id'] = doc_id\n\n if doc_rev is not None:\n doc['_rev'] = doc_rev\n\n return hashlib.sha1(doc_string).hexdigest().upper()", "def hash(obj):\n \n import hashlib\n import pickle\n \n sha = hashlib.sha256()\n sha.update(pickle.dumps(obj))\n \n return sha.hexdigest()", "def hash(self) -> str:\r\n ...", "def hash_data(obj):\n collect = sha1()\n for text in bytes_iter(obj):\n if isinstance(text, six.text_type):\n text = text.encode('utf-8')\n collect.update(text)\n return collect.hexdigest()", "def hash_simple_obj_to_hex(obj):\n\n hash_ = sha256()\n try:\n update_hash(hash_, obj)\n except ValueError as e:\n raise ValueError(\"%s (full object was %r)\" % (e, obj))\n return hash_.hexdigest()", "def hash(self):\n return hashlib.sha1(str(self._dict))", "def __get_hashstr(_config_object: dict):\n hashobj = hashlib.md5()\n json_str = json.dumps(_config_object, sort_keys=True).encode('utf-8')\n hashobj.update(json_str)\n dig = hashobj.hexdigest()\n return dig\n # return hashobj.update(json.dumps(_config_object, sort_keys=True).encode('utf-8')).hexdigest()", "def sha1(self):\n return self.tag(\"sha1\")", "def hex(self) -> str:\n return self.__hash.hexdigest()", "def calc_statistics_hash(self) -> bytes:\n return b\"somehash\"", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s)\n return h.hexdigest()", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s.encode('utf-8'))\n return h.hexdigest()", "def get_checksum(str):\n hash_object = hashlib.sha1(b'%s' % str)\n hex_dig = hash_object.hexdigest()\n return hex_dig", "def get_hash(self) -> str:\n return self.__hash.hexdigest()", "def get_hash(content):\n return hashlib.sha1(content).hexdigest()", "def _hash_function(self, x):\n return hashlib.sha1(x).hexdigest()", "def _Hash(self):\n out = [self.key.string_id()]\n properties = self._PropList()\n for prop in properties:\n out.append(unicode(getattr(self, prop, '')))\n to_hash = ''.join(out)\n return hashlib.md5(to_hash.encode('utf-8')).hexdigest()", "def printable_hash(h):\n return int(h).to_bytes(32, byteorder='big', signed=False).hex()", "def hashhex(s):\n h = hashlib.sha1()\n h.update(s.encode())\n return h.hexdigest()", "def hash_obj(self, obj):\r\n md5er = hashlib.md5()\r\n update_hash(md5er, obj)\r\n return md5er.hexdigest()", "def object_hash(obj):\n try:\n code = obj.__code__.co_code\n except AttributeError:\n attrlist = [getattr(obj, name) for name in dir(obj)\n if not name.startswith('__')]\n codelist = [attr.__code__.co_code for attr in attrlist\n if hasattr(attr, '__code__')]\n code = b','.join(codelist)\n digest = hashlib.md5(code).hexdigest()\n return digest", "def hashhex(s):\n h = hashlib.sha1()\n 
h.update(s.encode('utf-8'))\n return h.hexdigest()", "def sha1(data):\n\n d = rpki.POW.Digest(rpki.POW.SHA1_DIGEST)\n d.update(data)\n return d.digest()", "def getFingerprint(self):\r\n return b2a_hex(SHA1(self.bytes))", "def GenerateHash(params):\n exp_params = params.ConvertToDict()\n return hashlib.sha1(\n repr(sorted(exp_params.items())).encode('utf-8')).hexdigest()", "def hexdigest(jsonable):\n string = json.dumps(jsonable, sort_keys=True).encode()\n return hashlib.sha1(string).hexdigest()", "def __str__(self: Hash) -> str:\n return self.to_hex()", "def hash_cli_name(name):\n from hashlib import blake2b\n return blake2b(name.encode(), digest_size=32).hexdigest()", "def pickle_and_hash(obj: Any) -> str:\n try:\n s = dill.dumps(obj)\n except:\n raise UnpickleableError()\n\n return hashlib.sha512(s).hexdigest()", "def hash_1(self):\n return self.unpack_qword(0x18)", "def hashable(obj):\n return bytes(str(obj), \"utf-8\")", "def do_hash(dat: typing.Any) -> str:\n return hashlib.sha1(json.dumps(dat, sort_keys=True).encode('utf-8')).hexdigest()", "def _get_hash(self, query):\n return hashlib.sha1(str(query)).hexdigest()", "def hash(password):\n result = hashlib.sha1(password.encode())\n # return a hexadecimal digits\n return result.hexdigest()", "def getHash():\n return str(uuid.uuid4())[-17:].replace(\"-\", \"\")", "def sha1(s: str) -> str:\n return hashlib.sha1(s.encode()).hexdigest()", "def create_hash(self):\n return os.urandom(32).encode('hex')", "def hash(self):\n return Hash.dhash(bytes(self))", "def hexdigest(self):\n # bytes.hex() is simpler, but not available For Python <= 3.4\n return \"\".join(\"{0:0>2x}\".format(b) for b in self.digest())", "def fingerprint_public_key_blob(blob):\n hash = sha256(blob).digest()\n encoded = b64encode(hash).decode('UTF-8').rstrip('=')\n return 'SHA256:{}'.format(encoded)", "def get_hash(self):\n return self.__hash", "def track_to_hash(track):\n return hashlib.sha1(track.encode('utf-8')).hexdigest()", "def hash(self) -> bytes:\n block_string = json.dumps(self.serialize(), sort_keys=True).encode()\n return bytes.fromhex(hashlib.sha256(block_string).hexdigest())", "def get_hash(self):\r\n return", "def get_hash(file_buffer):\n data = file_buffer.read()\n hasher = sha1()\n hasher.update(data)\n return hasher.hexdigest()", "def get_hash(self, params):\n return self.sha", "def sha1(self, s):\n\t\tself.sha1_calls += 1\n\t\treturn int(hashlib.sha1(s).hexdigest(), 16)", "def get_report_hash(self, consolidated):\n jsonstr = json.dumps(consolidated, sort_keys=True)\n hashobj = hashlib.sha1(jsonstr)\n hexval = hashobj.hexdigest()\n return hexval", "def generate_hash(passwd):\n return hashlib.sha512(passwd.encode(\"utf-8\")).hexdigest()", "def calculate_hash(stuff):\n\tsha1 = hashlib.sha1()\n\tsha1.update(stuff)\n\treturn sha1.hexdigest()", "def calculate_hash_id(self):\n return get_md5_hash(f'{self.type}{self.get_primary_id()}')", "def hash(self) -> str:\n return pulumi.get(self, \"hash\")", "def processor_hash(value):\r\n shared_secret = settings.CC_PROCESSOR['CyberSource'].get('SHARED_SECRET', '')\r\n hash_obj = hmac.new(shared_secret.encode('utf-8'), value.encode('utf-8'), sha1)\r\n return binascii.b2a_base64(hash_obj.digest())[:-1] # last character is a '\\n', which we don't want\r", "def create_hash(*args):\n challenge_str = jsonpickle.encode(args)\n challenge_hash = hashlib.sha256(challenge_str.encode())\n return Bn.from_binary(challenge_hash.digest())", "def get_content_sha1(self):", "def hash_generator(self, value):\n hash_string = 
hashlib.sha256(bytes(value))\n return hash_string.hexdigest()", "def compute_hash(self):\n block_string = json.dumps(self.__dict__, sort_keys=True)\n return sha256(block_string.encode()).hexdigest()", "def compute_hash(self):\n block_string = json.dumps(self.__dict__, sort_keys=True)\n return sha256(block_string.encode()).hexdigest()", "def symlink_hash(path):\n hasher = sha1()\n data = path_to_bytes(os.readlink(path))\n hasher.update(('blob %u\\0' % len(data)).encode('ascii'))\n hasher.update(data)\n return hasher", "def generate_hash(self):\n if not self.public_key:\n raise ValueError('Requires a public publicKey')\n return self.public_key.encode(encoding='bytes')", "def HexDigest(self, name, truncation_length=None):\n\n if truncation_length is None:\n truncation_length = 64\n name_bytes = name.encode('UTF-8')\n return hashlib.sha256(name_bytes).hexdigest()[:truncation_length]", "def hex_form(hash):\n final_hash = ''\n for i in range(len(hash)):\n final_hash += format(hash[i], '02x')\n return final_hash", "def hash(self):\n return hashlib.sha256(self.to_json().encode()).hexdigest()", "def get_binary_sha256_hash(hash: str) -> str:\n result = \"\"\n\n for character in hash:\n character_number = int(character, base=16)\n binary_number = bin(character_number)\n # CAVEAT: each hash character is 4 bit size since SHA256 hash is hexidecimal string, so 4 * 64 = 256 bit\n formatted_binary_number = binary_number[2:].ljust(4, \"0\")\n result += formatted_binary_number\n\n return result", "def _get_hash(self, *args):\n url_hash = hashlib.sha1()\n try:\n for value in args:\n value = unicode(value).encode('utf-8', 'replace')\n url_hash.update(value)\n return url_hash.hexdigest()\n except UnicodeDecodeError:\n return None", "def hash(self):\n return xxhash.xxh64(self._pwm_to_str(3)).hexdigest()", "def GetFileSha1(file_path):\n return base64.b64encode(GetFileHashes(file_path, do_sha1=True)['sha1'])", "def get_raw_hash(cls, order_item):\n obj = copy.deepcopy(order_item)\n obj.order_id = None\n obj.order_version = None\n raw_order_id = yeti_utils_common.generate_id_md5_digit_20_for_object(obj)\n return raw_order_id", "def hash_string(self):\n return self._hash_string", "def _get_pubickey_sha1_hash(cert):\n pkey = cert.get_pubkey()\n pkey_asn1 = dump_publickey(FILETYPE_ASN1, pkey)\n decoded_pkey, _ = der_decoder.decode(\n pkey_asn1, rfc2459.SubjectPublicKeyInfo())\n pubkey = bit_string_to_bytearray(decoded_pkey['subjectPublicKey'])\n # algorithm = decoded_pkey['algorithm'] # RSA encryption\n sha1_hash = hashlib.sha1()\n sha1_hash.update(pubkey)\n return sha1_hash", "def sha1_p(value):\n # check if the value has the expected type\n string_p(value)\n\n # SHA-1 hash has 40 hexadecimal characters\n if not re.fullmatch(r\"^[a-f0-9]{40}$\", value):\n raise Invalid(\"the value '{value}' does not seem to be SHA1 hash\".format(value=value))", "def get_hash(self):\r\n block_data = self.prev_hash\r\n block_data += bytearray(struct.pack(\"f\", self.time))\r\n block_data += self.user_id.encode()\r\n block_data += self.public_key.public_bytes(serialization.Encoding.X962,\r\n serialization.PublicFormat.CompressedPoint)\r\n\r\n digest = hashes.Hash(hashes.SHA256())\r\n digest.update(block_data)\r\n return digest.finalize()", "def hexdigest(self):\r\n return ''.join(['%02x' % ord(c) for c in self.digest()])", "def get_hash(s):\n hash_object = hashlib.md5(s.encode())\n return hash_object.hexdigest()", "def object_sha256(obj):\n\n return hashlib.sha256(json.dumps(obj).encode()).hexdigest()", "def _calc_sha1(path):\n calc = 
hashlib.sha1()\n with open(path, 'r') as f:\n calc.update(f.read())\n return calc.hexdigest()", "def get_hash(self):\r\n block_data = self.prev_hash\r\n block_data += bytearray(struct.pack(\"!f\", self.time))\r\n block_data += self.user_id.encode()\r\n block_data += self.signature.encode()\r\n block_data += self.choice.encode()\r\n\r\n digest = hashes.Hash(hashes.SHA256())\r\n digest.update(block_data)\r\n return digest.finalize()", "def _electrum_script_hash(script: bytes) -> str:\n bytes = bytearray(scripts.sha256(script))\n bytes.reverse()\n return bytes.hex()", "def get_hash(thing):\n n = hashlib.sha256()\n \n if isinstance(thing,str):\n n.update(thing.encode('utf-8' ))\n elif isinstance(thing, bytes):\n n.update(thing)\n elif isinstance(thing,BeautifulSoup):\n n.update(get_hash(str(thing)))\n else:\n raise RuntimeError(\"unknown type: {}\".format(str(type(thing))))\n \n return(n.digest())", "def _get_signature(value):\n mySha = hashlib.sha256()\n mySha.update(value)\n # print mySha.hexdigest()\n return mySha.hexdigest()", "def getHash(self, hashtype='sha1'):\n if not self.svghash256:\n blob_reader = blobstore.BlobReader(self.svgBlob)\n digest = hashlib.sha256(blob_reader.read()).digest()\n self.svghash256 = \"sha256-%s\" % (base64.b64encode(digest))\n self.put() # write back hash\n if not self.svghash:\n blob_reader = blobstore.BlobReader(self.svgBlob)\n digest = hashlib.sha1(blob_reader.read()).digest()\n self.svghash = \"sha1-%s\" % (base64.b64encode(digest))\n self.put() # write back hash\n if hashtype=='sha1':\n return \"%s\" % (self.svghash)\n elif hashtype == 'sha256':\n return \"%s\" % (self.svghash256)\n elif hashtype == 'both':\n return \"%s %s\" % (self.svghash,self.svghash256)", "def compute_hash(self) -> str:\r\n #block_dict = self.__dict__.pop('hash', None) # Remove hash field value before calculating hash\r\n block_dict = self.__dict__.copy()\r\n block_dict.pop('hash', None) # Remove hash field value before calculating hash\r\n block_string = json.dumps(block_dict, sort_keys=True).encode('utf-8')\r\n return sha256(block_string).hexdigest()", "def instance(data):\n return Fieldsha1(data)", "def hashcode(o):", "def sha1(self):\n if not hasattr(self, \"_sha1\"):\n self._sha1 = self.job_config.get('sha1')\n if not self._sha1:\n self._sha1 = self._get_package_sha1()\n return self._sha1", "def existing_hash(self, id):\r\n return self._read_sha_by_id(id)", "def get_hash(dictionary):\n dhash = hashlib.md5()\n # We need to sort arguments so {'a': 1, 'b': 2} is\n # the same as {'b': 2, 'a': 1}\n encoded = json.dumps(dictionary, sort_keys=True).encode()\n dhash.update(encoded)\n return dhash.hexdigest()", "def hash_value(self, value):\n h = hashlib.sha256()\n h.update(str(value))\n return h.hexdigest()", "def hexdigest(self):\n return \"\".join(\"%02x\" % ord(x)\n for x in MegaCrypto.a32_to_str(self.digest()))", "def _Hash(content: bytes) -> str:\n return hashlib.sha256(content).hexdigest()", "def hash(self):\n return os.popen('git rev-parse HEAD').read().strip()" ]
[ "0.6950721", "0.68938136", "0.6875082", "0.68536943", "0.68525857", "0.68343544", "0.6825216", "0.676524", "0.6735063", "0.67005074", "0.664288", "0.6572037", "0.65659446", "0.6546874", "0.653492", "0.6523896", "0.6502122", "0.64945704", "0.64752346", "0.640935", "0.6403112", "0.63836133", "0.6381282", "0.63753676", "0.6367303", "0.6365533", "0.6356099", "0.6355097", "0.6348392", "0.6340609", "0.63386196", "0.6336102", "0.63237774", "0.6317194", "0.6280785", "0.6268704", "0.62670183", "0.6263943", "0.62507415", "0.6227853", "0.6227655", "0.62102675", "0.61647075", "0.61640024", "0.6155063", "0.6147056", "0.6132909", "0.6118568", "0.6117734", "0.61138165", "0.6111476", "0.6106862", "0.6103756", "0.60990506", "0.60831404", "0.6066803", "0.6052392", "0.60471", "0.60297745", "0.60272056", "0.6016021", "0.6013538", "0.6009583", "0.60050917", "0.5994511", "0.5982451", "0.5982451", "0.5982438", "0.5970635", "0.59683", "0.5955806", "0.59504193", "0.594874", "0.59481776", "0.5947417", "0.594625", "0.5937956", "0.59374166", "0.5935468", "0.5931388", "0.59301513", "0.591527", "0.5912327", "0.59119016", "0.5909716", "0.5902454", "0.58951306", "0.58922046", "0.58912617", "0.58821505", "0.5879392", "0.5874163", "0.58731765", "0.5872234", "0.58694875", "0.58632326", "0.5857659", "0.5849484", "0.5842695", "0.5839905" ]
0.6621342
11
From a list of 'scene.json' and/or 'scene_<sha1>.json' paths in s3, return a Scene object for the one with the latest timestamp.
def get_latest_scene(s3_scene_jsons): # Fetch all 'scene*.json' files and load Scenes scenes = [open_remote_pb_object(scene_json, Scene) for scene_json in s3_scene_jsons] # Find Scene with latest creation timestamp creation_ts = [_s.creation_date.ToMicroseconds() for _s in scenes] index = creation_ts.index(max(creation_ts)) return scenes[index], s3_scene_jsons[index]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_latest_year_month_day_prefix(s3_path):\n latest = date.min\n keys = get_contents_of_directory(s3_path)\n\n for key in keys:\n search = re.search(r'.*year=(\\d{4}).*month=(\\d{2}).*day=(\\d{2})', key)\n if search:\n year, month, day = search.groups()\n bucket_date = date(int(year), int(month), int(day))\n if bucket_date > latest:\n latest = bucket_date\n\n if latest == date.min:\n return None\n return latest", "def read_s3_file(date):\n \"\"\" history from S3 \"\"\"\n bucket = os.getenv(\"SPOTIFY_BUCKET_NAME\")\n path = os.getenv(\"SPOTIFY_BUCKET_PATH\")\n s3 = boto3.resource('s3')\n try:\n s3.Object(bucket, \"%s/%s.json\" % (path, date)).load()\n except botocore.exceptions.ClientError as e:\n logger.info(\"No existing history file found for %s, %s\" %\n (date, e.response['Error']['Code']))\n if e.response['Error']['Code'] == '404':\n return []\n else:\n logger.warning(\"Unexpected error code returned!\")\n return []\n else:\n logger.info(\"Reading history file for %s\" % date)\n content_object = s3.Object(bucket, \"%s/%s.json\" % (path, date))\n file_content = content_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def latest_archive_zip_revision(doi_id, s3_keys, journal, status):\n s3_key_name = None\n\n name_prefix_to_match = (journal + '-' + utils.pad_msid(doi_id)\n + '-' + status + '-v')\n\n highest = 0\n for key in s3_keys:\n if key[\"name\"].startswith(name_prefix_to_match):\n version_and_date = None\n try:\n parts = key[\"name\"].split(name_prefix_to_match)\n version = parts[1].split('-')[0]\n date_formatted = dateutil.parser.parse(key[\"last_modified\"])\n date_part = date_formatted.strftime(utils.S3_DATE_FORMAT)\n version_and_date = int(version + date_part)\n except:\n pass\n if version_and_date and version_and_date > highest:\n s3_key_name = key[\"name\"]\n highest = version_and_date\n\n return s3_key_name", "def SELECT_LATEST_FILE_JSON(directory=LOCAL_DIRECTORY_OF_SENSOR_DATA):\n latest_time = None\n latest_path = None\n first_loop = True\n for file_name in os.listdir(directory):\n file_path_json = os.path.join(directory, file_name)\n if os.path.isfile(file_path_json):\n current_time = os.stat(file_path_json)\n if not first_loop and int(current_time.st_mtime) > int(latest_time.st_mtime) and \\\n file_path_json[-len('.json'):] == '.json':\n latest_time = os.stat(file_path_json)\n latest_path = file_path_json\n elif first_loop:\n latest_time = os.stat(file_path_json)\n latest_path = file_path_json\n first_loop = False\n return latest_path", "def load_archives_from_s3(self):\n s3_bucket = S3Backend(self.conf).bucket\n try:\n k = Key(s3_bucket)\n k.key = self.backup_key\n\n return json.loads(k.get_contents_as_string())\n except S3ResponseError, exc:\n log.error(exc)\n return {}", "def get_latest_file_name(bucket_name,prefix):\n s3_client = boto3.client('s3')\n objs = s3_client.list_objects_v2(Bucket=bucket_name)['Contents']\n shortlisted_files = dict() \n for obj in objs:\n key = obj['Key']\n timestamp = obj['LastModified']\n # if key starts with folder name retrieve that key\n if key.startswith(prefix): \n # Adding a new key value pair\n shortlisted_files.update( {key : timestamp} ) \n latest_filename = max(shortlisted_files, key=shortlisted_files.get)\n print('Lastest File Name: ' + latest_filename)\n return latest_filename", "def load_s3_njson(bucket, prefix, key_list, honorary_list):\n # Get list of files in bucket and with prefix:\n s3_file_list = list_s3_files(bucket, prefix)\n \n # Load data from all 
files:\n structured_data = []\n for s3_file in s3_file_list:\n structured_data = structured_data + s3_file_to_dict_list(bucket, s3_file, key_list, honorary_list)\n \n return structured_data", "def get_most_recent_folder_from_s3_bucket():\n s3 = boto3.resource('s3')\n bucket = s3.Bucket(BUCKET_NAME)\n result = bucket.meta.client.list_objects(Bucket=bucket.name, Delimiter='/')\n folders = []\n date_pattern = re.compile(r\"[0-9_]+\")\n for o in result.get('CommonPrefixes'):\n folder_name = o.get('Prefix')\n if re.match(date_pattern, folder_name):\n folders.append(folder_name)\n folders.sort(reverse=True)\n return folders[0]", "def get_archive(katfilenames):\n\timport requests\n\n\tfile_refs = []\n\tfor filename in katfilenames:\n\t\tif filename.startswith('s3'):\n\t\t\tres = requests.post(S3_URL, headers=S3_HEAD, data='{\"s3_ref\":\"%s\",\"ref_key\":\"Nope\"}'%(filename,))\n\t\t\turl = res.json()['url']\n\t\t\tres1 = requests.get(url)\n\t\t\toutfile = filename.split('/')[-1]\n\t\t\topen(outfile, 'wb').write(res1.content)\n\t\t\tfile_refs.append(outfile)\n\t\telse:\n\t\t\tfile_refs.append(filename)\n\treturn file_refs", "def get_old_new(s3, cdc_prefixes: typing.Optional[list] = None, full_load_prefixes: typing.Optional[list] = None,\n old_info: typing.Optional[str] = None):\n if not cdc_prefixes and full_load_prefixes:\n raise ValueError(\"cdc_info and full_load_info cannot both be null. One must be specified\")\n\n if old_info:\n old_bucket, old_prefix = get_bucket_key(old_info)\n s3.download_file(old_bucket, old_prefix, 'old_info.json')\n old_file = open(\"old_info.json\", \"r\")\n old = json.loads(old_file.read())\n old_file.close()\n os.remove('old_info.json')\n new_run_id = old['run_id'] + 1\n else:\n # Assumes that there are no previous runs/no previously processed files\n old = {'cdc_files': {}}\n new_run_id = 0\n\n if cdc_prefixes:\n new_cdc = {}\n # Add any newly added identifiers, update previous prefixes, drop missing ones\n for prefix in cdc_prefixes:\n old_cdc = old['cdc_files']\n old_files = old_cdc.get(prefix, {}).get('files', [])\n since = old_cdc.get(prefix, {}).get('max_ts', \"1970-01-01 00:00:00.000\")\n files, max_ts = find_latest(old_files, s3_list(s3, prefix, ListType.full), since)\n new_cdc[prefix] = {'files': files, 'max_ts': max_ts}\n else:\n new_cdc = {}\n\n if full_load_prefixes:\n new_full = {}\n for prefix in full_load_prefixes:\n files = s3_list(s3, prefix, ListType.full)\n new_full[prefix] = {'files': [x[0] for x in files]}\n else:\n new_full = {}\n\n output = {\n 'cdc_files': new_cdc,\n 'full_load_files': new_full,\n 'run_id': new_run_id\n }\n return output", "def get_last_modified_from_first_matching_file(key_list, framework_slug, prefix):\n path_starts_with = '{}/{}'.format(framework_slug, prefix)\n return next((key for key in key_list if key.get('path').startswith(path_starts_with)), {}).get('last_modified')", "def ingest_latests(last_timestamp, file_list):\n def _iterator(file_name):\n # Is a radar image file\n if re.match(r'cag01est2400\\d{4}-\\d{2}-\\d{2}_\\d{2}:\\d{2}:\\d{2}.png', file_name):\n file_timestamp = datetime.datetime.strptime(\n file_name, 'cag01est2400%Y-%m-%d_%H:%M:%S.png')\n if file_timestamp > last_timestamp:\n return True\n else:\n return False\n else:\n return False\n\n return list(filter(_iterator, file_list))", "def get_gzipped_s3_objects_from_dict(session, event):\n return get_s3_objects_from_dict(\n session, event, default_unzip_s3_object_handler_function\n )", "def download_json_metadata_from_s3(bucket_name, prefix=\"\", 
num_threads=20):\n\n # simple method for threads to pull from a queue and download JSON files\n def download_object(queue):\n while True:\n obj = queue.get()\n if obj is None:\n break\n obj.Object().download_file(obj.key.replace(prefix, ''))\n queue.task_done()\n\n # create a directory to store downloaded metadata\n cwd = Path.cwd()\n data_dir = cwd / 'data'\n json_dir = data_dir / 'json'\n # try:\n os.makedirs(json_dir, exist_ok=True)\n # except FileExistsError:\n # shutil.rmtree(json_dir)\n # os.makedirs(json_dir)\n os.chdir(json_dir)\n\n # create a queue for objects that need to be downloaded\n # and spawn threads to download them concurrently\n download_queue = Queue(maxsize=0)\n workers = []\n for worker in range(num_threads):\n worker = Thread(target=download_object, args=(download_queue, ))\n worker.setDaemon(True)\n worker.start()\n workers.append(worker)\n\n # loop through the files in the bucket and filter for JSON metadata\n # files for only labeled images; add them to the queue\n s3 = boto3.resource(\"s3\")\n bucket = s3.Bucket(bucket_name)\n for obj in bucket.objects.filter(Prefix=prefix):\n if obj.key.endswith(\"meta.json\"):\n download_queue.put(obj)\n\n # wait for the queue to be empty, then join all threads\n download_queue.join()\n for _ in range(num_threads):\n download_queue.put(None)\n for worker in workers:\n worker.join()\n\n os.chdir(cwd)", "def _get_s3_object(self, s3_path):\n bucket_name, key = S3Util.get_bucket_and_key(s3_path)\n return self.s3_resource.Object(bucket_name, key)", "def get_pickle_from_s3(path):\n return load_pickle_from_s3(*load_bucket_and_path(path))", "def data_pull_s3(self):\n year = self.month_year[0]\n month = self.month_year[1]\n self.s3 = boto3.resource('s3',aws_access_key_id=self.creds_data['key_id'],\n aws_secret_access_key=self.creds_data['key_access'])\n bucket = self.s3.Bucket('himatdata')\n home = os.getcwd()\n file_path = os.path.join(*[home, 'Trmm/', self.output_folder, year + '_' + month])\n print(file_path)\n if not os.path.exists(file_path):\n os.makedirs(file_path)\n for obj in bucket.objects.filter(Delimiter='', Prefix='Trmm/{}{}_{}'.format(self.output_folder, year, month)):\n if obj.key.endswith('.nc4'):\n bucket.download_file(obj.key,os.path.join(os.path.join(home, obj.key)))\n logging.info(\"Done with Year Month: %s\", month_year)", "def get_latest_data(bucket, dir):\n # get all the scraped json files in the directory in the bucket\n files = client.list_objects_v2(Bucket=BUCKET,\n Prefix=DIR)['Contents']\n # read the data from the object\n str_file = client.get_object(\n Bucket=BUCKET, Key=files[-1]['Key'])['Body'].read().decode('UTF-8')\n data = json.loads(str_file)\n return data", "def get_radar_from_aws(site, datetime_t):\n\n # First create the query string for the bucket knowing\n # how NOAA and AWS store the data\n my_pref = datetime_t.strftime('%Y/%m/%d/') + site\n\n # Connect to the bucket\n conn = S3Connection(anon = True)\n bucket = conn.get_bucket('noaa-nexrad-level2')\n\n # Get a list of files\n bucket_list = list(bucket.list(prefix = my_pref))\n\n # we are going to create a list of keys and datetimes to allow easy searching\n keys = []\n datetimes = []\n\n # populate the list\n for i in range(len(bucket_list)):\n this_str = str(bucket_list[i].key)\n if 'gz' in this_str:\n endme = this_str[-22:-4]\n fmt = '%Y%m%d_%H%M%S_V0'\n dt = datetime.strptime(endme, fmt)\n datetimes.append(dt)\n keys.append(bucket_list[i])\n\n if this_str[-3::] == 'V06':\n endme = this_str[-19::]\n fmt = '%Y%m%d_%H%M%S_V06'\n dt = 
datetime.strptime(endme, fmt)\n datetimes.append(dt)\n keys.append(bucket_list[i])\n\n # find the closest available radar to your datetime\n closest_datetime = _nearestDate(datetimes, datetime_t)\n index = datetimes.index(closest_datetime)\n\n localfile = tempfile.NamedTemporaryFile()\n keys[index].get_contents_to_filename(localfile.name)\n radar = pyart.io.read(localfile.name)\n return radar", "def from_s3(cls, *, bucket_name, prefix, suffix='.mos.xml', allow_incomplete=False):\n mos_file_keys = s3.get_mos_files(\n bucket_name=bucket_name,\n prefix=prefix,\n suffix=suffix,\n )\n logger.info(\"Making MosCollection from %s S3 files\", len(mos_file_keys))\n mos_readers = sorted([\n mr\n for mr in [MosReader.from_s3(bucket_name, key) for key in mos_file_keys]\n if mr is not None\n ])\n return cls(mos_readers, allow_incomplete=allow_incomplete)", "def latest(self, key, **args):\n record = self.storage.latest(key)\n if record is None:\n return self.klass(**args)\n return self.klass.from_json(record)", "def get_amazon_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n amzn_filename = \"AMZN.json\"\n complete_path = os.path.join(prefix, amzn_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def create_data_schema_from_s3_path(s3_path):\n # We should have only directories at the first level of this S3 path:\n fs = s3fs.S3FileSystem()\n components = fs.ls(s3_path)\n \n # Loops through each subdirectory found in the root dir:\n DATASET_COMPONENT_FIELDS_MAP = dict()\n for subsystem in components:\n # The first tag should always be Timestamp\n subsystem_tags = ['timestamp']\n \n # Opens the first file (they have the same structure):\n files = fs.ls(subsystem)\n for file in files:\n if file[-1] != '/':\n break\n\n current_subsystem_df = pd.read_csv(f's3://{file}', nrows=1)\n subsystem_tags = subsystem_tags + current_subsystem_df.columns.tolist()[1:]\n \n DATASET_COMPONENT_FIELDS_MAP.update({subsystem.split('/')[-1]: subsystem_tags})\n\n # Generate the associated JSON schema:\n schema = create_data_schema(DATASET_COMPONENT_FIELDS_MAP)\n \n return schema", "def get_exports(client, bucket, prefix, latest=True):\n keys = client.list_objects_v2(\n Bucket=bucket, Prefix=prefix, Delimiter='/').get('CommonPrefixes', [])\n found = []\n years = []\n for y in keys:\n part = y['Prefix'].rsplit('/', 2)[-2]\n if not part.isdigit():\n continue\n year = int(part)\n years.append(year)\n\n if not years:\n return []\n\n years.sort(reverse=True)\n if latest:\n years = [years[0]]\n\n for y in years:\n keys = client.list_objects_v2(\n Bucket=bucket, Prefix=\"%s/%d/\" % (prefix.strip('/'), y),\n Delimiter='/').get('CommonPrefixes', [])\n months = []\n for m in keys:\n part = m['Prefix'].rsplit('/', 2)[-2]\n if not part.isdigit():\n continue\n month = int(part)\n date_key = (y, month)\n months.append(month)\n months.sort(reverse=True)\n if not months:\n continue\n if latest:\n months = [months[0]]\n for m in months:\n keys = client.list_objects_v2(\n Bucket=bucket, Prefix=\"%s/%d/%s/\" % (\n prefix.strip('/'), y, ('%d' % m).rjust(2, '0')),\n Delimiter='/').get('CommonPrefixes', [])\n for d in keys:\n part = d['Prefix'].rsplit('/', 2)[-2]\n if not part.isdigit():\n continue\n day = int(part)\n date_key = (y, m, day)\n found.append(date_key)\n found.sort(reverse=True)\n if latest:\n found = [found[0]]\n return found", "def collect_s3(self):\n print('Collecting 
artifacts matching %s from S3 bucket %s' % (self.match, s3_bucket))\n self.s3 = boto3.resource('s3')\n self.s3_bucket = self.s3.Bucket(s3_bucket)\n self.s3_client = boto3.client('s3')\n for item in self.s3_client.list_objects(Bucket=s3_bucket, Prefix='librdkafka/').get('Contents'):\n self.collect_single(item.get('Key'))\n\n for a in self.artifacts:\n a.download()", "def from_s3(cls, bucket_name, mos_file_key):\n xml = s3.get_file_contents(bucket_name, mos_file_key)\n return cls.from_string(xml)", "def get_s3_objects_from_dict(session, event, object_handler_function):\n\n objects = []\n s3 = session.client(\"s3\")\n # Get the object from the event and show its content type\n for record in event.get(\"Records\", []):\n bucket = record[\"s3\"][\"bucket\"][\"name\"]\n unprocessed_key = record[\"s3\"][\"object\"][\"key\"]\n # urllib changes structure and encoding is different\n # between python 2 and 3\n key = (\n urllib.parse.unquote_plus(unprocessed_key)\n # if sys.version_info[0] >= 3\n # else urllib.unquote_plus(unprocessed_key.encode(\"utf-8\"))\n )\n logging.info(\"Bucket: %s. Key: %s\", bucket, key)\n\n # get S3 object and add it to return list\n response = s3.get_object(Bucket=bucket, Key=key)\n objects.append(object_handler_function(response))\n return objects", "def objs_with_prefix(bucket, log_type, query_time):\n prefix = get_prefix(log_type, query_time)\n # S3 guarantees to return objects in ascending key order based on the UTF-8\n # binary representation of the key. Unfortunately the server-side filtering\n # is quite limited; we can't specify the sort order or the sort key.\n objs = list(bucket.objects.filter(Prefix=prefix))\n logging.info('Found %s files with prefix %s',\n 'no' if not objs else len(objs), prefix)\n return objs", "def lastThree(catalog):\n return model.lastThree(catalog)", "def get_matching_s3_keys(bucket, prefix=\"\", suffix=\"\"):\n for obj in get_matching_s3_objects(bucket, prefix, suffix):\n yield obj[\"Key\"]\n\n def download_froms3(myfile, env='prod'):\n # session = boto3.Session(profile_name=PROFILE)\n boto_s3_session = boto3.Session(profile_name=env)\n s3 = boto_s3_session.resource('s3')\n s3client = boto_s3_session.client('s3', region_name='eu-west-2')\n try:\n file_name = unquote(myfile.split('/')[-1])\n oparse = urlparse(myfile, allow_fragments=False)\n print(oparse)\n S3_SRC_BUCKET_NAME = oparse.netloc\n key = oparse.path[1:]\n download_path = '{0}{1}'.format(BASE_PATH, file_name)\n print(f'Downloading from {S3_SRC_BUCKET_NAME} , {key} to {download_path} ')\n # s3.Bucket(S3_SRC_BUCKET_NAME).download_file(key, download_path)\n # s3.Bucket(S3_SRC_BUCKET_NAME).download_file(file_name, download_path)\n s3client.download_file(S3_SRC_BUCKET_NAME, key, download_path)\n print('File Downloaded')\n except botocore.exceptions.ClientError as err:\n if err.response['Error']['Code'] == \"404\":\n print(\"The object does not exist.\", err)\n else:\n # raise\n error = str(err)\n print(error)\n\n return myfile", "def download_privacy_score(bucket_name, bucket_prefix):\n s3_client = boto3.client('s3')\n s3_client.get_paginator('list_objects_v2')\n input_matcher = re.compile(f'^{bucket_prefix}2[0-9][0-9][0-9]-[0-9][0-9][.]json$')\n\n def iterate_bucket(s3_client, bucket_name, bucket_prefix, input_matcher):\n pageinator = s3_client.get_paginator('list_objects_v2')\n\n for page in pageinator.paginate(Bucket=bucket_name, Prefix=bucket_prefix):\n if page['KeyCount'] == 0:\n continue\n\n for item in page['Contents']:\n if input_matcher.match(item['Key']):\n yield 
item['Key']\n\n latest_key = max(iterate_bucket(s3_client, bucket_name, bucket_prefix, input_matcher))\n print(f'Downloading latest_key file s3://{bucket_name}/{latest_key} ...')\n return json.loads(s3_client.get_object(Bucket=bucket_name, Key=latest_key)['Body'].read())", "def aws_s3_ls(s3_uri: str, list_extended=False)->list:\n client = boto3.client(\"s3\")\n bucket, prefix = _extract_bucket_key(s3_uri)\n s3_objects = []\n cont_token = None\n while (True):\n if cont_token is None:\n kwargs = {\n \"Bucket\": bucket,\n \"MaxKeys\": 100,\n \"Prefix\": prefix\n }\n else:\n kwargs = {\n \"Bucket\": bucket,\n \"MaxKeys\": 100,\n \"Prefix\": prefix,\n \"ContinuationToken\": cont_token\n } \n try:\n response = client.list_objects_v2(**kwargs)\n if response[\"KeyCount\"] == 0:\n print (\"Requested s3 object doesn't exist.\")\n break\n for record in response[\"Contents\"]:\n if record[\"Size\"] > 0: # ignore just prefix names\n if list_extended:\n s3_objects.append((record[\"Size\"], \n record[\"LastModified\"].strftime(\"%Y%m%d %H:%M:%S.%s\"), \n record[\"Key\"]))\n else:\n s3_objects.append(record[\"Key\"])\n if response[\"IsTruncated\"]:\n cont_token = response[\"NextContinuationToken\"]\n else:\n break\n except Exception as exc:\n raise Error(\"Error {} occurred while listing objects.\".format(exc))\n return s3_objects", "def get_path_to_scene(self, coordinate, date, filename):\n try:\n # Filter list of root directories based on coordinate and date\n buffer = self.filter_on_regexp(self.root_directories, str(coordinate))\n buffer = self.filter_on_regexp(buffer, date.replace('-', ''))\n\n # Join to absolute scene directory path\n scene_directory = next(buffer)\n scene_directory_path = os.path.join(self.root, scene_directory)\n\n # Filter scenes based on filename substring\n scene_files = os.listdir(scene_directory_path)\n buffer = self.filter_on_regexp(scene_files, filename)\n\n # Join to absolute path to scene and return\n filename = next(buffer)\n path_to_scene = os.path.join(scene_directory_path, filename)\n return path_to_scene\n except StopIteration:\n raise FileNotFoundError(f\"No Landsat file corresponding to specified arguments\")", "def lice_main(base_folder, s3_client):\n\n sql_credentials = json.load(open(os.environ[\"SQL_CREDENTIALS\"]))\n sql_engine = create_engine(\n \"postgresql://{}:{}@{}:{}/{}\".format(sql_credentials[\"user\"], sql_credentials[\"password\"],\n sql_credentials[\"host\"], sql_credentials[\"port\"],\n sql_credentials[\"database\"]))\n\n metadata = MetaData()\n # step 1 - download crops + json\n # get the two tables we care about\n fish_crops = Table('lati_fish_detections', metadata, autoload=True, autoload_with=sql_engine)\n lice_crops = Table('lati_fish_detections_lice_annotations_reconciled', metadata, autoload=True,\n autoload_with=sql_engine)\n\n # inner join on fish crop id\n # TODO @Thomas debug this\n query = select([fish_crops.c.image_key, lice_crops.c.lice_bbox_list]) \\\n .select_from(lice_crops.join(fish_crops, lice_crops.c.lati_fish_detections_id == fish_crops.c.id)) \\\n .where(and_(fish_crops.c.site_id == 23,\n lice_crops.c.lice_bbox_list != None,\n # func.json_array_length(lice_crops.c.lice_bbox_list) > 0,\n lice_crops.c.created_by == \"gunnar@aquabyte.ai\"))\n\n json_files = []\n counter = 0\n with sql_engine.connect() as conn:\n for row in conn.execute(query):\n\t if len(row) == 0:\n\t \tcontinue\n # [image_key, lice_json]\n results = {}\n key = row[0]\n _, farm, penid, date, image_name = key.split('/')\n results[\"key\"] = key\n results[\"farm\"] 
= farm\n results[\"penid\"] = penid\n results[\"date\"] = date\n results[\"image_name\"] = image_name\n results[\"detections\"] = row[1]\n results[\"processed\"] = False\n destination = os.path.join(base_folder, \"crops\", farm, date, penid)\n\n results[\"image_path\"] = os.path.join(destination, image_name)\n if not os.path.isdir(destination):\n os.makedirs(destination)\n with open(os.path.join(destination, image_name.replace(\"jpg\", \"json\")), \"w\") as f:\n json.dump(results, f)\n if not os.path.isfile(os.path.join(destination, image_name)):\n s3_client.download_file(\"aquabyte-crops\", key, os.path.join(destination, image_name))\n counter += 1\n json_files.append(os.path.join(destination, image_name.replace(\"jpg\", \"json\")))\n print(\"{} new files have downloaded\".format(counter))\n\n # step 2 - create training and validation sets\n for jf in json_files:\n with open(jf, \"r\") as f:\n annotations = json.load(f)\n if annotations[\"processed\"]:\n continue\n image = io.imread(annotations[\"image_path\"])\n farm = annotations[\"farm\"]\n date = annotations[\"date\"]\n penid = annotations[\"penid\"]\n image_name = annotations[\"image_name\"]\n for (i, annotation) in enumerate(annotations['detections']):\n category = annotation['category']\n position = annotation['position']\n x1, height, y1, width = position[\"left\"], position[\"height\"], position[\"top\"], position[\"width\"]\n destination = os.path.join(base_folder, \"lice_only\", farm, date, penid, category)\n if not os.path.isdir(destination):\n os.makedirs(destination)\n lice_name = image_name + \".lice_{}.jpg\".format(i)\n io.imsave(os.path.join(destination, lice_name), image[y1:y1+height, x1:x1+width, :])\n # tag as processed\n annotations[\"processed\"] = True\n with open(jf, \"w\") as f:\n json.dump(annotations, f)", "def make_scene_folders_0001(jsonFile, rootDir):\n sceneShotList = [[1, ['A', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF', 'AG', 'AH', 'AJ', 'AK',\n 'C', 'D', 'E', 'E_v2', 'G', 'H', 'N']],\n [8, ['D', 'DA', 'DB', 'DC', 'D_v2']],\n [9, ['A', 'A_v2', 'B', 'B_v2']],\n [11, ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'G_v2',\n 'K', 'K_v2', 'K_v3', 'K_v4', 'K_v5', 'K_v6',\n 'M', 'M_v2', 'M_v3', 'M_v4', 'M_v5', 'M_v6', 'M_v7',\n 'N', 'N_v2', 'N_v3', 'N_v4', 'N_v5', 'R', 'R_v2', 'R_v3', 'R_v4', 'R_v5']],\n [12, ['A', 'A_v2', 'B', 'C', 'C_v2', 'C_v3', 'C_v4', 'D',\n 'E', 'E_v2']],\n [13, ['A', 'C', 'D', 'E', 'F']],\n [14, ['A', 'B', 'B_v2', 'B_v3', 'B_v4', 'B_v5', 'B_v6',\n 'C', 'C_v2', 'C_v3', 'C_v4', 'D', 'F', 'FF', 'H',\n 'H_v2', 'J', 'JJ', 'JJ_v2', 'K', 'L', 'M', 'M_v2',\n 'M_v3', 'M_v4', 'M_v5', 'N', 'P', 'P_v2', 'P_v3',\n 'R', 'R_v2', 'T', 'T_v2']],\n [15, ['A', 'J', 'J_v2', 'K', 'K_v2', 'L']]]\n\n for i, l in enumerate(sceneShotList):\n shotFolder = \"s\" + str(l[0]).zfill(3) # gives padding of 4\n shotFolderFinal = shotFolder\n if len(l) == 1 or l[1] == []:\n shotFolderFinal = os.path.join(rootDir, shotFolder)\n make_tree_from_dict(jsonFile, shotFolderFinal)\n else:\n for shot in l[1]:\n shotFolderFinal = shotFolder + shot\n shotFolderFinal = os.path.join(rootDir, shotFolderFinal)\n make_tree_from_dict(jsonFile, shotFolderFinal)", "def upload(jsonfiles):\n # clear S3 Bucket\n bucket = S3Bucket()\n bucket.clear()\n for jsonfile in jsonfiles:\n filename = os.path.basename(jsonfile)\n key = build_key(filename)\n logging.info(\"%s %s\", filename, key)\n # store json in S3 object\n bucket.store(key, jsonfile)", "def get_object_with_timestamp(self, key):\n response = self.client.get_object(Bucket=self.bucket, Key=key)\n return 
response['Body'].read().decode('latin-1').encode('ascii', 'ignore').decode('utf-8'), response[\n 'LastModified']", "def get_latest_items(parser, token):\n bits = token.split_contents()\n\n if len(bits) != 4:\n raise TemplateSyntaxError, \"get_latest_item tag takes exactly three arguments\"\n if bits[2] != 'as':\n raise TemplateSyntaxError, \"second argument to get_latest_item tag must be 'as'\"\n return LatestItemNode(bits[1], bits[3])", "def delete_scene_objects(scene=None):\n #\n # Sort out the scene object.\n if scene is None:\n # Not specified: it's the current scene.\n scene = bpy.context.scene\n else:\n if isinstance(scene, str):\n # Specified by name: get the scene object.\n scene = bpy.data.scenes[scene]\n # Otherwise, assume it's a scene object already.\n #\n # Remove objects.\n for object_ in scene.objects:\n bpy.data.objects.remove(object_, do_unlink=True)\n #", "def load_pickle_from_s3(bucket, path):\n pkl = get_from_s3(bucket, path)\n try:\n return pickle.loads(pkl, encoding='utf-8') # python3\n except TypeError:\n return pickle.loads(pkl) # python2", "def get_apple_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n aapl_filename = \"AAPL.json\"\n complete_path = os.path.join(prefix, aapl_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def get_google_adj_cls_from_s3(s3_resource, bucket_name, prefix='') -> dict:\n googl_filename = \"GOOGL.json\"\n complete_path = os.path.join(prefix, googl_filename)\n json_object = s3_resource.Object(bucket_name, complete_path)\n file_content = json_object.get()['Body'].read().decode('utf-8')\n json_content = json.loads(file_content)\n return json_content", "def main(transcribe_bucket_name, mp3_bucket_name):\n\n s3 = boto3.resource('s3')\n for bucket in s3.buckets.all():\n if bucket.name == transcribe_bucket_name:\n for key in bucket.objects.all():\n if key.key.endswith('.json'):\n r = {}\n # Get reference number\n reference = basename(key.key).replace('.json', '')\n r['ref'] = reference\n # Get URL\n location = boto3.client('s3') \\\n .get_bucket_location(\n Bucket=mp3_bucket_name)['LocationConstraint']\n base_url = join('https://s3-%s.amazonaws.com' % location,\n mp3_bucket_name)\n url = join(base_url, key.key.replace('.json', '.mp3'))\n r['url'] = url\n # Download json file\n try:\n s3.Bucket(transcribe_bucket_name) \\\n .download_file(key.key, key.key)\n except Exception as exception:\n return 1\n # Get text\n with open(key.key, 'r') as f:\n data = json.load(f)\n text = data['results']['transcripts'][0]['transcript']\n r['text'] = text\n # Get sentiment\n sentiment = get_sentiment(text)\n r['sentiment'] = sentiment\n # Check promotion\n promo = check_promo(text)\n r['promo'] = promo\n # Save to Gooogle Sheets\n values = [r['ref'], r['text'], r['promo'], r['sentiment'],\n r['url']]\n append_row(values)\n # Remove tmp json file from local machine\n remove(key.key)", "def latest(self):\n\n for i in json_parsed:\n number = i['number']\n available_bike_stands=i['available_bike_stands']\n last_update=datetime.datetime.fromtimestamp(i['last_update']/1000, pytz.timezone('Europe/Dublin'))\n available_bikes=i['available_bikes']\n insert_latest(number, available_bike_stands, last_update, available_bikes)", "def get_metadata(scene_urls):\n\n meta_list = []\n for url in scene_urls:\n with urlopen(url) as f:\n m = f.readlines()\n f.close()\n \n meta = parse_metadata(m)\n meta_list += 
[meta['L1_METADATA_FILE']]\n\n return meta_list", "def load_from_s3(self, bucket, prefix=None):\r\n n = 0\r\n if prefix:\r\n prefix = '%s/' % prefix\r\n else:\r\n prefix = '%s/' % self.id[1:]\r\n rs = bucket.list(prefix=prefix)\r\n for key in rs:\r\n n += 1\r\n m = self.new_message(key.get_contents_as_string())\r\n self.write(m)\r\n return n", "def parse_s3_uri(URIs):\n buckets, keys = [], []\n for URI in URIs:\n uri_path = path.normpath(URI).split(\"/\")\n buckets.append(uri_path[1])\n keys.append(uri_path[2:])\n\n return buckets, keys", "def get_files_from_s3_lambda_event(event):\n\tfiles_found = {}\n\n\tif \"Records\" not in event:\n\t\traise ValueError(\"Records key not in event\")\n\n\tcount = 0\n\tfor record in event[\"Records\"]:\n\t\tcount = count + 1\n\t\tkey = record[\"s3\"][\"object\"][\"key\"]\n\t\tbucket_arn = record[\"s3\"][\"bucket\"][\"arn\"]\n\t\tbucket_name = get_bucket_name_from_arn(bucket_arn)\n\t\tfile_url = get_bucket_file_url(bucket_name, key)\n\t\tfiles_found[file_url] = {\"bucket\" : bucket_name, \"key\" : key}\n\treturn files_found", "def get_matching_s3_objects(bucket, prefix=\"\", suffix=\"\"):\n s3session = boto3.Session(profile_name='prod')\n s3 = s3session.client(\"s3\", \"eu-west-2\")\n #s3 = boto3.client(\"s3\")\n paginator = s3.get_paginator(\"list_objects_v2\")\n\n kwargs = {'Bucket': bucket}\n\n # We can pass the prefix directly to the S3 API. If the user has passed\n # a tuple or list of prefixes, we go through them one by one.\n if isinstance(prefix, str):\n prefixes = (prefix, )\n else:\n prefixes = prefix\n\n for key_prefix in prefixes:\n kwargs[\"Prefix\"] = key_prefix\n\n for page in paginator.paginate(**kwargs):\n try:\n contents = page[\"Contents\"]\n except KeyError:\n break\n\n for obj in contents:\n key = obj[\"Key\"]\n if key.endswith(suffix):\n yield obj", "def _parse_latest_update(self, resp: Dict[str, Any], latest_version: str) -> str:\n latest_release = resp.get(\"releases\", {}).get(latest_version)\n if latest_release is not None and isinstance(latest_release, list):\n release_artifact_dates = []\n for artifact in latest_release:\n try:\n upload_time = artifact.get(\"upload_time_iso_8601\")\n parsed_upload_time = dateutil.parser.isoparse(upload_time)\n release_artifact_dates.append(parsed_upload_time)\n except Exception:\n pass\n latest_artifact_timestamp = max(release_artifact_dates)\n return latest_artifact_timestamp.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n return \"\"", "def list_sorted_files(uuid, basepath=None):\n if basepath is None:\n basepath = get_basepath()\n if 's3://' in basepath:\n return s3wrangler.list_objects(basepath + 'ephys/' + uuid + '/derived/kilosort2/')\n else:\n # return glob.glob(os.path.join(basepath, f'ephys/{uuid}/derived/kilosort2/*'))\n return glob.glob(basepath + f'ephys/{uuid}/derived/kilosort2/*')", "def get_latest_benchmark():\n\n benchmark_paths = glob.glob(\"./.benchmarks/*/*.json\")\n dates = [\n \"\".join(_b.split(\"/\")[-1].split(\"_\")[2:4]) for _b in benchmark_paths\n ]\n benchmarks = {date: value for date, value in zip(dates, benchmark_paths)}\n\n dates.sort()\n latest = dates[-1]\n benchmark_latest = benchmarks[latest]\n\n return benchmark_latest", "def get_file_list(\n self,\n file_regex = r'.*'):\n s3Contents = []\n #Use list_objects_v2 via kwargs since there could be\n #more than 1000 objects (single return limit)\n kwargs = {'Bucket': self.bucket, 'Prefix':self.key}\n while True:\n try:\n resp = self.s3.list_objects_v2(**kwargs)\n except:\n resp = None\n self.logger.error('Unable to reach s3 
bucket')\n sys.exit(1)\n if resp.get(\"Contents\"):\n try:\n f_regex = re.compile(file_regex)\n #python 3.8+ required for walrus operator\n s3Contents += [f['Key'] for f in resp['Contents'] if (match := re.search(f_regex, f['Key']))]\n except Exception as e:\n self.logger.exception(e)\n self.logger.error('failed to filter s3 folder. Bucket: %s and location: %s',\n self.bucket,\n self.key)\n sys.exit(1)\n try:\n kwargs['ContinuationToken'] = resp['NextContinuationToken']\n except KeyError:\n break\n if not s3Contents:\n self.logger.warning(\n 'No files were returned from s3 bucket: %s and location: %s filtering by %s',\n self.bucket,\n self.key,\n file_regex)\n return s3Contents", "def get_matching_s3_objects(client, bucket, prefix=\"\", suffix=\"\"):\n\n kwargs = {\"Bucket\": bucket}\n if isinstance(prefix, str):\n kwargs[\"Prefix\"] = prefix\n # logging.info(\"kwargs: %s\" % kwargs)\n while True:\n resp = client.list_objects_v2(**kwargs)\n try:\n contents = resp[\"Contents\"]\n except KeyError:\n return\n for obj in contents:\n key = obj[\"Key\"]\n if key.startswith(prefix) and key.endswith(suffix):\n yield obj\n try:\n kwargs[\"ContinuationToken\"] = resp[\"NextContinuationToken\"]\n except KeyError:\n break", "def get_latest_timestamp_file_path(files: List[str]) -> str:\n\n logger = prefect.context.get(\"logger\")\n\n extract_fname = (\n lambda f: os.path.basename(f).replace(\".csv\", \"\").replace(\".parquet\", \"\")\n )\n file_names = [extract_fname(file) for file in files]\n latest_file_name = max(file_names, key=lambda d: datetime.fromisoformat(d))\n latest_file = files[file_names.index(latest_file_name)]\n\n logger.debug(f\"Latest file: {latest_file}\")\n\n return latest_file", "def _get_state_file_from_s3(\n self,\n state_file_url: str,\n profile: str = None,\n region: str = None\n ) -> Dict[str, Any]:\n if profile:\n session = boto3.session.Session(profile_name=profile, region_name=region)\n else:\n session = get_boto3_session()\n s3 = session.resource('s3')\n parts = state_file_url[5:].split('/')\n bucket = parts[0]\n filename = \"/\".join(parts[1:])\n key = s3.Object(bucket, filename)\n try:\n state_file = key.get()[\"Body\"].read().decode('utf-8')\n except botocore.exceptions.ClientError as ex:\n if ex.response['Error']['Code'] == 'NoSuchKey':\n raise NoSuchTerraformStateFile(\"Could not find Terraform state file {}\".format(state_file_url))\n raise ex\n return json.loads(state_file)", "def get_snapshots(dataset=''):\n # filter my tags\n return os.listdir(dataset + ZFS_DEFAULT_SNAPSHOT_DIR)", "def convert_to_json(basepath, sendto):\n\n logger = logging.getLogger('WikiLog')\n\n k = bucket.new_key(basepath)\n\n filenames = []\n year = month = day = hrs = ''\n\n for key in bucket.list():\n thisfile = key.name.encode('utf-8')\n if 'projectviews' not in thisfile and 'sql' not in thisfile and '.gz' in thisfile and thisfile.startswith(basepath):\n # S3 key name is of the format kt-wiki/pageviews/2016/2016-06/pageviews-20160601-000000.gz\n # Split by / to get last element\n filenames.append(thisfile)\n logger.info(\"Processing file: {}\".format(thisfile))\n fname = thisfile.split('/')\n\n # Get content from filename and save to local\n # Split again to Grab year, month, day, hour value from filename\n key.get_contents_to_filename('/home/ubuntu/WikiView/data/' + fname[-1])\n fname1 = fname[-1]\n data_time = fname1[:-3].split('-')\n year, month, day, hrs = data_time[1][:4], data_time[1][4:6], data_time[1][-2:], data_time[-1]\n\n docname = 'pageviews-' + year + '-' + month + '-' + day + 
'-' + hrs + '.json'\n dictlist = []\n\n # save file from s3 to local, read, write to json, push json to s3\n with open(docname, 'w') as fp:\n #\n with gzip.open('/home/ubuntu/WikiView/data/'+fname[-1],'r') as fin:\n for line in fin:\n line = line.split(' ')\n doc = {}\n doc['ymdh'] = year + '-' + month + '-' + day + '-' + hrs\n try:\n # format: project, title, views, bytes ~ en Main_Page 242332 4737756101\n prj, title, vcount = line[0], line[1], line[2]\n doc['prj'] = prj\n doc['title'] = title\n doc['vcount'] = vcount\n json.dump(doc,fp)\n fp.write('\\n')\n except:\n logger.error('Error reading gzip file {} at line: {}'.format(thisfile, line))\n pass\n# sys.exc_clear()\n\n # Now, save the json file to \n key_name = 'pageviews-' + year + '-' + month + '-' + day + '-' + hrs + '.json'\n full_key_name = os.path.join(sendto, key_name)\n k = bucket.new_key(full_key_name)\n\n logger.info(\"Sending json file to S3: {}\".format(docname))\n k.set_contents_from_filename(key_name)\n\n # Remove temp file\n logger.info(\"Removing temp file: {} {}\".format('/home/ubuntu/WikiView/data/', fname[-1]))\n os.remove('/home/ubuntu/WikiView/data/'+fname[-1])\n logger.info(\"Removing temp file: {}\".format(key_name))\n os.remove(key_name)\n logger.info('Finished!!!')", "def read_data_from_s3(self, name, loc):\n try:\n filename = loc + name + \".json\"\n logger.info('Retrieving the data from the S3 file %s' % filename)\n return self._retrieve_dict(filename)\n except Exception as e:\n logger.error(e)\n return False", "def load_from_s3(filename):\n s3 = boto3.client('s3')\n obj = s3.get_object(Bucket=BUCKET_NAME, Key=S3_PATH.format(filename))\n return obj['Body'].read().decode()", "def list_objects(self, s3_prefix_path):\n bucket_name, prefix = S3Util.get_bucket_and_key(s3_prefix_path)\n bucket = self.s3_resource.Bucket(bucket_name)\n return [\"s3://\" + bucket_name + \"/\" + key.key for key in bucket.objects.filter(Prefix=prefix)]", "def ls(_):\n client = utils.s3_client()\n\n projects = []\n\n config = utils.get_config()\n bucket = config[\"release\"][\"s3_bucket\"]\n deploys = config[\"deploy\"]\n\n resp = client.list_objects_v2(Bucket=bucket)\n for data in resp.get(\"Contents\", []):\n name = data[\"Key\"]\n\n projects.append(name)\n\n projects = sorted(projects)\n\n _projects = []\n\n for name in projects:\n try:\n release = get_release(client, bucket, name)\n except InvalidRelease:\n continue\n\n data = {\n \"Name\": name,\n \"Latest Release\": f\"v{release.version} {release.timestamp} ({release.commit})\",\n }\n\n for env_name, cfg in deploys.items():\n env_version, env_commit, env_timestamp = get_deployed_version(\n client, cfg[\"s3_bucket\"], name\n )\n\n data[env_name.title()] = f\"v{env_version} {env_timestamp} ({env_commit})\"\n\n _projects.append(data)\n\n projects = _projects\n\n utils.printfmt(projects)", "def ListFiles(s3):\n response = s3.list_objects(Bucket='mynewbucket123')\n for content in response.get('Contents', []):\n yield content.get('Key')", "def get_latest_match_from_list(self, steam_ids):\n latest_match = {}\n\n for steam_id in steam_ids:\n match = self.get_latest_match(steam_id)\n if match is None:\n return None\n if not match == {} and (latest_match == {} or latest_match['match_seq_num'] < match['match_seq_num']):\n latest_match = match\n\n return latest_match", "def _get_tasks_from_s3(self):\n files = self._get_s3_keys([], '')\n tasks = {}\n for i, x in enumerate(tqdm(files)):\n n = x['Key'].split('/')[-1]\n if self.task_filters and not [x for x in self.task_filters if x in n]:\n 
continue\n n = n.split('_')\n tasks[i] = {\n 'task_name': '_'.join(n[:-1]),\n 'task_params': pickle.loads(self.resource.Object(self.bucket_name, x['Key'].replace('task_log', 'task_params')).get()['Body'].read()),\n 'task_log': pickle.loads(self.resource.Object(self.bucket_name, x['Key']).get()['Body'].read()),\n 'last_modified': x['LastModified'],\n 'task_hash': n[-1].split('.')[0]\n }\n return tasks", "def upload_json_to_s3(directory):\n for f in directory.iterdir():\n if str(f).endswith('.json'):\n full_file_path = str(f.parent) + \"/\" + str(f.name)\n file_name = str(f.name)\n s3_client.upload_file(full_file_path, BASE_BUCKET, file_name)", "def download_most_recent_point(smap_root, stream):\n newest_ts = None\n newest_val = None\n newest_uuid = None\n \n uuids_to_metadata = get_stream_UUIDs_and_metadata(smap_root, stream)\n \n url = smap_root + \"/backend/api/query?\"\n query = \"select data before now limit 1 streamlimit 10 where uuid = '{uuid}'\"\n \n for uuid, metadata in uuids_to_metadata.iteritems():\n timezone = metadata.get(\"Properties\", {}).get(\"Timezone\", None)\n response = requests.post(url, data = query.format(uuid = uuid), verify = False)\n response = response.json()\n readings = response[0][\"Readings\"]\n if not readings:\n logger.debug(\"Did not find any readings\")\n continue\n ts = readings[0][0]\n val = readings[0][1]\n #divide by 1000 because smap time is in ms and datetime will error because it assumes seconds\n ts /= 1000.0\n ts = datetime.fromtimestamp(ts)\n tz = pytz.timezone(timezone)\n ts = tz.localize(ts)\n \n logger.debug(\"ts=\\t{ts}\\nval=\\t{val}\".format(ts=ts, val=val)) \n if val is None:\n val = float(\"NaN\")\n \n if not newest_ts or newest_ts < ts:\n newest_ts = ts\n newest_val = val\n newest_uuid = uuid \n \n logger.debug(\"Latest info:\\tuuid:\\t{uuid}\\tts:\\t{ts}\\tval:\\t{val}\".format(uuid = newest_uuid, ts = newest_ts, val = newest_val))\n return newest_uuid, newest_ts, newest_val", "def get_or_create_sjson(item):\r\n user_filename = item.transcripts[item.transcript_language]\r\n user_subs_id = os.path.splitext(user_filename)[0]\r\n source_subs_id, result_subs_dict = user_subs_id, {1.0: user_subs_id}\r\n try:\r\n sjson_transcript = Transcript.asset(item.location, source_subs_id, item.transcript_language).data\r\n except (NotFoundError): # generating sjson from srt\r\n generate_sjson_for_all_speeds(item, user_filename, result_subs_dict, item.transcript_language)\r\n sjson_transcript = Transcript.asset(item.location, source_subs_id, item.transcript_language).data\r\n return sjson_transcript", "def list_objects(path='',\n bucket=None,\n matches=None,\n include_prefix=False, recursive=False):\n bucket = bucket or s3_path_utils.get_default_bucket()\n s3 = boto3.client('s3')\n\n keys = []\n continuation_token = None\n continue_listing = True\n while continue_listing:\n list_kwargs = {}\n if continuation_token:\n list_kwargs['ContinuationToken'] = continuation_token\n response = s3.list_objects_v2(Bucket=bucket, Prefix=path,\n **list_kwargs)\n if 'Contents' in response:\n keys.extend([obj['Key'] for obj in response['Contents']])\n\n continue_listing = response['IsTruncated']\n if continue_listing:\n continuation_token = response['NextContinuationToken']\n\n if matches:\n if matches.startswith(path):\n matches = matches[len(path):]\n keys = [key for key in keys\n if re.match(re.escape(path) + matches, key)]\n\n if not recursive:\n keys = list(\n {re.match(re.escape(path) + r'[^/]*/?', key).group()\n for key in keys}\n )\n if '/' in path and not 
include_prefix:\n keys = [key[path.rfind('/') + 1:] for key in keys]\n\n return sorted(keys)", "def parse_json(cls, dataset_slug: str, team_slug: str, payload: Dict[str, Any]) -> \"Release\":\n try:\n export_date: datetime.datetime = datetime.datetime.strptime(payload[\"inserted_at\"], \"%Y-%m-%dT%H:%M:%S%z\")\n except ValueError:\n # For python version older than 3.7\n export_date = datetime.datetime.strptime(payload[\"inserted_at\"], \"%Y-%m-%dT%H:%M:%SZ\")\n\n if payload[\"download_url\"] is None:\n return cls(\n dataset_slug=dataset_slug,\n team_slug=team_slug,\n version=payload[\"version\"],\n name=payload[\"name\"],\n export_date=export_date,\n url=None,\n available=False,\n image_count=None,\n class_count=None,\n latest=False,\n format=payload.get(\"format\", \"json\"),\n )\n\n return cls(\n dataset_slug=dataset_slug,\n team_slug=team_slug,\n version=payload[\"version\"],\n name=payload[\"name\"],\n image_count=payload[\"metadata\"][\"num_images\"],\n class_count=len(payload[\"metadata\"][\"annotation_classes\"]),\n export_date=export_date,\n url=payload[\"download_url\"],\n available=True,\n latest=payload[\"latest\"],\n format=payload.get(\"format\", \"json\"),\n )", "def json_sluglist_latest():\n posts = posts_base.order_by(Post.pubdate.desc())[:app.config['FEEDITEMS']]\n out = {'posts': []}\n for post in posts:\n out['posts'].append([post[0].pubdate.strftime(app.config['POST_DATETIME_FORMAT']), post[0].slug])\n\n return jsonify(out)", "def get_targeted_jsons(all_jsons):\n found_jsons = []\n for j in all_jsons:\n if 'foundTargetWithConfidence' in j:\n found_jsons.append(j)\n return found_jsons", "def get_matching_s3_objects(bucket, prefix='', suffix=''):\n s3 = boto3.client('s3')\n kwargs = {'Bucket': bucket}\n\n # If the prefix is a single string (not a tuple of strings), we can\n # do the filtering directly in the S3 API.\n if isinstance(prefix, str):\n kwargs['Prefix'] = prefix\n\n while True:\n\n # The S3 API response is a large blob of metadata.\n # 'Contents' contains information about the listed objects.\n resp = s3.list_objects_v2(**kwargs)\n\n try:\n contents = resp['Contents']\n except KeyError:\n return\n\n for obj in contents:\n key = obj['Key']\n if key.endswith(suffix):\n yield obj\n\n # The S3 API is paginated, returning up to 1000 keys at a time.\n # Pass the continuation token into the next response, until we\n # reach the final page (when this field is missing).\n try:\n kwargs['ContinuationToken'] = resp['NextContinuationToken']\n except KeyError:\n break", "def read_file(user_id, path):\n full_path = s3_path + f\"/users/{user_id}/\" + path\n list_of_files = bucket.objects.filter(Prefix=full_path)\n story_dict = {}\n for obj in list_of_files:\n key = obj.key\n body = obj.get()['Body'].read()\n last_part = key.split(\"/\")[-1]\n story_dict[last_part] = body.decode(\"utf-8\")\n return story_dict", "def from_json(cls, file):\n ref = os.path.basename(file)\n with open(file, 'r') as fp:\n j = json.load(fp)\n\n return sorted([cls.from_dict(ref, d) for d in j[ref]], key=lambda x: x.priority)", "def output(self):\n for table_key, version_key in self.make_s3_keys():\n return S3Target(f\"s3://{BUCKET}/{table_key}\")", "def read_artworks_from_json(keys_to_use):\n JSON_ROOT = os.path.join(\"C:/Users/jmcif/Downloads/ps\")\n artworks = []\n for root, _, files in os.walk(JSON_ROOT):\n for f in files:\n if f.endswith(\"json\"):\n record = get_record_from_file(\n os.path.join(root, f),\n keys_to_use)\n artworks.append(record)\n break\n \n # Create the data frame\n df = 
pd.DataFrame.from_records(artworks,\n columns = keys_to_use,\n index = \"id\")\n return df", "def get_file_text_from_s3_urls(s3_file_url_array, s3_boto):\n\tfile_texts = {}\n\tbucket_and_keys = {}\n\tfor url in s3_file_url_array:\n\t\tbucket = get_bucket_name_from_url(url)\n\t\tkey = get_key_from_url(url)\n\t\tbucket_and_keys[url] = {\"bucket\" : bucket, \"key\" : key}\n\tfile_texts = get_file_text_from_s3_bucket_and_key(bucket_and_keys, s3_boto)\n\treturn file_texts", "def get_resource_last_update_timestamp(api_1_0_url, resource):\n try:\n r = requests.get(api_1_0_url)\n json_string = r.content\n data = json.loads(json_string)\n try:\n files = data['files']\n for entry in files:\n if entry['path'] == resource:\n entry_last_update_timestamp = entry['utctimestamp']\n return entry_last_update_timestamp\n except Exception as error:\n print(\"Caught error: \" + repr(error))\n except Exception as error:\n print(\"Failed to connect to bitbucket: \" + repr(error))\n exit(1)\n return None", "def get_gzipped_s3_objects_from_sns_msg_of_dict(session, event):\n objects = []\n if _is_s3_notif(event):\n return get_gzipped_s3_objects_from_dict(session, event)\n for record in event.get(\"Records\", []):\n message = record.get(\"Sns\", {}).get(\"Message\")\n objects.extend(get_gzipped_s3_objects_from_dict(session, json.loads(message)))\n return objects", "def from_json(o, sd=None, fname=None, s=None, wts=None, gz=None, root_name=None):\n if gz is None:\n if isinstance(fname, str):\n gz = fname.endswith(\".gz\")\n else:\n gz = False\n\n # keeping track of elapsed time. want to make sure I don't do anything\n # that's too slow.\n start_time = time.time()\n # Get the model state dict from one of three sources\n if sd is not None: # Existing Python dict (for in-memory stuff).\n pass\n elif fname is not None: # Read in from a json file\n if gz:\n with gzip.open(fname, \"r\") as f:\n fr = f.read()\n sd = json.loads(fr)\n else:\n with open(fname, \"r\") as f:\n sd = json.load(f) # json file\n elif s is not None: # Use a json string (not really sure if useful)\n sd = json.loads(s) # json string\n else: # Didn't specify at least one source\n raise Exception(\"Need to specify a data source to load from\")\n dict_time = time.time() # To calculate how long it took to read file\n if wts is None: # if no StoreSpec object given use the default, which should\n wts = StoreSpec() # be the typical save everything important\n lookup = {} # A dict to use for a lookup tables\n suffixes = {} # A list of suffixes delayed to end so lookup is complete\n # Read toplevel component (is recursive)\n if root_name is None:\n for k in sd:\n if k.startswith(\"__\") and k.endswith(\"__\"):\n # This is metadata or maybe some similar future addition.\n continue\n else:\n root_name = k\n break # should be one root, use it's name\n _read_component(sd, o, wts, lookup=lookup, suffixes=suffixes, root_name=root_name)\n read_time = time.time() # to calc time to read model state minus suffixes\n # Now read in the suffixes\n _read_suffixes(lookup, suffixes)\n suffix_time = time.time() # to calculate time to read suffixes\n pdict = {} # return some performance information, to make sure not too slow\n pdict[\"etime_load_file\"] = dict_time - start_time\n pdict[\"etime_read_dict\"] = read_time - dict_time\n pdict[\"etime_read_suffixes\"] = suffix_time - read_time\n return pdict", "def get_from_s3(s3_client, s3_url):\n url = urlparse(s3_url)\n\n # Split the bucket from the key\n bucket_name = urllib2.unquote(url.netloc).decode('utf8')\n key_name = 
urllib2.unquote(url.path[1:]).decode('utf8')\n\n # We're done parsing; start doing some S3 ops\n bucket = s3_client.get_bucket(bucket_name, validate=False)\n key = bucket.get_key(key_name)\n return key.get_contents_as_string()", "def get_file_from_s3(bucket_name, file_name, json_parse=True):\n s3_client = boto3.client('s3')\n s3_file = s3_client.get_object(Bucket=bucket_name, Key=file_name)\n try:\n file_contents = s3_file['Body'].read()\n if json_parse:\n file_contents = json.loads(file_contents)\n except Exception as exc:\n LOGGER.error('Encountered error reading s3 file')\n raise exc\n return file_contents", "def get_item(filename, uuid):\n with open(os.fsencode(str(filename)), \"r\") as f:\n data = json.load(f)\n results = [i for i in data if i[\"uuid\"] == str(uuid)]\n if results:\n return results\n return None", "def read_jsonl_from_s3(s3_path, encoding='utf-8', compressed=False) :\n bucket, key = s3_path_to_bucket_key(s3_path)\n obj = s3_resource.Object(bucket, key)\n text = obj.get()['Body'].read()\n \n if compressed:\n split_text = gzip.decompress(text).decode(encoding).split('\\n')\n else:\n split_text = text.decode(encoding).split('\\n')\n \n data = []\n for t in split_text:\n data.append(json.loads(t))\n \n return data", "def parse_rec(json_dataset, index):\n info = voc_info(json_dataset)\n data_path = info['data_path']\n image_file = os.path.join(data_path, 'images', index + '.jpg')\n assert os.path.exists(image_file), 'Path does not exist: {}'.format(image_file)\n\n height, width = cv2.imread(image_file).shape[:2]\n annopath = os.path.join(data_path, 'annotations', '{:s}.txt')\n filename = annopath.format(index)\n rotate = 0\n objects = []\n with open(filename) as f:\n line = f.readline()\n while line:\n parts = line.split()\n if parts[0] == 'rotate':\n rotate = int(parts[1])\n assert rotate == 0\n else:\n obj_struct = {'name': parts[0]}\n x1 = min(max(int(parts[1]), 0), width - 1)\n y1 = min(max(int(parts[2]), 0), height - 1)\n x2 = min(max(int(parts[3]), 0), width - 1)\n y2 = min(max(int(parts[4]), 0), height - 1)\n obj_struct['bbox'] = [x1, y1, x2, y2]\n obj_struct['truncated'] = int(parts[5])\n obj_struct['difficult'] = 0\n objects.append(obj_struct)\n line = f.readline()\n\n return objects", "def restore_data_from_s3(spark, s3_bucket):\n raw_path = os.path.join(s3_bucket, 'raw')\n table_names = [y for x, y, z in os.walk(raw_path)][0]\n subdirs = glob.glob(raw_path + '/*/')\n df_raw_all = {}\n for name, path in zip(table_names, subdirs):\n df_raw_all[name] = spark.read.parquet(path)\n logging.info(f'Dataframe <{name}> from parquet-file in <{path}> successfully loaded')\n return df_raw_all", "def get_scene(videoname_):\n s = videoname_.split(\"_S_\")[-1]\n s = s.split(\"_\")[0]\n return s[:4]", "def get_scene(videoname):\n s = videoname.split(\"_S_\")[-1]\n s = s.split(\"_\")[0]\n return s[:4]", "def s3_process(self, payload, classifier):\n s3_file_lines = StreamPreParsers.pre_parse_s3(payload.raw_record)\n for line in s3_file_lines:\n data = line.rstrip()\n payload.refresh_record(data)\n self.process_alerts(classifier, payload, data)", "def list_s3(bucket, prefix, ext):\n s3 = boto3.resource('s3')\n s3_bucket = s3.Bucket(bucket)\n\n if ext:\n ext = '.' 
+ ext.lstrip('.')\n else:\n ext = ''\n\n counter = 0\n for item in s3_bucket.objects.filter(Prefix=prefix):\n counter += 1\n if counter % 5000 == 0:\n print(f'Found {counter} items so far', file=sys.stderr)\n\n key = item.key\n if not key.endswith(ext):\n continue\n\n # Write to stdout\n print(key)", "def get_matching_s3_keys(client, bucket, prefix=\"\", suffix=\"\"):\n\n for obj in get_matching_s3_objects(client, bucket, prefix, suffix):\n yield obj[\"Key\"]", "def getLatestDate(market, folder):\n \n if market == 'stockOption':\n m = 'DTOP_O_'\n elif market == 'indexFuture':\n m = 'DTOP_F_'\n \n dateList = []\n # get file list from directory\n for f in os.listdir(folder):\n if m in f:\n # crop the date from filename\n row = f.replace(m,'').replace('.zip','')\n dateList.append(date(int(row[:4]), int(row[4:6]), int(row[6:])))\n\n \n latest = dateList[0]\n for x in range(1,len(dateList)):\n if dateList[x] > latest:\n latest = dateList[x]\n \n return latest", "def s3_files(self, path, bucket, profile, pattern=\"*\", verbose=True):\n s3_path = bucket + path\n cmd = [\"aws\", \"s3\", \"ls\", s3_path, \"--profile\", profile]\n try:\n output = subprocess.check_output(\n cmd, stderr=subprocess.STDOUT, shell=True\n ).decode(\"UTF-8\")\n except Exception as e:\n output = e.output.decode(\"UTF-8\")\n print(\"ERROR:\" + output)\n output = [line.split() for line in output.split(\"\\n\")]\n output = [\n line for line in output if len(line) == 4\n ] # filter output for lines with file info\n output = [line[3] for line in output] # grab the filename only\n output = fnmatch.filter(output, pattern) # if default '*', all files will match\n if verbose == True:\n print(\"\\nIndex \\t Filename\")\n for (i, item) in enumerate(output, start=0):\n print(i, \"\\t\", item)\n return output", "def write_s3_file(data, date):\n logger.info(\"Writing history file to S3.\")\n bucket = os.getenv(\"SPOTIFY_BUCKET_NAME\")\n path = os.getenv(\"SPOTIFY_BUCKET_PATH\")\n s3 = boto3.client('s3')\n data = json.dumps(data)\n s3.put_object(Bucket=bucket, Key=\"%s/%s.json\" % (path, date), Body=data)", "def etl(event):\n for record in event:\n body = json.loads(record.body)\n resource_type = body.get(\"resource_type\")\n s3_paths = body.get(\"s3_paths\")\n for s3_path in s3_paths:\n do_etl(s3_path, resource_type)", "def dump_job_data(s3, bucket, key, ecosystem, package, version):\n data = s3.read_object(bucket, key)\n timestamp_str = datetime.datetime.utcnow().strftime(\"%Y-%m-%dT%H:%M:%S.%f\")\n filename = \"s3_data_{e}_{p}_{v}_{t}.json\".format(e=ecosystem,\n p=package,\n v=version,\n t=timestamp_str)\n with open(filename, 'w') as fout:\n json.dump(data, fout)", "def get_last(self, count):\n result = self.items[-count:]\n # Reverse the count\n objects = []\n result.reverse()\n for item in result:\n objects.append(FileDict(item))\n return objects", "def make_s3_keys(self):\n # Write the data twice:\n for fmt in (VERSION_FMT, LATEST_FMT):\n yield make_s3_keys(self, fmt)", "def _s3_stash(self):\n s3_url = 's3://{}/{}'.format(BUCKET, self.atom_file)\n bucketpath = BUCKET.strip(\"/\")\n bucketbase = BUCKET.split(\"/\")[0]\n parts = urlparse.urlsplit(s3_url)\n mimetype = 'application/xml' \n \n conn = boto.connect_s3()\n\n try:\n bucket = conn.get_bucket(bucketbase)\n except boto.exception.S3ResponseError:\n bucket = conn.create_bucket(bucketbase)\n self.logger.info(\"Created S3 bucket {}\".format(bucketbase))\n\n if not(bucket.get_key(parts.path)):\n key = bucket.new_key(parts.path)\n key.set_metadata(\"Content-Type\", mimetype)\n 
key.set_contents_from_filename(self.atom_file)\n msg = \"created {0}\".format(s3_url)\n self.logger.info(msg)\n else:\n key = bucket.get_key(parts.path)\n key.set_metadata(\"Content-Type\", mimetype)\n key.set_contents_from_filename(self.atom_file)\n msg = \"re-uploaded {}\".format(s3_url)\n self.logger.info(msg)" ]
[ "0.61708826", "0.5877487", "0.55048525", "0.5477997", "0.54031044", "0.5294818", "0.5293787", "0.52524304", "0.5156226", "0.5150664", "0.5150446", "0.512073", "0.49706817", "0.49576333", "0.4952994", "0.49373975", "0.4913797", "0.4860639", "0.48384994", "0.4826581", "0.4820257", "0.4753907", "0.47462967", "0.4735426", "0.4728382", "0.47009814", "0.46539104", "0.46241525", "0.46203536", "0.4601871", "0.46017495", "0.45963508", "0.45853925", "0.45759583", "0.45680463", "0.45660064", "0.45573464", "0.45526233", "0.45392516", "0.45367482", "0.45200327", "0.45197427", "0.45165423", "0.45061424", "0.44998524", "0.4495647", "0.4494948", "0.44931817", "0.44919798", "0.44907135", "0.44864276", "0.44843408", "0.44832975", "0.44801378", "0.44672", "0.4462337", "0.4458794", "0.44558528", "0.4447556", "0.44296548", "0.4427232", "0.4427035", "0.4425051", "0.44211778", "0.44109768", "0.44108912", "0.44049156", "0.44024473", "0.44017625", "0.4398657", "0.43943352", "0.43936202", "0.43834135", "0.43755898", "0.43734962", "0.4368835", "0.43680587", "0.43652004", "0.43605733", "0.4355564", "0.43439674", "0.4338612", "0.43354014", "0.43350053", "0.433163", "0.4327127", "0.43145993", "0.4313677", "0.43077332", "0.43057385", "0.43012866", "0.43007123", "0.42957288", "0.4290976", "0.42890096", "0.42863327", "0.4285903", "0.42851195", "0.42721742", "0.4256441" ]
0.7924387
0
Read a file where each line is of the form "word1 word2 ..." Yields lists of the form [word1, word2, ...]
def read(fname):
    cmu_dict = split_cmu_dict.load_dict(TRAIN_FILEPATH_SRC)
    for word, phonemes in cmu_dict.iteritems():
        yield word, phonemes.split()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_txt(filename):\n file_object = open(filename, 'r')\n file_as_string = file_object.read()\n return create_word_list(file_as_string)", "def _get_wordlist(file_name):\n ifile = codecs.open(file_name, 'r', encoding='utf-8')\n for _ in range(int(ifile.__next__())):\n yield (ifile.__next__().strip() for _ in range(int(ifile.__next__())))", "def list_every_word(file_name): #considers file_name is valid\n file = open(file_name,\"r\")\n words = []\n lines = file.readlines()\n for line in lines:\n line = line.strip()\n line = line.split(\" \")\n for word in line:\n words.append(word)\n return words", "def get_word_list(filename):\n f = open(filename,'r')\n word_list = list()\n for line in f:\n for word in line.split():\n word_list.append(word.lower().strip())\n return word_list", "def make_word_list():\n result = []\n for line in open('words.txt'):\n word = line.strip()\n result.append(word)\n return result", "def read_list_words(infile):\n\twords = []\n\tfin = open(infile)\n\tfor line in fin:\n\t\twords.append(line.strip())\n\treturn words", "def read_words(filename):\n # load assets\n word_file = urlopen(filename)\n \n # read in files as string\n words = word_file.read()\n \n # template lines and solution lines list of line string\n # if the input value is '\\n' then TypeError: a bytes-like object is required, not 'str'\n word_list = words.split(b'\\n')\n word_list = [word.decode('ascii') for word in word_list]\n print(\"Loaded a dictionary with\", len(word_list), \"words\")\n return word_list", "def loadWords():\n inFile = open(wordFile, 'r')\n wordlist = []\n for line in inFile:\n wordlist.append(line)\n return wordlist", "def read_words(filename):\n # load assets\n word_file = urllib2.urlopen(filename)\n \n # read in files as string\n words = word_file.read()\n \n # template lines and solution lines list of line string\n word_list = words.split('\\n')\n print \"Loaded a dictionary with\", len(word_list), \"words\"\n return word_list", "def read_crossword(path: str) -> list:\n result = list()\n with open(path, 'r', encoding='utf-8') as file:\n for line in file:\n line = [line]\n result.append(line)\n return result", "def read_file(path, tok=False):\n with open_file(path) as f:\n for line in f.readlines():\n words = split_sentence(line.strip(), tok)\n yield words", "def get_words_from_file(filename):\n with open(filename, newline='') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=' ')\n return list(csv_reader)", "def import_words(file_name):\n with open(file_name) as word_list:\n words = []\n for line in word_list:\n number, word = line.strip().split(\"\\t\")\n words.append(word.strip())\n # print(f\"Imported {(len(word_dict))} words\")\n\n return words", "def read_file_to_list(filename):\n with open(os.path.join(DIRECTORY, filename), \"r\") as f:\n return [word.strip() for word in f.readlines()]", "def loadWords():\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n return wordlist", "def make_word_list(fin):\n\tword_list = []\n\tfor line in fin:\n\t\tword = line.strip()\n\t\tword_list.append(word)\n\treturn word_list", "def make_word_list():\n word_list = []\n fin = open('words.txt')\n for line in fin:\n word = line.strip()\n word_list.append(word)\n return word_list", "def read_words(filename):\n with open(filename, encoding=\"utf-8\") as file:\n words = file.read().splitlines()\n return words", "def open_and_read_file(file_path):\n\n contents = 
open(file_path).read()\n words = contents.split()\n return words", "def _read(self, file_path: str) -> Iterator[Instance]:\n with open(file_path) as f:\n for line in f:\n pairs = line.split()\n words, tags = zip(*(pair.split(\"###\") for pair in pairs))\n yield self.text_to_instance([Token(word) for word in words], tags)", "def get_words_from_file(path, delimiter=r\"[^\\W_']+\"):\r\n lists = [re.findall(delimiter, line) for line in open(path).readlines()]\r\n word_list = []\r\n for l in lists:\r\n word_list += l\r\n return word_list", "def _read_words(filename):\n with tf.gfile.GFile(filename, \"r\") as f:\n return f.read().replace(\"\\n\", \"<eos>\").split()", "def _read_words_from_file(filename: str):\n with open(filename, 'r') as file:\n text = ''.join(file.readlines())\n text = text.replace('\\n', ' ').lower()\n text = re.sub(\" +\", \" \", text)\n words = text.split(' ')\n return words", "def read_word_file(self, filename):\n words = []\n try:\n file = open(filename, 'rt', encoding='utf8')\n words = [word[:-1] for word in file.readlines()]\n\n except Exception as e:\n print(f'[-] Error occurred while reading word file: {e}')\n\n return words", "def word_runner(self):\n with open(self.filename) as doc:\n text = doc.readlines()\n for line in text:\n for word in line.split():\n yield word", "def read_word_list(file_name):\r\n\twith open(file_name) as word_list_file:\r\n\t\treturn set(word.strip() for word in word_list_file)", "def read_words(self, in_file):\n with open(in_file, 'r') as wordFile:\n words = \"\"\n for line in wordFile:\n words = words + ' ' + line\n return words.split()", "def get_words_in_file(file_name):\n\n\tlines = get_file_contents(file_name)\n\tall_words = []\n\tfor line in lines:\n\t\t# remove lines that don't have words on them\n\t\tif len(line) < 2:\n\t\t\tcontinue\n\t\tline = line.rstrip() # removes \\n at the end of each line\n\t\twords = line.split()\n\t\tfor word in words:\n\t\t\tall_words.append(word)\n\n\treturn all_words", "def read_data(filename,words):\n try:\n f = open(filename)\n reader = f.read().splitlines()\n for line in reader:\n #print(line[0])\n words.add(line.lower())\n f.close()\n except IOError:\n print 'Input file reading failed,'\n return words", "def get_words(file_path):\r\n with open(file_path, encoding='utf-8') as hfile:\r\n return hfile.read().lower().split()", "def get_words(file_path):\r\n with open(file_path, encoding='utf-8') as hfile:\r\n return hfile.read().lower().split()", "def load_wordlist(filename):\n with open(filename) as f:\n \tdata = f.read().splitlines()\n return data", "def read_file(filename):\n\n sentences = open(filename).read().strip().split(\"\\n\\n\") #separate tweets\n ret = []\n for sent in sentences:\n lines = sent.split(\"\\n\") #each word in the tweet\n pairs = [L.split(\"\\t\") for L in lines] #Funniest O\n tokens = [tok for tok,tag in pairs]\n tags = [tag for tok,tag in pairs]\n ret.append( (tokens,tags) )\n return ret", "def load_words():\r\n \r\n my_file = open(\"words.txt\")\r\n words = my_file.read()\r\n words_list = words.split(\" \")\r\n return (words_list)\r\n my_file.close()", "def get_word_list(file_name):\n # Read the file specified\n f = open(file_name,'r')\n lines = f.readlines()\n \n # Remove header text from lines\n curr_line = 0\n while lines[curr_line].find('START OF THIS PROJECT GUTENBERG EBOOK') == -1:\n curr_line += 1\n lines = lines[curr_line + 1:]\n\n # Remove footer text from lines\n curr_line = -1\n while lines[curr_line].find('END OF THIS PROJECT GUTENBERG EBOOK') == -1:\n curr_line 
-= 1\n lines = lines[: curr_line]\n\n # Strip lines into words\n words = []\n for i in range(len(lines)):\n # Remove punctuation\n next_line = lines[i].translate(string.maketrans(\"\",\"\"), string.punctuation)\n next_line = next_line.lower()\n words += next_line.split()\n \n return words", "def process_file(filename, skip_header=True):\n hist = {}\n fp = file(filename)\n fullwordlist=[]\n # if skip_header:\n # skip_gutenberg_header(fp)\n\n for line in fp:\n holder=process_line(line,hist)\n #print holder\n fullwordlist.extend(holder)\n return fullwordlist", "def read_dictionary():\n with open(FILE, 'r') as f:\n for line in f:\n words_lst = line.split()\n for word in words_lst:\n dict_list.append(word)", "def readFile(filename):\n listOfWords = []\n currentLine = 1\n f = open(filename, \"r\")\n for line in f:\n line = stripPunctuation(line)\n for word in line.split():\n word = word.lower()\n if len(word) > 1:\n if not word[0].isdigit():\n tempObj = contains(listOfWords, word)\n if tempObj != None:\n tempObj.incOccurrence(currentLine)\n else:\n temp = Word(word, currentLine)\n listOfWords.append(temp)\n currentLine = currentLine + 1\n return listOfWords", "def load_words():\n f = open('words.txt', 'r')\n words_list = f.readlines()\n f.close()\n split_words_list = words_list[0].split(' ')\n return split_words_list", "def load_words(filename):\n url = codeskulptor.file2url(filename)\n word_file = urllib2.urlopen(url)\n \n all_words = []\n for line in word_file.readlines():\n all_words.append(line.strip())\n \n \n return all_words", "def get_word_list(file_name):\n\tnew_list = []\n\n\tf = open(file_name,'r')\n\tlines = f.readlines()\n\tcurr_line = 0\n\tend_line = 0\n\twhile lines[curr_line].find('START OF THIS PROJECT GUTENBERG EBOOK') == -1:\n\t\tcurr_line += 1\n\twhile lines[end_line].find('End of the Project Gutenberg EBook') == -1:\n\t\tend_line -= 1\n\tlines = lines[curr_line + 1:end_line]\n\n\tlong_lines = ''.join(str(e) for e in lines)\n\tlong_lines = long_lines.lower()\n\tlong_lines = long_lines.translate(None, punctuation)\n\n\twords = long_lines.split()\n\tfor item in words:\n\t\tnew_list.append(item)\n\n\treturn new_list", "def get_word_list(file_name):\n\tbook = get_file_text(file_name)\n\tbook = strip_header(book)\n\tbook = strip_punctuation(book)\n\tbook = book.lower()\n\twords = re.split(r'\\s+', book)\n\treturn words", "def list_words():\n fin = open('words.txt')\n words = []\n for line in fin:\n words.append(line.strip())\n fin.close()\n return words", "def loadWords() -> List[str]:\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # wordList: list of strings\n wordList = []\n for line in inFile:\n wordList.append(line.strip().lower())\n print(\" \", len(wordList), \"words loaded.\")\n\n return wordList", "def _read_txt(file_path):\n translation_pairs = []\n with file_path.open() as f:\n for line in f:\n translation_pairs.append(\n evaluation.TranslationPair(source=None, translation=line.strip())\n )\n return translation_pairs", "def read_words(infile):\n words = []\n with open(infile, 'r') as f:\n csv_reader = csv.reader(f)\n for row in csv_reader:\n words = words + row\n return words", "def get_word_list(file_name):\n\n\tstoryEdit = []\n\n\t#Reads the file starting after the beginning\t\n\tf = open(file_name,'r')\n\tlines = f.readlines()\n\tcurr_line = 0\n\twhile lines[curr_line].find('START OF THIS PROJECT GUTENBERG EBOOK') == -1:\n\t\tcurr_line += 1\n\tlines = lines[curr_line+1:]\n\n\n\t#Loops through each row, making 
everything lowercase and replacing all punctuation\n\tfor row in lines:\n\t \trow = row.lower()\n\t \trow = row.translate(string.maketrans(\"\",\"\"), string.punctuation)\n\t \tstoryEdit += row.split()\n\n\n\t#Returns the final list as \n\treturn storyEdit", "def read_words(filename, replace = ['\\n', '<eos>']):\n with tf.gfile.GFile(filename, \"r\") as f:\n return f.read().replace(*replace).split()", "def load_words():\n with open(DICTIONARY) as f:\n return [line.strip() for line in f]", "def file_reader(filePath):\n try:\n word_file = open(filePath, \"rt\")\n word_list = word_file.read().splitlines()\n word_file.close()\n return word_list\n except Exception:\n print(f\"An error has occured when reading the file.\")\n\n return", "def get_word_list():\n data_file = open(\"unixWordList.txt\",\"r\")\n word_list = [] # start with an empty word list\n for word in data_file: # for every word (line) in the file\n # strip off end−of−line characters and make each word lowercase\n # then append the word to the word list\n word_list.append(word.strip().lower())\n data_file.close()\n return word_list", "def read_input(fname):\n f_data = open(fname).read().strip().replace(utils.TAB, utils.SPACE)\n X = []\n sentences = f_data.split(utils.CARRIGE_RETURN)\n for sen in sentences:\n words = sen.split(utils.NEWLINE)\n X.append(words)\n return [X, list(itertools.chain(*X))]", "def read_tagged_word_list(filename):\n # TODO: write and test this method\n print 'reading tagged file'", "def load_words_from_file(filename):\n f = open(filename, \"r\")\n file_content = f.read()\n f.close()\n wds = file_content.split()\n return wds", "def get_word_list(file_name):\n file_ = open(file_name, 'r')\n lines = file_.readlines()\n\n start_line = 0\n while lines[start_line].find('START OF THIS PROJECT GUTENBERG EBOOK') == -1:\n start_line += 1\n\n lines = lines[start_line+1:]\n\n end_line = 0\n while lines[end_line].find('END OF THIS PROJECT GUTENBERG EBOOK') == -1:\n end_line += 1\n\n lines = lines[:end_line-3]\n\n list_ = ' '.join(lines)\n list_ = str.lower(list_)\n list_ = list_.translate(None, string.punctuation)\n list_ = list_.split()\n\n return list_", "def word_gen(path, skiplines=0):\n\n with open(path) as f:\n for _ in range(skiplines):\n next(f)\n for line in f:\n stripped_line = line.translate(str.maketrans('', '', string.punctuation+string.digits))\n for word in stripped_line.split():\n yield word.lower()", "def read_file(file_path):\n with open(file_path, 'r') as students:\n return [list(filter(None, re.split(r'[ |,]', student.rstrip('\\n')))) for student in students]", "def get_list(file_name):\n with open(file_name, \"r\", encoding=\"latin-1\") as file:\n text = file.read()\n text = text.lower() # Make everything lowercase\n text = text.split(\"\\n\")\n return text", "def load_words():\n print(\"Loading word list from file..\")\n WORDLIST_FILENAME = \"words.txt\"\n # with open('words.txt', 'r') as f:\n # inFile = f.read()\n inFile = open(WORDLIST_FILENAME, 'r')\n wordlist = []\n\n for line in inFile:\n wordlist.append(line.strip().lower())\n return wordlist", "def load_words(file_path: str) -> List[Word]:\n \n words = load_words_raw(file_path)\n \n \n words = remove_stop_words(words)\n\n \n words = remove_duplicates(words)\n \n return words", "def load_words():\r\n## print \"Loading word list from file...\"\r\n # inFile: file\r\n inFile = open(WORDLIST_FILENAME, 'r', 0)\r\n # wordlist: list of strings\r\n wordlist = []\r\n for line in inFile:\r\n wordlist.append(line.strip().lower())\r\n## print \" \", 
len(wordlist), \"words loaded.\"\r\n return wordlist", "def load_input_word_list(file_path):\n if not os.path.isfile(file_path):\n return False\n\n word_list = list()\n\n with open(file_path, 'r') as fp:\n while True:\n line = fp.readline()\n if not line:\n break\n\n data = line.split(' ')\n text = data[0].lower().strip(Setting.NONWORD_CHARACTERS)\n\n if not text:\n continue\n\n text = text.replace('_', ' ')\n\n score = float(data[1])\n\n if score < 0:\n kind = WordKindEnum.NEG\n else:\n kind = WordKindEnum.POS\n\n word = Word(text, score, kind)\n word_list.append(word)\n\n return word_list", "def word_list():\n words = open(raw_input('Enter filename :'), 'r')\n lst = []\n for item in words:\n lst.append(item.strip()) #strip() removes the \\n that are added by the encoding\n return lst", "def read_conll_file(file_name):\n data = []\n current_words = []\n current_tags = []\n\n for line in codecs.open(file_name, encoding='utf-8'):\n line = line.strip()\n \n if line:\n if line[0] == '#':\n continue # skip comments\n tok = line.split('\\t')\n if '-' in tok[0] or '.' in tok[0]:\n continue # skip special tokenized words\n word = tok[1]\n tag = tok[3]\n \n current_words.append(word)\n current_tags.append(tag)\n else:\n if current_words: # skip empty lines\n data.append((current_words, current_tags))\n current_words = []\n current_tags = []\n\n # check for last one\n if current_tags != [] and not raw:\n data.append((current_words, current_tags))\n return data", "def simple_read_words(filename=\"nietzsche.txt\"):\n with open(\"nietzsche.txt\", \"r\") as f:\n words = f.read()\n return words", "def importBrainstormWordsFile(filename):\n #init the list with all words in the file\n allWords = []\n \n #open the brainstorming words file and read the lines\n with open(filename, 'r') as fp:\n lines = fp.read().splitlines()\n \n #split the lines for the idiots that didn't read the instructions and add them to the output\n for curLine in lines:\n if curLine.startswith('Please type one'):\n continue\n cutLines = curLine.replace(',',' ').split()\n \n #cycle the word and add them\n for curWord in cutLines:\n allWords.append(curWord.strip().lower())\n \n return allWords", "def import_text(file):\n\n # Only use alpha-numeric words from file\n with open(file=file, mode='r') as text:\n word_list = [word for word in text.read().split() if word.isalnum()]\n return word_list", "def load_words():\n print\n \"Loading word list from file...\"\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r', 0)\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = string.split(line)\n print\n \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def load_words(filename):\n url = codeskulptor.file2url(filename)\n netfile = urllib2.urlopen(url)\n \n words = []\n for line in netfile.readlines():\n words.append(line.replace('\\n',''))\n \n return words", "def read(fname):\n with open(fname) as fh:\n for line in fh:\n sent = [w2i[eos]]\n sent += [w2i[x] for x in line.strip().split()]\n sent.append(w2i[eos])\n yield sent", "def read_common_words():\r\n path = r\"C:\\Users\\15451\\PycharmProjects\\Nan\\dataset\" # the path of the common word list\r\n # path = input(\"Please input the path of the common words list: \")\r\n file = \"common_words.txt\"\r\n f = open(path + \"/\" + file)\r\n iter_f = iter(f)\r\n list = []\r\n i = 0\r\n for line in iter_f:\r\n line = line.strip()\r\n line = line.lower()\r\n list.append(line)\r\n return list", "def readInWordList(): \n try:\n path = 
os.path.abspath(os.path.join(os.getcwd())) + '/english/wordList.txt'\n rawWordList = open(path,'r').readlines()\n\n except IOError:\n print(\"No such file {}\".format(path))\n return False \n\n # remove '\\r' and '\\n'\n cleanedWordList = []\n for word in rawWordList:\n cleanedWordList.append((word.rstrip('\\n')).rstrip('\\r').lower())\n return cleanedWordList", "def fetch_words(filename):\n data = [] #empty list\n with urlopen(filename) as story:\n for line in story:\n words = line.decode('utf-8').split() #must decode into strings and then separate with spaces\n #print(lists)\n for word in words:\n data.append(word)\n return(data)", "def load_wordlist(self, filename):\n reg1 = re.compile(\"^([1-6]{5})[ \\t]+(.*)$\")\n f = open(filename, 'r')\n \n if(self.generate):\n wordlist = []\n reg2 = re.compile(\"^(\\S*)$\")\n for line in f:\n m1 = reg1.match(line)\n m2 = reg2.match(line)\n \n if(m1):\n wordlist.append(m1.group(2))\n elif(m2):\n wordlist.append(m2.group(1))\n \n else:\n wordlist = {}\n for line in f:\n m = reg1.match(line)\n if(m):\n wordlist[int(m.group(1))] = m.group(2)\n \n if((not self.generate and len(wordlist) < 7776) or \n (self.generate and len(wordlist) < 2**13)):\n stderr.write(\"Word list is too short\\n\")\n exit(5)\n \n self.wordlist = wordlist", "def readPairs(filename):\r\n\r\n fileIn = open(filename, 'r')\r\n \r\n outputList = []\r\n\r\n for line in fileIn:\r\n pair = line.replace(\"\\n\",\"\").split(\" \")\r\n outputList.append(pair)\r\n \r\n return outputList\r\n fileIn.close()", "def readStrings(filename):\n txtlist = []\n f = open(filename)\n for line in f.readlines():\n txtlist.extend(line.split())\n return txtlist", "def load_words():\n print \"Loading word list from file...\"\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r', 0)\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = string.split(line)\n print \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def load_words():\n print \"Loading word list from file...\"\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r', 0)\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = string.split(line)\n print \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def get_sentence_list_for_word_file(file_path: str) -> List[str]:\n # get file data\n with open(file_path, 'r') as review_file:\n file_text = review_file.read().splitlines()\n return file_text", "def loadWords():\n print \"Loading word list from file...\"\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r', 0)\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = string.split(line)\n print \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def loadWords():\n print \"Loading word list from file...\"\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r', 0)\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = string.split(line)\n print \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def read_by_word(skip_lines):\n\n drop_mathrm = re.compile(r'\\\\(mathrm|rm)\\{(?P<mathrm>.*?)\\}')\n merge_colname = re.compile(r' *_')\n skip_latex = str.maketrans('', '', '{}$\\\\')\n with open(filename, 'r') as param_file:\n for line in param_file:\n if line.startswith('References.'):\n return\n if skip_lines > 0:\n skip_lines -= 1\n else:\n for word in merge_colname.sub(\n '_',\n drop_mathrm.sub(r'\\g<mathrm>',\n line).translate(skip_latex)\n ).split():\n yield word", "def get_words(filepath: 
str = \"words.txt\") -> list:\n fpath = Path(filepath)\n if not fpath.exists():\n raise FileNotFoundError(f\"Specified dictionary ({filepath}) not found\")\n\n if fpath.is_dir():\n raise ValueError(\"Filepath is a folder, not a file\")\n\n with fpath.open() as f:\n words = list(set([x.strip() for x in f.readlines()]))\n\n return words", "def read(fname):\n with open(fname) as fh:\n for line in fh:\n line = line.strip().split()\n sent = [tuple(x.rsplit(\"|\",1)) for x in line]\n yield sent", "def read_data(input_file):\n\n def process_line(labels, words):\n l = ' '.join([label for label in labels if len(label) > 0])\n w = ' '.join([word for word in words if len(word) > 0])\n lines.append((l, w))\n words = []\n labels = []\n return words, labels, lines\n\n rf = open(input_file, 'r')\n lines = [];\n words = [];\n labels = []\n for line in rf:\n word = line.strip().split(' ')[0]\n label = line.strip().split(' ')[-1]\n # here we dont do \"DOCSTART\" check\n\n if len(line.strip()) == 0: # and words[-1] == '.'\n words, labels, lines = process_line(labels, words)\n words.append(word)\n labels.append(label)\n rf.close()\n return lines", "def importDictionary():\n with open('res/dictionary.txt', 'r') as f:\n lines = f.readlines()\n result = [word.strip() for word in lines]\n return result", "def load_file(path_to_file) -> list:\r\n\tif not os.path.exists(path_to_file):\r\n\t\tlg.critical('The file %s doesn\\'t exist !' % path_to_file)\r\n\t\tsys.exit()\r\n\r\n\tfile = open(path_to_file, 'r')\r\n\twords = [word for word in file.read().split('\\n') if len(word) > 1]\r\n\tfile.close()\r\n\r\n\treturn words", "def load_words():\n print \"Loading word list from file...\"\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r', 0)\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def load_words_raw(file_path: str) -> List[Word]:\n def parse_line(line: str, frequency: int) -> Word:\n tokens = line.split()\n word = tokens[0]\n vector = v.normalize([float(x) for x in tokens[1:]])\n return Word(word, vector, frequency)", "def get_words_from_file(filename):\n words_by_len = {}\n f = open(filename, \"r\", 1, \"utf8\")\n for word in f:\n word = word.strip().lower()\n w_len = len(word)\n if w_len > 1:\n words_by_len[w_len] = words_by_len.get(w_len, []) + [word]\n return words_by_len", "def load_words():\n print \"Loading word list from file...\"\n in_file = open(WORDLIST_FILENAME, 'r', 0)\n line = in_file.readline()\n wordlist = string.split(line)\n print \" \", len(wordlist), \"words loaded.\"\n return wordlist", "def lines_into_list(line):\n\n words = line.rstrip().split('|')\n return words", "def read_list(fname):\n with open(fname) as handle:\n items = [line.strip() for line in handle]\n return items", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # 
line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist", "def load_words():\n print(\"Loading word list from file...\")\n # inFile: file\n inFile = open(WORDLIST_FILENAME, 'r')\n # line: string\n line = inFile.readline()\n # wordlist: list of strings\n wordlist = line.split()\n print(\" \", len(wordlist), \"words loaded.\")\n return wordlist" ]
[ "0.7736308", "0.76894", "0.7536886", "0.74992454", "0.7413196", "0.7329414", "0.73256314", "0.7306998", "0.72972393", "0.7285923", "0.7252589", "0.72504383", "0.72416985", "0.72221196", "0.71844417", "0.7173946", "0.7148515", "0.7139532", "0.71371263", "0.7133431", "0.71230274", "0.71090287", "0.71044827", "0.710089", "0.70830244", "0.708142", "0.70799434", "0.70694935", "0.706365", "0.7024679", "0.7024679", "0.7018253", "0.70018137", "0.69992715", "0.69979286", "0.69873524", "0.6982165", "0.6953484", "0.69447184", "0.6878439", "0.6869949", "0.6865895", "0.68497294", "0.6842404", "0.6839632", "0.6835776", "0.6832987", "0.6826538", "0.68066084", "0.6799424", "0.67958564", "0.6790066", "0.6776736", "0.67750245", "0.67537475", "0.67311907", "0.67251587", "0.6724293", "0.6721731", "0.67042184", "0.67009634", "0.6700643", "0.6698091", "0.6691662", "0.6684614", "0.6677781", "0.6672532", "0.66704905", "0.6669979", "0.66686267", "0.665727", "0.66458637", "0.66423756", "0.663577", "0.6635252", "0.66348666", "0.6633772", "0.6633772", "0.66274256", "0.66237676", "0.66237676", "0.6613697", "0.6606988", "0.65736854", "0.6566616", "0.6566495", "0.6557787", "0.6546514", "0.65423936", "0.6540497", "0.6531903", "0.6529882", "0.6522366", "0.6508979", "0.6508979", "0.6508979", "0.6508979", "0.6508979", "0.6508979", "0.6508979", "0.6508979" ]
0.0
-1
The input parameter "config" (dictionary) contains the sampled configurations passed by the BOHB optimizer.
def compute(self, config, budget, working_directory, *args, **kwargs): # Useful website -- https://aws.amazon.com/blogs/machine-learning/scalable-multi-node-deep-learning-training-using-gpus-in-the-aws-cloud/ ''' The below is commented out because I don't want to mess with the CNN's architecture. If you want to use hyperparameter optimization to alter the architecture of the fully connected layers as well, you can use the below. ''' #new_layer_elements = np.array([config['num_els_new_1'] if config['num_new_fc_layers'] >= 1 else None, # config['num_els_new_2'] if config['num_new_fc_layers'] >= 2 else None, # config['num_els_new_3'] if config['num_new_fc_layers'] >= 3 else None]) #new_layer_elements = list(new_layer_elements[new_layer_elements != None]) #old_fclayers_tofreeze = np.array([0 if config['freeze0_cat'] == 1 else None, # 1 if config['freeze1_cat'] == 1 else None]) #old_fclayers_tofreeze = list(old_fclayers_tofreeze[old_fclayers_tofreeze != None]) # Generate the model model = ISICNetAlex(num_new_fc_layers=0, new_layer_elements=[], dropout_rate=config['dropout_rate'], old_fclayers_tofreeze=[], ) # Use GPU processing if available. if torch.cuda.is_available(): model.cuda() # Build criterion and optimizer. criterion = torch.nn.CrossEntropyLoss() ''' The below is commented out because I don't want to mess with the optimizer. ''' #if config['optimizer'] == 'Adam': # optimizer = torch.optim.Adam(model.parameters(), lr=config['lr']) #else: # optimizer = torch.optim.SGD(model.parameters(), lr=config['lr'], momentum=config['sgd_momentum']) optimizer = torch.optim.SGD(model.parameters(), lr=config['lr'], momentum=config['sgd_momentum']) # Run training loop. # IMPORTANT -- note that the budget parameter used in setting up HpBandSter refers to the number of epochs. It can be made to refer to other parameters, but here we chose to have it refer to epochs. for epoch in range(int(budget)): start = time.time() # initialize variables to monitor training and validation loss train_loss = 0.0 ################### # train the model # ################### model.train() for batch_idx, (data, target) in enumerate(self.train_loader): # move to GPU if available if torch.cuda.is_available(): data, target = data.cuda(), target.cuda() optimizer.zero_grad() output = model(data) loss = criterion(output, target) loss.backward() optimizer.step() train_loss += 1/(batch_idx+1)*(loss.data-train_loss) print("Epoch {} training time took {} seconds".format(epoch,time.time()-start)) train_accuracy = self.evaluate_accuracy(model, self.train_loader) validation_accuracy = self.evaluate_accuracy(model, self.validation_loader) test_accuracy = self.evaluate_accuracy(model, self.test_loader) return ({ 'loss': 1-validation_accuracy, # remember: HpBandSter always minimizes! 'info': { 'test accuracy': test_accuracy, 'train accuracy': train_accuracy, 'validation accuracy': validation_accuracy, 'number of parameters': number_of_parameters(model), } })
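For context, here is a minimal sketch (not part of the record above, and not taken from its source repository) of how a worker exposing this compute() method is typically driven by HpBandSter's BOHB optimizer. The worker class name ISICWorker, its module path, the run_id, and the budget range are assumptions made purely for illustration.

import hpbandster.core.nameserver as hpns
from hpbandster.optimizers import BOHB
from isic_worker import ISICWorker  # hypothetical module and class wrapping the compute() shown above

# A local nameserver lets the optimizer and worker(s) find each other.
NS = hpns.NameServer(run_id='isic_bohb', host='127.0.0.1', port=None)
NS.start()

# Run one worker in the background; more workers can be started for parallel evaluations.
worker = ISICWorker(nameserver='127.0.0.1', run_id='isic_bohb')
worker.run(background=True)

# min_budget/max_budget are epochs here, matching how `budget` is used inside compute().
bohb = BOHB(configspace=ISICWorker.get_configspace(),
            run_id='isic_bohb',
            nameserver='127.0.0.1',
            min_budget=1, max_budget=9)
res = bohb.run(n_iterations=10)

bohb.shutdown(shutdown_workers=True)
NS.shutdown()

# Each sampled `config` is a plain dict such as
# {'lr': 0.01, 'sgd_momentum': 0.9, 'dropout_rate': 0.5},
# which is exactly what compute(self, config, budget, ...) receives.
incumbent = res.get_incumbent_id()
print(res.get_id2config_mapping()[incumbent]['config'])
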
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_params(config):\n params = copy.deepcopy(config.view.params)\n params.t2bins = np.arange(0, params.t2bin_max + 1e-4, params.t2bin_stepsize)\n params.out = make_Bunch(\"State and output of detection processing\") # outputs are not parameters, maybe separate \n return params", "def set_params(self, config):\n params = {'n_bins', 'edges', 'classes', 'chi', 'n_params'}\n self.__dict__.update((param, np.array(value)) for param, value in config.items() if param in params)", "def optimizer_config(self):\r\n return {\r\n \"lr\": self.args.lr[0],\r\n \"momentum\": self.args.momentum,\r\n \"weight_decay\": self.args.weight_decay,\r\n }", "def get_config_sample_speed():\n # try changing learning rate\n config = get_default_config()\n\n config['train_batch_size'] = 16384\n config['_policies'] = [None, \"from_scratch_sb\", \"pretrained\"]\n config['lr'] = 3e-4\n config['sgd_minibatch_size'] = 4096\n config['num_sgd_iter'] = 4\n config['rollout_fragment_length'] = 100\n config['num_workers'] = tune.grid_search([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])\n\n config['num_envs_per_worker'] = tune.grid_search([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])\n\n # ['humanoid_blocker', 'humanoid'],\n config['_train_policies'] = ['player_1']\n config['num_gpus'] = 0\n config['_train_steps'] = 20\n config[\"batch_mode\"] = \"complete_episodes\"\n\n config['_trainer'] = \"PPO\"\n config['_policy'] = \"PPO\"\n config['_call']['num_samples'] = 1\n config['_call']['resources_per_trial'] = {\n \"custom_resources\": {\"tune_cpu\": tune.sample_from(lambda spec: spec.config.num_workers + 10)}} # upper bound\n\n # config['_run_inline'] = True\n\n return config", "def configure(config_file):\n Config.config_file = config_file\n config = ConfigParser()\n config.optionxform = str\n config.read(config_file)\n dct = {}\n for item in list(config.sections()):\n dct[item] = dict(config.items(item))\n\n for item in dct.keys():\n if item == 'config':\n for key in dct[item].keys():\n dct[item][key] = tf(dct[item][key])\n\n if item == 'continuum':\n for key, val in dct[item].items():\n val = val.split(',')\n if len(val) == 2:\n dct[item][key] = {'xlim': float(val[0]), 'ylim': float(val[1])}\n dct[item][key] = {'ylim': float(val[0])}\n else:\n for k in [\"N\", \"b\", \"z\"]:\n try:\n dct[item][k] = dct[item][k].replace(\" \", \"\")\n dct[item][k] = list(map(float, dct[item][k].strip().split(',')))\n except KeyError:\n pass\n\n cond = []\n if \"N\" in dct[item].keys():\n cond += [dct[item][\"N\"][0] < 10., dct[item][\"N\"][-1] > 23.,\n dct[item][\"N\"][-1] < dct[item][\"N\"][0]]\n\n if \"b\" in dct[item].keys():\n cond += [dct[item][\"b\"][0] < 0., dct[item][\"b\"][-1] < dct[item][\"b\"][0]]\n\n if \"z\" in dct[item].keys():\n cond += [dct[item][\"z\"][-1] < dct[item][\"z\"][0]]\n\n if any(cond):\n raise Exception(\"check your random sampling inputs\")\n\n set_config_defaults(dct)\n _set_values(dct)", "def _sample_hyperparameters(self):\n\t\tconfig = {}\n\t\tfor attr, option in self._config_options.items():\n\t\t\tprint('Sampling', attr)\n\t\t\tconfig[attr] = option.sample()\n\t\treturn config", "def adapt_to_config(self, neb_config: config.NEBConfig):\n if neb_config.optim_config.eval_config is not None:\n self.model.adapt_to_config(neb_config.optim_config.eval_config)\n self.spring_constant = neb_config.spring_constant\n self.weight_decay = neb_config.weight_decay", "def config1() :\n data_name = \"titanic\" ### in data/input/\n model_class = 'AutoML' ### ACTUAL Class name for model_sklearn.py\n n_sample 
= 1000\n\n def post_process_fun(y): ### After prediction is done\n return int(y)\n\n def pre_process_fun(y): ### Before the prediction is done\n return int(y)\n\n\n model_dict = {'model_pars': {\n ### LightGBM API model #######################################\n 'model_class': model_class\n ,'model_pars' : {\n 'total_time_limit' : 20,\n 'algorithms' : 'auto',\n 'results_path' : root_repo + f'/data/output/{data_name}/{os_get_function_name()}/automl_1',\n 'eval_metric' : 'auto'\n\n # mode='Explain',\n # ml_task='auto', model_time_limit=None, algorithms='auto', train_ensemble=True,\n # stack_models='auto', eval_metric='auto', validation_strategy='auto', explain_level='auto',\n # golden_features='auto', features_selection='auto', start_random_models='auto',\n # hill_climbing_steps='auto', top_models_to_improve='auto', verbose=1, random_state=1234)\n }\n\n , 'post_process_fun' : post_process_fun ### After prediction ##########################################\n , 'pre_process_pars' : {'y_norm_fun' : pre_process_fun , ### Before training ##########################\n\n\n ### Pipeline for data processing ##############################\n 'pipe_list': [\n #### coly target prorcessing\n {'uri': 'source/prepro.py::pd_coly', 'pars': {}, 'cols_family': 'coly', 'cols_out': 'coly', 'type': 'coly' },\n\n\n {'uri': 'source/prepro.py::pd_colnum_bin', 'pars': {}, 'cols_family': 'colnum', 'cols_out': 'colnum_bin', 'type': '' },\n {'uri': 'source/prepro.py::pd_colnum_binto_onehot', 'pars': {}, 'cols_family': 'colnum_bin', 'cols_out': 'colnum_onehot', 'type': '' },\n\n #### catcol INTO integer, colcat into OneHot\n {'uri': 'source/prepro.py::pd_colcat_bin', 'pars': {}, 'cols_family': 'colcat', 'cols_out': 'colcat_bin', 'type': '' },\n # {'uri': 'source/prepro.py::pd_colcat_to_onehot', 'pars': {}, 'cols_family': 'colcat_bin', 'cols_out': 'colcat_onehot', 'type': '' },\n\n\n ### Cross_feat = feat1 X feat2\n # {'uri': 'source/prepro.py::pd_colcross', 'pars': {}, 'cols_family': 'colcross', 'cols_out': 'colcross_pair', 'type': 'cross'},\n\n\n #### Example of Custom processor\n #{'uri': THIS_FILEPATH + '::pd_col_myfun', 'pars': {}, 'cols_family': 'colnum', 'cols_out': 'col_myfun', 'type': '' }, \n\n\n ],\n }\n },\n\n 'compute_pars': { 'metric_list': ['accuracy_score','average_precision_score']\n\n ,'mlflow_pars' : None # {} ### Not empty --> use mlflow\n },\n\n 'data_pars': { 'n_sample' : n_sample,\n\n 'download_pars' : None,\n\n\n 'cols_input_type' : cols_input_type_1,\n ### family of columns for MODEL #########################################################\n # \"colnum\", \"colnum_bin\", \"colnum_onehot\", \"colnum_binmap\", #### Colnum columns\n # \"colcat\", \"colcat_bin\", \"colcat_onehot\", \"colcat_bin_map\", #### colcat columns\n # 'colcross_single_onehot_select', \"colcross_pair_onehot\", 'colcross_pair', #### colcross columns 'coldate', 'coltext',\n 'cols_model_group': [ 'colnum_bin',\n 'colcat_bin',\n # 'coltext',\n # 'coldate',\n #'colcross_pair',\n \n ### example of custom\n # 'col_myfun'\n ]\n\n ### Filter data rows ##################################################################\n ,'filter_pars': { 'ymax' : 2 ,'ymin' : -1 }\n\n }\n }\n\n ##### Filling Global parameters ############################################################\n model_dict = global_pars_update(model_dict, data_name, config_name=os_get_function_name() )\n return model_dict", "def extractBINS( configPy, var ):\n\n\t#TODO: Better a temporary file\n\ttry:\n\t\tshutil.copy( configPy, '_tmpPy.py')\n\texcept IOError:\n\t\tmessage = 
'\\033[1;31mError: There is no config File named %s\\033[1;m' % configPy\n\t\traise IOError, message\n\t# To be sure the first import is FWCore.ParameterSet.Config \n\t# in order to extract BINS\n\t_file = open('_tmpPy.py','r')\n\t_lines = _file.readlines()\n\t_file.close()\n\t_lines.insert(0,'import FWCore.ParameterSet.Config as cms\\n')\n\t_file = open('_tmpPy.py','w')\n\t_file.writelines(_lines)\n\t_file.close()\n\t# Append the working directory to do the import\n\tsys.path.append( os.getcwd() )\n\t#------------------------------------------------------------ \n\t\n\ttry:\n\t\tfrom _tmpPy import BINS\n\texcept ImportError:\n\t\tmessage = '\\033[1;31mError: There is no BINS in %s file. Are you sure this is a config python to do the fit?\\033[1;m' % configPy\n\t\tos.remove('_tmpPy.py')\n\t\traise ImportError, message\n\n\tvariables = BINS.parameterNames_()\n\t# Check if the variables introduced by the user are inside\n\t# the fit config python\n\tfor i in var:\n\t\tif i not in variables:\n\t\t\tos.remove('_tmpPy.py')\n\t\t\tmessage = \"\"\"\\033[1;31mError: The variable %s is not in the parameter BINS of the config python %s. \nCheck your config or change your input variable with --var option\\033[1;m \"\"\" % ( i, configPy)\n\t\t print message\n raise KeyError\n\n\t# All was fine. Remember: first variable is the pt-like (construct the weights respect it)\n\tPT = var[0]\n\tETA = var[1]\n\n\t#bins = BINS\n\ttry:\n\t\tos.remove( '_tmpPy.py' )\n\t\tos.remove( '_tmpPy.pyc' )\n\texcept OSError:\n\t\tpass\n\n\treturn BINS,PT,ETA", "def get_configspace():\r\n cs = CS.ConfigurationSpace()\r\n\r\n lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value='1e-2', log=True)\r\n\r\n # For demonstration purposes, we add different optimizers as categorical hyperparameters.\r\n # To show how to use conditional hyperparameters with ConfigSpace, we'll add the optimizers 'Adam' and 'SGD'.\r\n # SGD has a different parameter 'momentum'.\r\n optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])\r\n\r\n sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)\r\n\r\n cs.add_hyperparameters([lr, optimizer, sgd_momentum])\r\n\r\n\r\n\r\n num_conv_layers = CSH.UniformIntegerHyperparameter('num_conv_layers', lower=1, upper=3, default_value=2)\r\n\r\n num_filters_1 = CSH.UniformIntegerHyperparameter('num_filters_1', lower=4, upper=64, default_value=16, log=True)\r\n num_filters_2 = CSH.UniformIntegerHyperparameter('num_filters_2', lower=4, upper=64, default_value=16, log=True)\r\n num_filters_3 = CSH.UniformIntegerHyperparameter('num_filters_3', lower=4, upper=64, default_value=16, log=True)\r\n\r\n cs.add_hyperparameters([num_conv_layers, num_filters_1, num_filters_2, num_filters_3])\r\n\r\n\r\n dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)\r\n num_fc_units = CSH.UniformIntegerHyperparameter('num_fc_units', lower=8, upper=256, default_value=32, log=True)\r\n\r\n cs.add_hyperparameters([dropout_rate, num_fc_units])\r\n\r\n\r\n # The hyperparameter sgd_momentum will be used,if the configuration\r\n # contains 'SGD' as optimizer.\r\n cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')\r\n cs.add_condition(cond)\r\n\r\n # You can also use inequality conditions:\r\n cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)\r\n cs.add_condition(cond)\r\n\r\n cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)\r\n 
cs.add_condition(cond)\r\n\r\n return cs", "def __init__(self, bias_config, headers=None, label=None):\n self.analysis_config = bias_config.get_config()\n if headers is not None:\n self.analysis_config[\"headers\"] = headers\n if label is not None:\n self.analysis_config[\"label\"] = label", "def update_config(config, args):\n if args.n_train is not None:\n config['data']['n_train'] = args.n_train\n if args.n_valid is not None:\n config['data']['n_valid'] = args.n_valid\n if args.real_weight is not None:\n config['data']['real_weight'] = args.real_weight\n if args.lr is not None:\n config['optimizer']['learning_rate'] = args.lr\n if args.hidden_dim is not None:\n config['model']['hidden_dim'] = args.hidden_dim\n if args.n_graph_iters is not None:\n config['model']['n_graph_iters'] = args.n_graph_iters\n if args.batch_size is not None:\n config['data']['batch_size'] = args.batch_size\n if args.n_epochs is not None:\n config['training']['n_epochs'] = args.n_epochs\n if args.weight_decay is not None:\n config['optimizer']['weight_decay'] = args.weight_decay\n\n return config", "def customize_experiment_config(self, config):\n # TODO: use ConfigList from Coach launcher, and share customization code.\n hyperparams_dict = json.loads(os.environ.get(\"SM_HPS\", \"{}\"))\n\n # Set output dir to intermediate\n # TODO: move this to before customer-specified so they can override\n hyperparams_dict[\"rl.training.local_dir\"] = \"/opt/ml/output/intermediate\"\n\n self.hyperparameters = ConfigurationList() # TODO: move to shared\n for name, value in hyperparams_dict.items():\n # self.map_hyperparameter(name, val) #TODO\n if name.startswith(\"rl.\"):\n # self.apply_hyperparameter(name, value) #TODO\n self.hyperparameters.store(name, value)\n # else:\n # raise ValueError(\"Unknown hyperparameter %s\" % name)\n\n self.hyperparameters.apply_subset(config, \"rl.\")\n return config", "def get_configspace():\n cs = CS.ConfigurationSpace()\n\n \n\n # Learning rate hyperparameter\n lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value='1e-2', log=True)\n\n \n\n # Stochastic gradient descent momentum as parameter.\n sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)\n\n cs.add_hyperparameters([lr, sgd_momentum])\n \n # Optimizer hyperparameters.\n #optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])\n #cs.add_hyperparameters([optimizer])\n \n # Only add the sgd_momentum hyperparameter if the optimizer is stochastic gradient descent. 
Otherwise, it doesn't make sense.\n #cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')\n #cs.add_condition(cond)\n\n ''' The below is commented out because we're not fiddling with architecture in this optimization.'''\n #num_new_fc_layers = CSH.UniformIntegerHyperparameter('num_new_fc_layers', lower=0, upper=3, default_value=0, log=False)\n #num_els_new_1 = CSH.UniformIntegerHyperparameter('num_els_new_1', lower=128, upper=4096, default_value = 1000, log=True)\n #num_els_new_2 = CSH.UniformIntegerHyperparameter('num_els_new_2', lower=128, upper=4096, default_value = 1000, log=True)\n #num_els_new_3 = CSH.UniformIntegerHyperparameter('num_els_new_3', lower=128, upper=4096, default_value = 1000, log=True)\n\n #freeze0_old = CSH.UniformIntegerHyperparameter('freeze0_cat', lower = 0, upper = 1, default_value = 1, log=False)\n #freeze1_old = CSH.UniformIntegerHyperparameter('freeze1_cat', lower=0, upper=1, default_value=1, log=False)\n\n #cs.add_hyperparameters([num_new_fc_layers, num_els_new_1, num_els_new_2, num_els_new_3, freeze0_old, freeze1_old, batchsize])\n\n dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)\n\n cs.add_hyperparameters([dropout_rate])\n\n return cs", "def configure(self, config_json):\n log.info(\"Configuring EDD backend for processing\")\n log.debug(\"Configuration string: '{}'\".format(config_json))\n\n yield self.set(config_json)\n\n cfs = json.dumps(self._config, indent=4)\n log.info(\"Final configuration:\\n\" + cfs)\n\n\n\n self.__numa_node_pool = []\n # remove numa nodes with missing capabilities\n for node in numa.getInfo():\n if len(numa.getInfo()[node]['gpus']) < 1:\n log.debug(\"Not enough gpus on numa node {} - removing from pool.\".format(node))\n continue\n elif len(numa.getInfo()[node]['net_devices']) < 1:\n log.debug(\"Not enough nics on numa node {} - removing from pool.\".format(node))\n continue\n else:\n self.__numa_node_pool.append(node)\n\n log.debug(\"{} numa nodes remaining in pool after cosntraints.\".format(len(self.__numa_node_pool)))\n\n if len(self._config['input_data_streams']) > len(self.__numa_node_pool):\n raise FailReply(\"Not enough numa nodes to process {} polarizations!\".format(len(self._config['input_data_streams'])))\n\n self._subprocessMonitor = SubprocessMonitor()\n #ToDo: Check that all input data streams have the same format, or allow different formats\n for i, streamid in enumerate(self._config['input_data_streams']):\n # calculate input buffer parameters\n stream_description = self._config['input_data_streams'][streamid]\n stream_description[\"dada_key\"] = DADABUFFERS[i]\n self.add_input_stream_sensor(streamid)\n self.input_heapSize = stream_description[\"samples_per_heap\"] * stream_description['bit_depth'] / 8\n\n nHeaps = self._config[\"samples_per_block\"] / stream_description[\"samples_per_heap\"]\n input_bufferSize = nHeaps * (self.input_heapSize)\n log.info('Input dada parameters created from configuration:\\n\\\n heap size: {} byte\\n\\\n heaps per block: {}\\n\\\n buffer size: {} byte'.format(self.input_heapSize, nHeaps, input_bufferSize))\n\n\n final_payloads, final_fpss, final_framens = EDD_VDIF_Frame_Size(stream_description['sample_rate'])\n\n if self._config['payload_size'] == 'auto':\n payload_size = final_payloads[-1]\n else:\n payload_size = int(self._config['payload_size'])\n\n log.info('Possible frame payload sizes (add 32 for framesize):')\n for k in range(final_payloads.size):\n if payload_size == final_payloads[k]:\n M = \"*\"\n 
else:\n M = \" \"\n log.info(' {}{:5.0f} byte {:8.0f} frames per sec {:6.3f} nsec/frame'.format(M, final_payloads[k], final_fpss[k], final_framens[k]))\n\n if payload_size not in final_payloads:\n log.warning(\"Payload size {} possibly not conform with VDIF format!\".format(payload_size))\n\n # calculate output buffer parameters\n size_of_samples = ceil(1. * self._config[\"samples_per_block\"] * 2 / 8.) # byte for two bit mode\n number_of_packages = ceil(size_of_samples / float(payload_size))\n\n output_buffer_size = number_of_packages * (payload_size + self._config['vdif_header_size'])\n\n integration_time = self._config[\"samples_per_block\"] / float(stream_description[\"sample_rate\"])\n self._integration_time_status.set_value(integration_time)\n\n rate = output_buffer_size/ integration_time # in spead documentation BYTE per second and not bit!\n rate *= self._config[\"output_rate_factor\"] # set rate to (100+X)% of expected rate\n self._output_rate_status.set_value(rate / 1E9)\n\n log.info('Output parameters calculated from configuration:\\n\\\n total size of data samples: {} byte\\n\\\n number_of_packages: {}\\n\\\n size of output buffer: {} byte\\n\\\n rate ({:.0f}%): {} Gbps'.format(size_of_samples,\n number_of_packages, output_buffer_size,\n self._config[\"output_rate_factor\"]*100, rate / 1E9))\n\n numa_node = self.__numa_node_pool[i]\n log.debug(\"Associating {} with numa node {}\".format(streamid, numa_node))\n\n # configure dada buffer\n bufferName = stream_description['dada_key']\n yield self._create_ring_buffer(input_bufferSize, 64, bufferName, numa_node)\n\n ofname = bufferName[::-1]\n # we write nSlice blocks on each go\n yield self._create_ring_buffer(output_buffer_size, 8, ofname, numa_node)\n\n # Configure + launch \n physcpu = numa.getInfo()[numa_node]['cores'][0]\n thread_id = self._config['thread_id'][streamid]\n station_id = self._config['thread_id'][streamid]\n cmd = \"taskset -c {physcpu} VLBI --input_key={dada_key} --speadheap_size={heapSize} --thread_id={thread_id} --station_id={station_id} --payload_size={payload_size} --sample_rate={sample_rate} --nbits={bit_depth} -o {ofname} --log_level={log_level} --output_type=dada\".format(ofname=ofname, heapSize=self.input_heapSize, numa_node=numa_node, physcpu=physcpu, thread_id=thread_id, station_id=station_id, payload_size=payload_size, log_level=self._config['log_level'], **stream_description)\n log.debug(\"Command to run: {}\".format(cmd))\n\n cudaDevice = numa.getInfo()[numa_node]['gpus'][0]\n cli = ManagedProcess(cmd, env={\"CUDA_VISIBLE_DEVICES\": cudaDevice})\n self._subprocessMonitor.add(cli, self._subprocess_error)\n self._subprocesses.append(cli)\n\n cfg = self._config.copy()\n cfg.update(stream_description)\n\n ip_range = []\n port = set()\n for key in self._config[\"output_data_streams\"]:\n if streamid in key:\n ip_range.append(self._config[\"output_data_streams\"][key]['ip'])\n port.add(self._config[\"output_data_streams\"][key]['port'])\n if len(port)!=1:\n raise FailReply(\"Output data for one plarization has to be on the same port! 
\")\n\n if self._config[\"output_type\"] == 'network':\n physcpu = \",\".join(numa.getInfo()[numa_node]['cores'][1:2])\n fastest_nic, nic_params = numa.getFastestNic(numa_node)\n log.info(\"Sending data for {} on NIC {} [ {} ] @ {} Mbit/s\".format(streamid, fastest_nic, nic_params['ip'], nic_params['speed']))\n\n cmd = \"taskset -c {physcpu} vdif_send --input_key {ofname} --if_ip {ibv_if} --dest_ip {mcast_dest} --port {port_tx} --max_rate {rate}\".format(ofname=ofname, \n physcpu=physcpu, ibv_if=nic_params['ip'], mcast_dest=\" \".join(ip_range), port_tx=port.pop(), rate=rate)\n log.debug(\"Command to run: {}\".format(cmd))\n\n elif self._config[\"output_type\"] == 'disk':\n ofpath = os.path.join(cfg[\"output_directory\"], ofname)\n log.debug(\"Writing output to {}\".format(ofpath))\n if not os.path.isdir(ofpath):\n os.makedirs(ofpath)\n cmd = \"dada_dbdisk -k {ofname} -D {ofpath} -W\".format(ofname=ofname, ofpath=ofpath, **cfg)\n else:\n log.warning(\"Selected null output. Not sending data!\")\n cmd = \"dada_dbnull -z -k {}\".format(ofname)\n\n log.debug(\"Command to run: {}\".format(cmd))\n mks = ManagedProcess(cmd, env={\"CUDA_VISIBLE_DEVICES\": cudaDevice})\n self._subprocessMonitor.add(mks, self._subprocess_error)\n self._subprocesses.append(mks)\n\n self._subprocessMonitor.start()", "def get_config(self) -> dict:\n config = {}\n\n args = ['out_dim', 'bond_dim', 'use_bias']\n for arg in args:\n config[arg] = getattr(self, arg)\n\n config['activation'] = activations.serialize(getattr(self, 'activation'))\n\n custom_initializers = ['kernel_initializer', 'bias_initializer']\n for initializer_arg in custom_initializers:\n config[initializer_arg] = initializers.serialize(\n getattr(self, initializer_arg))\n\n base_config = super().get_config()\n return dict(list(base_config.items()) + list(config.items()))", "def test_single_config(runConfig):\n finalConfig = {}\n finalConfig.update(runConfig.algorithm_config)\n finalConfig.update({\"function\": runConfig.algorithm_function})\n finalConfig.update({\"stop_predicate\": runConfig.stop_predicate})\n finalConfig.update({\"runNumber\": range(1,runConfig.number_of_runs+1)})\n finalConfig.update({\"max_iter\": runConfig.max_iter})\n #finalConfig.update({\"config_start_timestamp\": time.time()})\n #finalConfig.update({\"authors\": [runConfig.authors]})\n \n print(finalConfig)\n \n for element in itertools.product(\n *map(lambda t: [(t[0], x) for x in iterate(t[1])], finalConfig.items())):\n print(element)", "def __init__(self, config):\n self.cfg = config\n self.var_combinations = [\"tas:tas\", \"pr:pr\", \"pr:tas\"]\n self.seasons = [\"jja\", \"djf\", \"annual\"]\n self.projects = [\"cmip5\", \"cmip6\"]\n self.variables = [\"tas\", \"pr\"]\n self.scenarios = [\"26\", \"45\", \"85\"]\n\n # generate list of candidate bound limits\n small = np.arange(0.1, 1, 0.1)\n medium = np.arange(1, 11)\n high = np.arange(20, 100, 10)\n v_high = np.arange(150, 400, 50)\n self.bound_candidates = np.concatenate(\n (small, medium, high, v_high)) * 5 / 4", "def get_configspace() -> CS.Configuration:\n cs = CS.ConfigurationSpace(seed=0)\n # START TODO ################\n lr_hp = CS.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)\n optimizer_hp = CSH.CategoricalHyperparameter(name='optimizer', choices=['Adam', 'SGD', 'RMSprop'])\n sgd_momentum_hp = CS.UniformFloatHyperparameter('sgd_momentum', lower=0.00, upper=0.99, default_value=0.9)\n\n rms_momentum_hp = CS.UniformFloatHyperparameter('rms_momentum', lower=0.00, upper=0.99, 
default_value=0.9)\n rms_alpha_hp = CS.UniformFloatHyperparameter('rms_alpha', lower=0.00, upper=0.99, default_value=0.99)\n\n scheduler_hp = CSH.CategoricalHyperparameter(name='scheduler',\n choices=['CosineAnnealingLR', 'CosineAnnealingWarmRestarts'])\n cosine_max_t_hp = CS.UniformIntegerHyperparameter(name='cosine_max_t', lower=50, upper=300, default_value=150)\n cosine_warm_hp = CS.UniformIntegerHyperparameter(name='warm_t_0', lower=50, upper=300, default_value=150)\n\n sgd_cond = CS.EqualsCondition(sgd_momentum_hp, optimizer_hp, 'SGD')\n rms_cond1 = CS.EqualsCondition(rms_momentum_hp, optimizer_hp, 'RMSprop')\n rms_cond2 = CS.EqualsCondition(rms_alpha_hp, optimizer_hp, 'RMSprop')\n cosine_warm_cond = CS.EqualsCondition(cosine_warm_hp, scheduler_hp, 'CosineAnnealingWarmRestarts')\n cosine_cond = CS.EqualsCondition(cosine_max_t_hp, scheduler_hp, 'CosineAnnealingLR')\n cs.add_hyperparameters([lr_hp, optimizer_hp, sgd_momentum_hp, rms_momentum_hp,\n rms_alpha_hp, scheduler_hp, cosine_max_t_hp, cosine_warm_hp])\n cs.add_conditions([sgd_cond, rms_cond1, rms_cond2, cosine_cond, cosine_warm_cond])\n # END TODO ################\n return cs", "def set_config(self, config):\n self.adversarial = config.adversarial\n self.eps = config.eps\n self.probability = config.probability\n self.use_dynamics = config.use_dynamics\n self.random = config.random\n self.observable_noise = config.observable_noise\n self.use_max_norm = config.use_max_norm", "def __init__(self, input_size, hidden_size, output_size, std=1e-4):\n self.params = {}\n self.params['W1'] = std * np.random.randn(input_size, hidden_size)\n self.params['b1'] = np.zeros(hidden_size)\n self.params['W2'] = std * np.random.randn(hidden_size, output_size)\n self.params['b2'] = np.zeros(output_size)", "def config_params0(data,parameter):\n model = []\n #Range of value of p\n acf = sm.graphics.tsa.acf(data.diff().dropna())\n for i in range(len(acf)):\n acf[i] = abs(acf[i]*10)\n if (ceil(acf[i])) <= 2:\n p = range(ceil(acf[i])-1,ceil(acf[i])+2)\n break\n\n #range of value of q\n pacf = sm.graphics.tsa.pacf(data.diff().dropna())\n for i in range(len(pacf)):\n pacf[i] = abs(pacf[i]*10)\n if (ceil(pacf[i])) <= 2:\n q = range(ceil(pacf[i])-1,ceil(pacf[i])+2)\n break\n\n\t# define config lists\n p_params = p\n d_params = parameter['d']\n q_params = q\n m_params = parameter['m']\n #P_params = p\n #D_params = [0, 1]\n #Q_params = q\n \n pdq_m = list(itertools.product(p_params, d_params, q_params,m_params)) #Generate all different combinations of p, q and q triplets\n params = [[(x[0], x[1], x[2]),(x[0], x[1], x[2], x[3])] for x in pdq_m]\n return params", "def __init__(self, mean, config):\n self.lb = config.get('lb', 0)\n self.ub = config.get('ub', sys.maxint)\n self.a = float(config['a'])", "def get_config(self):\n config = {\n \"units\": self.units,\n \"activation\": activations.serialize(self.activation),\n \"recurrent_activation\": activations.serialize(\n self.recurrent_activation\n ),\n \"attention_activation\": activations.serialize(\n self.attention_activation\n ),\n \"use_bias\": self.use_bias,\n \"kernel_initializer\": initializers.serialize(self.kernel_initializer),\n \"recurrent_initializer\": initializers.serialize(\n self.recurrent_initializer\n ),\n \"bias_initializer\": initializers.serialize(self.bias_initializer),\n \"attention_initializer\": initializers.serialize(\n self.attention_initializer\n ),\n \"use_chrono_initialization\": self.unit_forget_bias,\n \"kernel_regularizer\": regularizers.serialize(self.kernel_regularizer),\n 
\"recurrent_regularizer\": regularizers.serialize(\n self.recurrent_regularizer\n ),\n \"bias_regularizer\": regularizers.serialize(self.bias_regularizer),\n \"activity_regularizer\": regularizers.serialize(\n self.activity_regularizer\n ),\n \"attention_regularizer\": regularizers.serialize(\n self.attention_regularizer\n ),\n \"kernel_constraint\": constraints.serialize(self.kernel_constraint),\n \"recurrent_constraint\": constraints.serialize(\n self.recurrent_constraint\n ),\n \"bias_constraint\": constraints.serialize(self.bias_constraint),\n \"attention_constraint\": constraints.serialize(\n self.attention_constraint\n ),\n \"dropout\": self.dropout,\n \"recurrent_dropout\": self.recurrent_dropout,\n \"return_attention\": self.return_attention,\n }\n base_config = super().get_config()\n del base_config[\"cell\"]\n return dict(list(base_config.items()) + list(config.items()))", "def get_binners(config):\n binners = []\n if config[\"binning\"][\"metabat\"]:\n binners.append(\"metabat\")\n if config[\"binning\"][\"concoct\"]:\n binners.append(\"concoct\")\n if config[\"binning\"][\"maxbin\"]:\n binners.append(\"maxbin\")\n return binners", "def __init__(self, input_size, hidden_size, output_size, weight_init_std=0.01):\n\n self.params = {}\n self.params['W1'] = weight_init_std * \\\n np.random.randn(input_size, hidden_size)\n self.params['b1'] = np.zeros(hidden_size)\n self.params['W2'] = weight_init_std * \\\n np.random.randn(hidden_size, output_size)\n self.params['b2'] = np.zeros(output_size)", "def __init__(self, input_size, neurons):\n super().__init__()\n self.input_size = input_size\n self.neurons = neurons\n self.params[\"w\"] = np.random.randn(input_size, neurons)\n self.params[\"b\"] = np.random.randn(1, neurons)\n self.grads = {}", "def pibooth_configure(cfg):", "def config( **kwargs ):", "def run_from_config(config_fname, output_fname, rmf_fname=None):\n IMP.set_log_level(IMP.SILENT)\n print(\"assigning parameter ranges from config into output\", output_fname)\n num = IMP.npctransport.assign_ranges(config_fname, output_fname,\n 0, True, 10)\n sd = IMP.npctransport.SimulationData(output_fname, False)\n sd.set_log_level(IMP.SILENT)\n if rmf_fname is not None:\n sd.set_rmf_file( rmf_fname, False )\n if IMP.get_check_level() >= IMP.USAGE_AND_INTERNAL:\n short_init_factor = 0.00001\n opt_cycles = 1\n else:\n short_init_factor = 0.01\n opt_cycles = 10000\n sd.get_bd().set_log_level(IMP.SILENT)\n IMP.npctransport.initialize_positions(sd, [], False, short_init_factor)\n sd.activate_statistics()\n obd = sd.get_bd()\n obd.optimize(opt_cycles)\n timer = IMP.npctransport.timer()\n # lame test\n # rt= sd.get_root()\n # rtt= IMP.npctransport.Transporting.setup_particle(rt, True)\n # rtf= rt.get_child(0)\n # rttf= IMP.npctransport.Transporting.setup_particle(rtf, False)\n print(\"updating stats\")\n sd.get_statistics().update(timer, 0)\n return sd", "def update_configuration(self, config):\n\n config[\"data_transformation\"][\"n_classification_bins\"] = config[\"n_classification_bins\"]\n config[\"data_transformation\"][\"nassets\"] = config[\"nassets\"]\n config[\"data_transformation\"][\"classify_per_series\"] = config[\"classify_per_series\"]\n config[\"data_transformation\"][\"normalise_per_series\"] = config[\"normalise_per_series\"]\n\n return config", "def default_kernel_config(defn):\n return [('beam', {}),\n ('hypers',\n {\n 'alpha_a': 4.0,\n 'alpha_b': 2.0,\n 'gamma_a': 3.0, \n 'gamma_b': 6.0\n }\n )]", "def get_bert_config(config):\n if config.model_size == \"large\":\n args = 
{\"hidden_size\": 1024, \"num_hidden_layers\": 24}\n elif config.model_size == \"base\":\n args = {\"hidden_size\": 768, \"num_hidden_layers\": 12}\n elif config.model_size == \"small\":\n args = {\"hidden_size\": 256, \"num_hidden_layers\": 12}\n else:\n raise ValueError(\"Unknown model size\", config.model_size)\n args[\"vocab_size\"] = config.vocab_size\n args.update(**config.model_hparam_overrides)\n # by default the ff size and num attn heads are determined by the hidden size\n args[\"num_attention_heads\"] = max(1, args[\"hidden_size\"] // 64)\n args[\"intermediate_size\"] = 4 * args[\"hidden_size\"]\n args.update(**config.model_hparam_overrides)\n return modeling.BertConfig.from_dict(args)", "def summarize(config):\n\n try:\n logger.info(':: ConfigID {0} ::'.format(config.configId))\n logger.info('\\tScan {0}, source {1}, intent {2}'\n .format(config.scanNo, config.source,\n config.scan_intent))\n\n logger.info('\\t(RA, Dec) = ({0}, {1})'\n .format(config.ra_deg, config.dec_deg))\n subbands = config.get_subbands()\n reffreqs = [subband.sky_center_freq for subband in subbands]\n logger.info('\\tFreq: {0} - {1}'\n .format(min(reffreqs), max(reffreqs)))\n\n nchans = [subband.spectralChannels for subband in subbands]\n chansizes = [subband.bw/subband.spectralChannels\n for subband in subbands]\n sb0 = subbands[0]\n logger.info('\\t(nspw, chan/spw, nchan) = ({0}, {1}, {2})'\n .format(len(nchans), nchans[0], sum(nchans)))\n logger.info('\\t(BW, chansize) = ({0}, {1}) MHz'\n .format(sb0.bw, chansizes[0]))\n if not all([chansizes[0] == chansize for chansize in chansizes]):\n logger.info('\\tNot all spw have same configuration.')\n\n logger.info('\\t(nant, npol) = ({0}, {1})'\n .format(config.numAntenna, sb0.npp))\n dt = 24*3600*(config.stopTime-config.startTime)\n logger.info('\\t(StartMJD, duration) = ({0}, {1}s).'\n .format(config.startTime, round(dt, 1)))\n logger.info('\\t({0}/{1}) ints at (HW/Final) integration time of ({2:.3f}/{3:.3f}) s'\n .format(int(round(dt/sb0.hw_time_res)),\n int(round(dt/sb0.final_time_res)),\n sb0.hw_time_res, sb0.final_time_res))\n except:\n logger.warn(\"Failed to fully parse config to print summary.\"\n \"Proceeding.\")", "def _parse_config(self, config_path=None):\n # Configurable parameters\n config_param = {\n 'elements': [\n 'NUM_BLUE',\n 'NUM_RED',\n 'NUM_BLUE_UAV',\n 'NUM_RED_UAV',\n 'NUM_GRAY',\n 'NUM_BLUE_UGV2',\n 'NUM_RED_UGV2',\n 'NUM_BLUE_UGV3',\n 'NUM_RED_UGV3',\n 'NUM_BLUE_UGV4',\n 'NUM_RED_UGV4'],\n 'control': [\n 'CONTROL_ALL',\n 'MAX_STEP',\n 'RED_STEP',\n 'RED_DELAY',\n 'BLUE_ADV_BIAS',\n 'RED_ADV_BIAS'],\n 'communication': [\n 'COM_GROUND',\n 'COM_AIR',\n 'COM_DISTANCE',\n 'COM_FREQUENCY'],\n 'memory': [\n 'INDIV_MEMORY',\n 'TEAM_MEMORY',\n 'RENDER_INDIV_MEMORY',\n 'RENDER_TEAM_MEMORY'],\n 'settings': [\n 'RL_SUGGESTIONS',\n 'STOCH_TRANSITIONS',\n 'STOCH_TRANSITIONS_EPS',\n 'STOCH_TRANSITIONS_MOD',\n 'STOCH_ATTACK',\n 'STOCH_ATTACK_BIAS',\n 'STOCH_ZONES',\n 'RED_PARTIAL',\n 'BLUE_PARTIAL',\n 'MAP_MODE',\n 'MAP_POOL_SIZE'],\n 'experiments': [\n 'RENDER_ENV_ONLY',\n 'SAVE_BOARD_RGB',\n 'SAVE_BLUE_OBS',\n 'SAVE_RED_OBS',\n 'SILENCE_RENDER',\n 'RESPAWN_FLAG',\n 'RESPAWN_AGENT_DEAD',\n 'RESPAWN_AGENT_AT_FLAG']}\n config_datatype = {\n 'elements': [\n int, int, int ,int, int, int, int,\n int, int, int, int],\n 'control': [bool, int, int, int, int, int, int, int],\n 'communication': [bool, bool, int, int],\n 'memory': [str, str, bool, bool],\n 'settings': [\n bool, bool, float, str,\n bool, int, bool, bool, bool, str, int],\n 'experiments': 
[bool, bool, bool, bool, bool, bool, bool, bool]}\n\n if config_path is None and self.config_path is not None:\n # Maintain previous configuration\n return\n assert os.path.isfile(config_path), 'Configuration file does not exist'\n self.config_path = config_path\n config = configparser.ConfigParser()\n config.read(config_path)\n\n # Set environment attributes\n for section in config_param:\n for option, datatype in zip(config_param[section], config_datatype[section]):\n if not config.has_section(section) or not config.has_option(section, option):\n if hasattr(self, option):\n continue\n else:\n raise KeyError('Configuration import fails: double check whether all config variables are included')\n if datatype is bool:\n value = config.getboolean(section, option)\n elif datatype is int:\n value = config.getint(section, option)\n elif datatype is float:\n value = config.getfloat(section, option)\n elif datatype is str:\n value = config.get(section, option)\n else:\n raise Exception('Unsupported datatype')\n setattr(self, option, value)", "def fit_config(rnd: int) -> Dict[str, str]:\n config = {\n \"epoch_global\": str(rnd),\n \"epochs\": str(1),\n \"batch_size\": str(32),\n \"learning_rate\": str(0.001),\n }\n return config", "def run_block(self, config: Dict) -> Dict:\n super().run_block(config)\n test.validate_eda(self.params['key'], config)\n\n for key in config.keys():\n keyword = re.sub('[^a-zA-Z]+', '', key)\n params = eval(f'self.run_{keyword}(key, config[key])')\n # Check if the params output is not empty\n if not bool(params):\n self.params.update(params)\n print(f\"Added the following parameters: {params}\")\n\n self.params['step_number'] += 1\n return self.params", "def model_and_data(request, hyperparams, estep_conf):\n if tvo.get_run_policy() == \"mpi\":\n init_processes()\n\n precision, N, D, H, batch_size = get(hyperparams, \"precision\", \"N\", \"D\", \"H\", \"batch_size\")\n\n if request.param == \"BSC\":\n W_gt = generate_bars(H, bar_amp=10.0, precision=precision)\n sigma2_gt = to.ones((1,), dtype=precision, device=tvo.get_device())\n pies_gt = to.full((H,), 2.0 / H, dtype=precision, device=tvo.get_device())\n\n to.manual_seed(999)\n W_init = to.rand((D, H), dtype=precision)\n W_init = W_init.to(device=tvo.get_device())\n broadcast(W_init)\n\n sigma2_init = to.tensor([1.0], dtype=precision, device=tvo.get_device())\n pies_init = to.full((H,), 1.0 / H, dtype=precision, device=tvo.get_device())\n\n model = BSC(\n H=H, D=D, W_init=W_gt, sigma2_init=sigma2_gt, pies_init=pies_gt, precision=precision\n )\n\n fname = \"bars_test_data_bsc.h5\"\n\n write_dataset(fname, N, D, np.float32, model)\n\n model.theta[\"W\"] = W_init\n model.theta[\"sigma2\"] = sigma2_init\n model.theta[\"pies\"] = pies_init\n\n elif request.param == \"NoisyOR\":\n W_gt = generate_bars(H, bar_amp=0.8, bg_amp=0.1, precision=precision)\n pies_gt = to.full((H,), 2.0 / H, dtype=precision, device=tvo.get_device())\n\n to.manual_seed(999)\n W_init = to.rand((D, H), dtype=precision)\n W_init = W_init.to(device=tvo.get_device())\n broadcast(W_init)\n pies_init = to.full((H,), 1.0 / H, dtype=precision, device=tvo.get_device())\n\n model = NoisyOR(H=H, D=D, W_init=W_gt, pi_init=pies_gt, precision=precision)\n\n fname = \"bars_test_data_nor.h5\"\n\n write_dataset(fname, N, D, np.uint8, model)\n\n model.theta[\"W\"] = W_init\n model.theta[\"pies\"] = pies_init\n\n if tvo.get_run_policy() == \"mpi\":\n dist.barrier()\n\n return model, fname", "def get_model_config(model_name, args):\n if model_name == 'Tacotron2':\n 
model_config = dict(\n # optimization\n mask_padding=args.mask_padding,\n # audio\n n_mel_channels=args.n_mel_channels,\n # symbols\n n_symbols=args.n_symbols,\n symbols_embedding_dim=args.symbols_embedding_dim,\n # encoder\n encoder_kernel_size=args.encoder_kernel_size,\n encoder_n_convolutions=args.encoder_n_convolutions,\n encoder_embedding_dim=args.encoder_embedding_dim,\n # attention\n attention_rnn_dim=args.attention_rnn_dim,\n attention_dim=args.attention_dim,\n # attention location\n attention_location_n_filters=args.attention_location_n_filters,\n attention_location_kernel_size=args.attention_location_kernel_size,\n # decoder\n n_frames_per_step=args.n_frames_per_step,\n decoder_rnn_dim=args.decoder_rnn_dim,\n prenet_dim=args.prenet_dim,\n max_decoder_steps=args.max_decoder_steps,\n gate_threshold=args.gate_threshold,\n p_attention_dropout=args.p_attention_dropout,\n p_decoder_dropout=args.p_decoder_dropout,\n # postnet\n postnet_embedding_dim=args.postnet_embedding_dim,\n postnet_kernel_size=args.postnet_kernel_size,\n postnet_n_convolutions=args.postnet_n_convolutions,\n decoder_no_early_stopping=args.decoder_no_early_stopping\n )\n return model_config\n elif model_name == 'WaveGlow':\n model_config = dict(\n n_mel_channels=args.n_mel_channels,\n n_flows=args.flows,\n n_group=args.groups,\n n_early_every=args.early_every,\n n_early_size=args.early_size,\n WN_config=dict(\n n_layers=args.wn_layers,\n kernel_size=args.wn_kernel_size,\n n_channels=args.wn_channels\n )\n )\n return model_config\n else:\n raise NotImplementedError(model_name)", "def input_config():\n run_dir = 'runs/ODEMnistClassification/8'\n epoch = 'latest'\n device = 'cpu'\n min_end_time = 10\n max_end_time = 100\n tol = 1e-3", "def __init__(self, config, xtdim, batch_size):\n self.float_type = 'float32' # This should be the default\n self.config = config\n self.dt = self.config['dt']\n\n self.n_input = self.config['n_input']\n self.n_output = self.config['n_output']\n\n self.batch_size = batch_size\n self.xtdim = xtdim\n\n # time major\n self.x = np.zeros((xtdim, batch_size, self.n_input), dtype=self.float_type)\n self.y = np.zeros((xtdim, batch_size, self.n_output), dtype=self.float_type)\n self.cost_mask = np.zeros((xtdim, batch_size, self.n_output), dtype=self.float_type)\n # strength of input noise\n self._sigma_x = config['sigma_x'] * math.sqrt(2./self.config['alpha'])\n\n if config['rule_name'] == 'timed_spatial_reproduction_broad_tuning' \\\n or config['rule_name'] == 'spatial_reproduction_broad_tuning' \\\n or config['rule_name'] == 'spatial_comparison_broad_tuning' \\\n or config['rule_name'] == 'spatial_change_detection_broad_tuning':\n self.n_guassianline = 32 + 12\n self.sd_gaussianline = 4.\n else:\n self.n_guassianline = 32\n self.sd_gaussianline = 2.\n\n self.pref_line_gaussian = np.arange(0, self.n_guassianline)", "def branch(configs, weights):\n\n nconfig = configs.configs.shape[0]\n probability = np.cumsum(weights)\n wtot = probability[-1]\n base = np.random.rand()\n newinds = np.searchsorted(\n probability, (base + np.linspace(0, wtot, nconfig)) % wtot\n )\n configs.resample(newinds)\n weights.fill(wtot / nconfig)\n return configs, weights", "def config_step(self):\n api = self.api\n # bisect_config may come as a FrozenDict (which is not serializable).\n bisect_config = dict(self.bisect_config)\n\n def fix_windows_backslashes(s):\n backslash_regex = re.compile(r'(?<!\\\\)\\\\(?!\\\\)')\n return backslash_regex.sub(r'\\\\', s)\n\n for k, v in bisect_config.iteritems():\n if isinstance(v, 
basestring):\n bisect_config[k] = fix_windows_backslashes(v)\n # We sort the keys to prevent problems with orders changing when\n # recipe_simulation_test compares against expectation files.\n config_string = json.dumps(bisect_config, indent=2, sort_keys=True)\n result = api.m.step('config', [])\n config_lines = config_string.splitlines()\n result.presentation.logs['Bisect job configuration'] = config_lines", "def sample_configuration_dist(config, root=True, num_samples_per_dist=1):\n if isinstance(config, dict):\n return {\n k: sample_configuration_dist(\n v, root=False, num_samples_per_dist=num_samples_per_dist)\n for k, v in sorted(config.items())\n }\n elif isinstance(config, list) and root:\n return [\n sample_configuration_dist(\n c, root=False, num_samples_per_dist=num_samples_per_dist)\n for c in config\n ]\n elif callable(config):\n return [config() for _ in range(num_samples_per_dist)]\n else:\n return config", "def evaluate_config(p, n):\n \n classifier = clas.Classifier()\n hog = fe.HOG(orientations=orientations, \n pixels_per_cell=pixels_per_cell, \n cells_per_block=cells_per_block)\n \n classifier.extractor = hog\n classifier.evaluate(mode=\"train\")", "def get_configuration_sample(config, root=True):\n if isinstance(config, dict):\n return {\n k: get_configuration_sample(v, root=False)\n for k, v in sorted(config.items())\n }\n elif isinstance(config, list):\n if root:\n return get_configuration_sample(\n config[np.random.randint(len(config))], root=False)\n else:\n return config[np.random.randint(len(config))]\n elif callable(config):\n return config()\n else:\n return config", "def run(bench, budget):\n\n # Get the set of hypeparameter configuration space possible in this benchmark\n cs = bench.get_configuration_space()\n\n ##############################################################################\n # Begin implementation\n ##############################################################################\n bomo = BOMO(cs)\n\n for _ in range(budget):\n i = bomo.sample()\n sample = bench.objective_function(cs[i])\n print(\"Sample:\", sample)\n bomo.fit_predict(sample)\n\n ##############################################################################\n # End implementation\n ##############################################################################\n # This needs to be called at the end of a run\n bench.done()", "def make_batman_config(tmin, tmax, tstep, wmin, wmax, wnum, wlog=True, suffix=\"\", path=\".\"):\n params = {}\n params[\"curves_fname\"] = p.join(path, 'batmanCurves{}.csv'.format(suffix))\n params[\"params_fname\"] = p.join(path, 'batmanParams{}.csv'.format(suffix))\n params[\"tmin\"] = tmin\n params[\"tmax\"] = tmax\n params[\"tstep\"] = tstep\n params[\"wmin\"] = wmin\n params[\"wmax\"] = wmax\n params[\"wnum\"] = wnum\n params[\"wlog\"] = wlog\n\n outfile = p.join(path, 'batmanConfig{}.param'.format(suffix))\n with open(outfile, \"w+\") as f:\n json.dump(params, f)\n print(\"Batman config written to {}\".format(outfile))", "def _configure(self, config):\n self.friction_coef = config['friction_coef']\n self.num_cone_faces = config['num_cone_faces']\n self.num_samples = config['grasp_samples_per_surface_point']\n self.dir_prior = config['dir_prior']\n self.target_num_grasps = config['target_num_grasps']\n if self.target_num_grasps is None:\n self.target_num_grasps = config['min_num_grasps']\n\n self.min_contact_dist = config['min_contact_dist']\n self.num_grasp_rots = config['coll_check_num_grasp_rots']\n if 'max_num_surface_points' in config.keys():\n 
self.max_num_surface_points_ = config['max_num_surface_points']\n else:\n self.max_num_surface_points_ = 100", "def init_config(self):\n super().init_config()\n for param in self.parameters():\n if param.name == 'source':\n continue\n self.add_config_item(param.name,\n saver=lambda p=param: getattr(p, \"value\"),\n loader=lambda x, p=param: setattr(p, \"value\", x),\n default=param.default)", "def get_config(self):\n config = {'kernel_initializer': initializers.serialize(self.kernel_initializer),\n 'activation': self.activation.__name__,\n 'dim_ordering': self.dim_ordering,\n 'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),\n 'eps': self.eps,\n 'cov_mode': self.cov_mode\n }\n base_config = super(SecondaryStatistic, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))", "def get_parameters(self, config: Dict[str, Scalar]) -> NDArrays:\n _ = (self, config)\n return []", "def __init__(self, config, tdim, batch_size):\n self.float_type = 'float32' # This should be the default\n self.config = config\n self.dt = self.config['dt']\n\n self.n_eachring = self.config['n_eachring']\n self.n_input = self.config['n_input']\n self.n_output = self.config['n_output']\n self.pref = np.arange(0,2*np.pi,2*np.pi/self.n_eachring) # preferences\n\n self.batch_size = batch_size\n self.tdim = tdim\n self.x = np.zeros((tdim, batch_size, self.n_input), dtype=self.float_type)\n #self.input_loc = list()# add by yichen\n #self.output_loc = list()# add by yichen\n #self.distract_loc = list()# add by yichen\n self.y = np.zeros((tdim, batch_size, self.n_output), dtype=self.float_type)\n if self.config['loss_type'] == 'lsq':\n self.y[:,:,:] = 0.05\n # y_loc is the stimulus location of the output, -1 for fixation, (0,2 pi) for response\n self.y_loc = -np.ones((tdim, batch_size) , dtype=self.float_type)\n\n self._sigma_x = config['sigma_x']*np.sqrt(2/config['alpha'])", "def main(config, black_box_function=None, output_file=\"\"):\r\n start_time = (datetime.datetime.now())\r\n run_directory = config[\"run_directory\"]\r\n hypermapper_mode = config[\"hypermapper_mode\"][\"mode\"]\r\n\r\n # Start logging\r\n log_file = deal_with_relative_and_absolute_path(run_directory, config[\"log_file\"])\r\n sys.stdout.change_log_file(log_file)\r\n if (hypermapper_mode == 'client-server'):\r\n sys.stdout.switch_log_only_on_file(True)\r\n\r\n # Log the json configuration for this optimization\r\n sys.stdout.write_to_logfile(str(config) + \"\\n\")\r\n\r\n # Create parameter space object and unpack hyperparameters from json\r\n param_space = space.Space(config)\r\n application_name = config[\"application_name\"]\r\n optimization_metrics = config[\"optimization_objectives\"]\r\n optimization_iterations = config[\"optimization_iterations\"]\r\n evaluations_per_optimization_iteration = config[\"evaluations_per_optimization_iteration\"]\r\n batch_mode = evaluations_per_optimization_iteration > 1\r\n number_of_cpus = config[\"number_of_cpus\"]\r\n print_importances = config[\"print_parameter_importance\"]\r\n epsilon_greedy_threshold = config[\"epsilon_greedy_threshold\"]\r\n acquisition_function = config[\"acquisition_function\"]\r\n weight_sampling = config[\"weight_sampling\"]\r\n scalarization_method = config[\"scalarization_method\"]\r\n scalarization_key = config[\"scalarization_key\"]\r\n doe_type = config[\"design_of_experiment\"][\"doe_type\"]\r\n number_of_doe_samples = config[\"design_of_experiment\"][\"number_of_samples\"]\r\n model_type = config[\"models\"][\"model\"]\r\n 
optimization_method = config[\"optimization_method\"]\r\n time_budget = config[\"time_budget\"]\r\n input_params = param_space.get_input_parameters()\r\n number_of_objectives = len(optimization_metrics)\r\n objective_limits = {}\r\n data_array = {}\r\n fast_addressing_of_data_array = {}\r\n objective_bounds = None\r\n exhaustive_search_data_array = None\r\n normalize_objectives = False\r\n debug = False\r\n\r\n if \"feasible_output\" in config:\r\n feasible_output = config[\"feasible_output\"]\r\n feasible_output_name = feasible_output[\"name\"]\r\n enable_feasible_predictor = feasible_output[\"enable_feasible_predictor\"]\r\n enable_feasible_predictor_grid_search_on_recall_and_precision = feasible_output[\"enable_feasible_predictor_grid_search_on_recall_and_precision\"]\r\n feasible_predictor_grid_search_validation_file = feasible_output[\"feasible_predictor_grid_search_validation_file\"]\r\n feasible_parameter = param_space.get_feasible_parameter()\r\n number_of_trees = config[\"models\"][\"number_of_trees\"]\r\n\r\n if (weight_sampling == \"bounding_box\"):\r\n objective_bounds = {}\r\n user_bounds = config[\"bounding_box_limits\"]\r\n if (len(user_bounds) == 2):\r\n if (user_bounds[0] > user_bounds[1]):\r\n user_bounds[0], user_bounds[1] = user_bounds[1], user_bounds[0]\r\n for objective in optimization_metrics:\r\n objective_bounds[objective] = user_bounds\r\n objective_limits[objective] = user_bounds\r\n elif (len(user_bounds) == number_of_objectives*2):\r\n idx = 0\r\n for objective in optimization_metrics:\r\n objective_bounds[objective] = user_bounds[idx:idx+2]\r\n if (objective_bounds[objective][0] > objective_bounds[objective][1]):\r\n objective_bounds[objective][0], objective_bounds[objective][1] = objective_bounds[objective][1], objective_bounds[objective][0]\r\n objective_limits[objective] = objective_bounds[objective]\r\n idx += 2\r\n else:\r\n print(\"Wrong number of bounding boxes, expected 2 or\", 2*number_of_objectives, \"got\", len(user_bounds))\r\n raise SystemExit\r\n else:\r\n for objective in optimization_metrics:\r\n objective_limits[objective] = [float(\"inf\"), float(\"-inf\")]\r\n\r\n if output_file == \"\":\r\n output_data_file = config[\"output_data_file\"]\r\n if output_data_file == \"output_samples.csv\":\r\n output_data_file = application_name + \"_\" + output_data_file\r\n else:\r\n output_data_file = output_file\r\n\r\n exhaustive_search_data_array = None\r\n exhaustive_search_fast_addressing_of_data_array = None\r\n if hypermapper_mode == 'exhaustive':\r\n exhaustive_file = config[\"hypermapper_mode\"][\"exhaustive_search_file\"]\r\n exhaustive_search_data_array, exhaustive_search_fast_addressing_of_data_array = param_space.load_data_file(exhaustive_file, debug=False, number_of_cpus=number_of_cpus)\r\n\r\n # Check if some parameters are correctly defined\r\n if hypermapper_mode == \"default\":\r\n if black_box_function == None:\r\n print(\"Error: the black box function must be provided\")\r\n raise SystemExit\r\n if not callable(black_box_function):\r\n print(\"Error: the black box function parameter is not callable\")\r\n raise SystemExit\r\n\r\n if (model_type == \"gaussian_process\") and (acquisition_function == \"TS\"):\r\n print(\"Error: The TS acquisition function with Gaussian Process models is still under implementation\")\r\n print(\"Using EI acquisition function instead\")\r\n config[\"acquisition_function\"] = \"EI\"\r\n\r\n if number_of_cpus > 1:\r\n print(\"Warning: HyperMapper supports only sequential execution for now. 
Running on a single cpu.\")\r\n number_of_cpus = 1\r\n\r\n # If priors are present, use prior-guided optimization\r\n user_priors = False\r\n for input_param in config[\"input_parameters\"]:\r\n if config[\"input_parameters\"][input_param][\"prior\"] != \"uniform\":\r\n if number_of_objectives == 1:\r\n user_priors = True\r\n else:\r\n print(\"Warning: prior optimization does not work with multiple objectives yet, priors will be uniform\")\r\n config[\"input_parameters\"][input_param][\"prior\"] = \"uniform\"\r\n\r\n if user_priors:\r\n bo_method = prior_guided_optimization\r\n else:\r\n bo_method = random_scalarizations\r\n normalize_objectives = True\r\n\r\n ### Resume previous optimization, if any\r\n beginning_of_time = param_space.current_milli_time()\r\n absolute_configuration_index = 0\r\n doe_t0 = datetime.datetime.now()\r\n if config[\"resume_optimization\"] == True:\r\n resume_data_file = config[\"resume_optimization_data\"]\r\n\r\n if not resume_data_file.endswith('.csv'):\r\n print(\"Error: resume data file must be a CSV\")\r\n raise SystemExit\r\n if resume_data_file == \"output_samples.csv\":\r\n resume_data_file = application_name + \"_\" + resume_data_file\r\n\r\n data_array, fast_addressing_of_data_array = param_space.load_data_file(resume_data_file, debug=False, number_of_cpus=number_of_cpus)\r\n absolute_configuration_index = len(data_array[list(data_array.keys())[0]]) # get the number of points evaluated in the previous run\r\n beginning_of_time = beginning_of_time - data_array[param_space.get_timestamp_parameter()[0]][-1] # Set the timestamp back to match the previous run\r\n print(\"Resumed optimization, number of samples = %d .......\" % absolute_configuration_index)\r\n\r\n ### DoE phase\r\n if absolute_configuration_index < number_of_doe_samples:\r\n configurations = []\r\n default_configuration = param_space.get_default_or_random_configuration()\r\n str_data = param_space.get_unique_hash_string_from_values(default_configuration)\r\n if str_data not in fast_addressing_of_data_array:\r\n fast_addressing_of_data_array[str_data] = absolute_configuration_index\r\n configurations.append(default_configuration)\r\n absolute_configuration_index += 1\r\n\r\n doe_configurations = []\r\n if absolute_configuration_index < number_of_doe_samples:\r\n doe_configurations = param_space.get_doe_sample_configurations(\r\n fast_addressing_of_data_array,\r\n number_of_doe_samples-absolute_configuration_index,\r\n doe_type)\r\n configurations += doe_configurations\r\n print(\"Design of experiment phase, number of new doe samples = %d .......\" % len(configurations))\r\n\r\n doe_data_array = param_space.run_configurations(\r\n hypermapper_mode,\r\n configurations,\r\n beginning_of_time,\r\n black_box_function,\r\n exhaustive_search_data_array,\r\n exhaustive_search_fast_addressing_of_data_array,\r\n run_directory,\r\n batch_mode=batch_mode)\r\n data_array = concatenate_data_dictionaries(\r\n data_array,\r\n doe_data_array,\r\n param_space.input_output_and_timestamp_parameter_names)\r\n absolute_configuration_index = number_of_doe_samples\r\n iteration_number = 1\r\n else:\r\n iteration_number = absolute_configuration_index - number_of_doe_samples + 1\r\n\r\n # If we have feasibility constraints, we must ensure we have at least one feasible and one infeasible sample before starting optimization\r\n # If this is not true, continue design of experiment until the condition is met\r\n if enable_feasible_predictor:\r\n while are_all_elements_equal(data_array[feasible_parameter[0]]) and 
optimization_iterations > 0:\r\n print(\"Warning: all points are either valid or invalid, random sampling more configurations.\")\r\n print(\"Number of doe samples so far:\", absolute_configuration_index)\r\n configurations = param_space.get_doe_sample_configurations(fast_addressing_of_data_array, 1, \"random sampling\")\r\n new_data_array = param_space.run_configurations(\r\n hypermapper_mode,\r\n configurations,\r\n beginning_of_time,\r\n black_box_function,\r\n exhaustive_search_data_array,\r\n exhaustive_search_fast_addressing_of_data_array,\r\n run_directory,\r\n batch_mode=batch_mode)\r\n data_array = concatenate_data_dictionaries(\r\n new_data_array,\r\n data_array,\r\n param_space.input_output_and_timestamp_parameter_names)\r\n absolute_configuration_index += 1\r\n optimization_iterations -= 1\r\n\r\n # Create output file with explored configurations from resumed run and DoE\r\n with open(deal_with_relative_and_absolute_path(run_directory, output_data_file), 'w') as f:\r\n w = csv.writer(f)\r\n w.writerow(param_space.get_input_output_and_timestamp_parameters())\r\n tmp_list = [param_space.convert_types_to_string(j, data_array) for j in param_space.get_input_output_and_timestamp_parameters()]\r\n tmp_list = list(zip(*tmp_list))\r\n for i in range(len(data_array[optimization_metrics[0]])):\r\n w.writerow(tmp_list[i])\r\n\r\n for objective in optimization_metrics:\r\n lower_bound = min(objective_limits[objective][0], min(data_array[objective]))\r\n upper_bound = max(objective_limits[objective][1], max(data_array[objective]))\r\n objective_limits[objective] = [lower_bound, upper_bound]\r\n print(\"\\nEnd of doe/resume phase, the number of evaluated configurations is: %d\\n\" %absolute_configuration_index)\r\n sys.stdout.write_to_logfile((\"End of DoE - Time %10.4f sec\\n\" % ((datetime.datetime.now() - doe_t0).total_seconds())))\r\n if doe_type == \"grid_search\" and optimization_iterations > 0:\r\n print(\"Warning: DoE is grid search, setting number of optimization iterations to 0\")\r\n optimization_iterations = 0\r\n\r\n ### Main optimization loop\r\n bo_t0 = datetime.datetime.now()\r\n run_time = (datetime.datetime.now() - start_time).total_seconds() / 60\r\n # run_time / time_budget < 1 if budget > elapsed time or budget == -1\r\n if time_budget > 0:\r\n print('starting optimization phase, limited to run for ', time_budget, ' minutes')\r\n elif time_budget == 0:\r\n print('Time budget cannot be zero. 
To not limit runtime set time_budget = -1')\r\n sys.exit()\r\n\r\n configurations = []\r\n evaluation_budget = optimization_iterations * evaluations_per_optimization_iteration\r\n iteration_number = 0\r\n evaluation_count = 0\r\n while evaluation_count < evaluation_budget and run_time / time_budget < 1:\r\n if evaluation_count % evaluations_per_optimization_iteration == 0:\r\n iteration_number += 1\r\n print(\"Starting optimization iteration\", iteration_number)\r\n iteration_t0 = datetime.datetime.now()\r\n\r\n model_t0 = datetime.datetime.now()\r\n regression_models,_,_ = models.generate_mono_output_regression_models(\r\n data_array,\r\n param_space,\r\n input_params,\r\n optimization_metrics,\r\n 1.00,\r\n config,\r\n model_type=model_type,\r\n number_of_cpus=number_of_cpus,\r\n print_importances=print_importances,\r\n normalize_objectives=normalize_objectives,\r\n objective_limits=objective_limits)\r\n\r\n classification_model = None\r\n if enable_feasible_predictor:\r\n classification_model,_,_ = models.generate_classification_model(application_name,\r\n param_space,\r\n data_array,\r\n input_params,\r\n feasible_parameter,\r\n 1.00,\r\n config,\r\n debug,\r\n number_of_cpus=number_of_cpus,\r\n data_array_exhaustive=exhaustive_search_data_array,\r\n enable_feasible_predictor_grid_search_on_recall_and_precision=enable_feasible_predictor_grid_search_on_recall_and_precision,\r\n feasible_predictor_grid_search_validation_file=feasible_predictor_grid_search_validation_file,\r\n print_importances=print_importances)\r\n model_t1 = datetime.datetime.now()\r\n sys.stdout.write_to_logfile((\"Model fitting time %10.4f sec\\n\" % ((model_t1 - model_t0).total_seconds())))\r\n if (weight_sampling == \"bounding_box\"):\r\n objective_weights = sample_weight_bbox(optimization_metrics, objective_bounds, objective_limits, 1)[0]\r\n elif (weight_sampling == \"flat\"):\r\n objective_weights = sample_weight_flat(optimization_metrics, 1)[0]\r\n else:\r\n print(\"Error: unrecognized option:\", weight_sampling)\r\n raise SystemExit\r\n\r\n data_array_scalarization, _ = compute_data_array_scalarization(\r\n data_array,\r\n objective_weights,\r\n objective_limits,\r\n scalarization_method)\r\n data_array[scalarization_key] = data_array_scalarization.tolist()\r\n\r\n epsilon = random.uniform(0,1)\r\n local_search_t0 = datetime.datetime.now()\r\n if epsilon > epsilon_greedy_threshold:\r\n best_configuration = bo_method(\r\n config,\r\n data_array,\r\n param_space,\r\n fast_addressing_of_data_array,\r\n regression_models,\r\n iteration_number,\r\n objective_weights,\r\n objective_limits,\r\n classification_model)\r\n\r\n else:\r\n sys.stdout.write_to_logfile(str(epsilon) + \" < \" + str(epsilon_greedy_threshold) + \" random sampling a configuration to run\\n\")\r\n tmp_fast_addressing_of_data_array = copy.deepcopy(fast_addressing_of_data_array)\r\n best_configuration = param_space.random_sample_configurations_without_repetitions(tmp_fast_addressing_of_data_array, 1)[0]\r\n local_search_t1 = datetime.datetime.now()\r\n sys.stdout.write_to_logfile((\"Local search time %10.4f sec\\n\" % ((local_search_t1 - local_search_t0).total_seconds())))\r\n\r\n configurations.append(best_configuration)\r\n\r\n # When we have selected \"evaluations_per_optimization_iteration\" configurations, evaluate the batch\r\n if evaluation_count % evaluations_per_optimization_iteration == (evaluations_per_optimization_iteration - 1):\r\n black_box_function_t0 = datetime.datetime.now()\r\n new_data_array = param_space.run_configurations(\r\n 
hypermapper_mode,\r\n configurations,\r\n beginning_of_time,\r\n black_box_function,\r\n exhaustive_search_data_array,\r\n exhaustive_search_fast_addressing_of_data_array,\r\n run_directory,\r\n batch_mode=batch_mode)\r\n black_box_function_t1 = datetime.datetime.now()\r\n sys.stdout.write_to_logfile((\"Black box function time %10.4f sec\\n\" % ((black_box_function_t1 - black_box_function_t0).total_seconds())))\r\n\r\n # If running batch BO, we will have some liars in fast_addressing_of_data, update them with the true value\r\n for configuration_idx in range(len(new_data_array[list(new_data_array.keys())[0]])):\r\n configuration = get_single_configuration(new_data_array, configuration_idx)\r\n str_data = param_space.get_unique_hash_string_from_values(configuration)\r\n if str_data in fast_addressing_of_data_array:\r\n absolute_index = fast_addressing_of_data_array[str_data]\r\n for header in configuration:\r\n data_array[header][absolute_index] = configuration[header]\r\n else:\r\n fast_addressing_of_data_array[str_data] = absolute_configuration_index\r\n absolute_configuration_index += 1\r\n for header in configuration:\r\n data_array[header].append(configuration[header])\r\n\r\n # and save results\r\n with open(deal_with_relative_and_absolute_path(run_directory, output_data_file), 'a') as f:\r\n w = csv.writer(f)\r\n tmp_list = [param_space.convert_types_to_string(j, new_data_array) for j in list(param_space.get_input_output_and_timestamp_parameters())]\r\n tmp_list = list(zip(*tmp_list))\r\n for i in range(len(new_data_array[optimization_metrics[0]])):\r\n w.writerow(tmp_list[i])\r\n configurations = []\r\n else:\r\n # If we have not selected all points in the batch yet, add the model prediction as a 'liar'\r\n for header in best_configuration:\r\n data_array[header].append(best_configuration[header])\r\n\r\n bufferx = [tuple(best_configuration.values())]\r\n prediction_means, _ = models.compute_model_mean_and_uncertainty(bufferx, regression_models, model_type, param_space)\r\n for objective in prediction_means:\r\n data_array[objective].append(prediction_means[objective][0])\r\n\r\n if classification_model is not None:\r\n classification_prediction_results = models.model_probabilities(bufferx,classification_model,param_space)\r\n true_value_index = classification_model[feasible_parameter[0]].classes_.tolist().index(True)\r\n feasibility_indicator = classification_prediction_results[feasible_parameter[0]][:,true_value_index]\r\n data_array[feasible_output_name].append(True if feasibility_indicator[0] >= 0.5 else False)\r\n\r\n data_array[param_space.get_timestamp_parameter()[0]].append(absolute_configuration_index)\r\n str_data = param_space.get_unique_hash_string_from_values(best_configuration)\r\n fast_addressing_of_data_array[str_data] = absolute_configuration_index\r\n absolute_configuration_index += 1\r\n\r\n\r\n for objective in optimization_metrics:\r\n lower_bound = min(objective_limits[objective][0], min(data_array[objective]))\r\n upper_bound = max(objective_limits[objective][1], max(data_array[objective]))\r\n objective_limits[objective] = [lower_bound, upper_bound]\r\n\r\n evaluation_count += 1\r\n run_time = (datetime.datetime.now() - start_time).total_seconds() / 60\r\n\r\n sys.stdout.write_to_logfile((\"Total iteration time %10.4f sec\\n\" % ((datetime.datetime.now() - iteration_t0).total_seconds())))\r\n sys.stdout.write_to_logfile((\"End of BO phase - Time %10.4f sec\\n\" % ((datetime.datetime.now() - bo_t0).total_seconds())))\r\n\r\n print(\"End of Bayesian 
Optimization\")\r\n sys.stdout.write_to_logfile((\"Total script time %10.2f sec\\n\" % ((datetime.datetime.now() - start_time).total_seconds())))", "def config():\n experiment_dir = './experiments'\n simulation_steps = 1000\n device = 'cpu'\n path_to_molecules = os.path.join(experiment_dir, 'data/ethanol.xyz')\n simulation_dir = os.path.join(experiment_dir, 'simulation')\n training_dir = os.path.join(experiment_dir, 'training')\n model_path = os.path.join(training_dir, 'best_model')\n overwrite = True", "def config():\n\n compared_algorithms_type: AlgorithmsType = AlgorithmsType.LinearRegression\n compared_methods: List = [] # Leave empty for using all solvers.\n numpy_distribution: NumpyDistribution = NumpyDistribution.IntelDistribution\n used_database: DatabaseType = DatabaseType.Synthetic\n experiment_type: ExperimentType = ExperimentType.RunTimeExperiment\n cross_validation_folds: int = 1\n n_alphas: int = 100\n reduction_factor: int = 1\n\n run_time_experiments_config: Dict[str, range] = {\n \"run_time_compared_data_sizes\": range(int(5000 / reduction_factor), int(15000 / reduction_factor),\n int(5000 / reduction_factor)),\n \"calc_transpose_dot_residuals\": compared_algorithms_type == AlgorithmsType.LinearRegression\n }\n number_of_alphas_experiments_config: Dict[str, range] = {\n \"alphas_range\": range(1, 221, 20)\n }\n\n synthetic_data_config: Dict[str, int] = {\n \"data_size\": int(15000 / reduction_factor),\n \"features_num\": 7\n }\n\n sketch_preconditioned_config: Dict[str, float] = {\n \"sampled_rows\": 0.005,\n \"switch_sign_probability\": 0.5,\n \"min_sampled_rows\": 100.0\n }\n resources_path: str = r'Resources'\n results_path: str = r'Results'\n clusters_count: int = _choose_clusters_num(used_database, synthetic_data_config[\"features_num\"])\n elastic_net_factor: float = 0.5 # Rho factor in Elastic-Net regularization.\n is_positive_definite: bool = True", "def __init__(self, in_features, out_features):\n \n ########################\n # PUT YOUR CODE HERE #\n #######################\n\n\n self.params = {'weight': 0.0001 * np.random.randn(out_features, in_features), 'bias': np.zeros((out_features, 1))}\n self.grads = {'weight': np.zeros((out_features, in_features)), 'bias': np.zeros((out_features, 1))}\n\n\n\n ########################\n # END OF YOUR CODE #\n #######################", "def get_config(self):\n config = {\n 'F_': self.F_,\n 'attn_heads': self.attn_heads,\n 'attn_heads_reduction': self.attn_heads_reduction,\n 'edge_type_reduction': self.edge_type_reduction,\n 'attention_type': self.attention_type,\n 'attn_dropout': self.attn_dropout,\n 'feature_dropout': self.feature_dropout,\n 'activation': self.activation,\n 'use_value_bias': self.use_value_bias,\n 'use_key_bias': self.use_key_bias,\n 'kernel_initializer': self.kernel_initializer,\n 'bias_initializer': self.bias_initializer,\n 'attn_kernel_initializer': self.attn_kernel_initializer,\n 'attn_bias_initalizer': self.attn_bias_initializer,\n 'kernel_regularizer': self.kernel_regularizer,\n 'bias_regularizer': self.bias_regularizer,\n 'attn_kernel_regularizer': self.attn_kernel_regularizer,\n 'attn_bias_regularizer': self.attn_bias_regularizer,\n 'activity_regularizer': self.activity_regularizer,\n 'kernel_constraint': self.kernel_constraint,\n 'bias_constraint': self.bias_constraint,\n 'attn_kernel_constraint': self.attn_kernel_constraint,\n 'attn_bias_constraint': self.attn_bias_constraint\n }\n base_config = super(BatchShawMultigraphAttention, self).get_config()\n return dict(list(base_config.items())) + 
list(config.items())", "def get_prob_params():\n prob = Namespace()\n prob.study_name = STUDY_NAME\n if IS_DEBUG:\n prob.num_trials = 3\n prob.max_capital = 10\n else:\n prob.num_trials = NUM_TRIALS\n prob.max_capital = MAX_CAPITAL\n # Common\n prob.time_distro = TIME_DISTRO\n prob.num_workers = NUM_WORKERS\n _study_params = {\n 'branin': ('synthetic/branin/config_mf.json',\n branin_mf, cost_branin_mf, 0.1, 0, 1),\n 'hartmann3_2': ('synthetic/hartmann3_2/config_mf.json',\n hartmann3_2_mf, cost_hartmann3_2_mf, 0.1, 0, 1),\n 'hartmann6_4': ('synthetic/hartmann6_4/config_mf.json',\n hartmann6_4_mf, cost_hartmann6_4_mf, 0.1, 0, 1),\n 'borehole_6': ('synthetic/borehole_6/config_mf.json',\n borehole_6_mf, cost_borehole_6_mf, 1, 0, 1),\n 'park2_4': ('synthetic/park2_4/config_mf.json',\n park2_4_mf, cost_park2_4_mf, 0.3, 0, 1),\n 'park2_3': ('synthetic/park2_3/config_mf.json',\n park2_3_mf, cost_park2_3_mf, 0.1, 0, 1),\n 'park1_3': ('synthetic/park1_3/config_mf.json',\n park1_3_mf, cost_park1_3_mf, 0.5, 0, 1),\n }\n (domain_config_file_suffix, raw_func, raw_fidel_cost_func, _fc_noise_scale,\n _initial_pool_size, _) = _study_params[prob.study_name]\n domain_config_file = os.path.join(DRAGONFLY_EXPERIMENTS_DIR, domain_config_file_suffix)\n # noisy\n prob.noisy_evals = NOISY_EVALS\n if NOISY_EVALS:\n noise_type = 'gauss'\n noise_scale = _fc_noise_scale\n else:\n noise_type = 'no_noise'\n noise_scale = None\n # Create domain, function_caller and worker_manager\n config = load_config_file(domain_config_file)\n func_caller = get_multifunction_caller_from_config(raw_func, config,\n raw_fidel_cost_func=raw_fidel_cost_func, noise_type=noise_type,\n noise_scale=noise_scale)\n # Set max_capital\n if hasattr(func_caller, 'fidel_cost_func'):\n prob.max_capital = prob.max_capital * \\\n func_caller.fidel_cost_func(func_caller.fidel_to_opt)\n else:\n prob.max_capital = prob.max_capital\n # Store everything in prob\n prob.func_caller = func_caller\n prob.worker_manager = SyntheticWorkerManager(prob.num_workers,\n time_distro='caller_eval_cost')\n prob.save_file_prefix = prob.study_name + ('-debug' if IS_DEBUG else '')\n prob.methods = METHODS\n prob.save_results_dir = SAVE_RESULTS_DIR\n prob.reporter = get_reporter('default')\n # evaluation options\n prob.evaluation_options = Namespace(prev_eval_points='none',\n initial_pool_size=_initial_pool_size)\n return prob", "def get_config_parameter(config):\n\n selected_event = config['selected_event']\n datasource_raw_data = config['datasource_raw_data']['database']\n measurement_raw = config['datasource_raw_data']['measurement']\n measurement_enriched = config['datasource_enriched_data']['measurement']\n datasource_enriched_data = config['datasource_enriched_data']['database']\n datasource_marked_data = config['datasource_marked_data']['database']\n datasource_predicted_data = config['datasource_predicted_data']['database']\n start_time = config['timeframe'][0]\n end_time = config['timeframe'][1]\n register_dict = config['register_dict']\n required_registers = config[f\"{selected_event}_register\"]\n events = config[selected_event]\n measurement_predicted = config['datasource_predicted_data']['measurement']\n return selected_event, datasource_raw_data, measurement_raw, start_time, end_time, register_dict, \\\n required_registers, datasource_enriched_data, datasource_marked_data, \\\n measurement_enriched, events, datasource_predicted_data, measurement_predicted", "def __init__(self, config, tdim, batch_size):\n self.float_type = 'float32' # This should be the default\n 
self.config = config\n self.dt = self.config['dt']\n\n self.n_eachring = self.config['n_eachring']\n self.n_input = self.config['n_input']\n self.n_output = self.config['n_output']\n self.pref = np.arange(0,2*np.pi,2*np.pi/self.n_eachring) # preferences\n\n self.batch_size = batch_size\n self.tdim = tdim\n self.x = np.zeros((tdim, batch_size, self.n_input), dtype=self.float_type)\n self.y = np.zeros((tdim, batch_size, self.n_output), dtype=self.float_type)\n if self.config['loss_type'] == 'lsq':\n self.y[:,:,:] = 0.05\n # y_loc is the stimulus location of the output, -1 for fixation, (0,2 pi) for response\n self.y_loc = -np.ones((tdim, batch_size) , dtype=self.float_type)\n\n self._sigma_x = config['sigma_x']*np.sqrt(2/config['alpha'])", "def get_model_config(model_name, args):\n if model_name == 'WaveGlow':\n model_config = dict(\n n_mel_channels=args.n_mel_channels,\n n_flows=args.flows,\n n_group=args.groups,\n n_early_every=args.early_every,\n n_early_size=args.early_size,\n WN_config=dict(\n n_layers=args.wn_layers,\n kernel_size=args.wn_kernel_size,\n n_channels=args.wn_channels\n )\n )\n return model_config\n elif model_name == 'FastPitch':\n model_config = dict(\n # io\n n_mel_channels=args.n_mel_channels,\n # symbols\n n_symbols=len(get_symbols(args.symbol_set)),\n padding_idx=get_pad_idx(args.symbol_set),\n symbols_embedding_dim=args.symbols_embedding_dim,\n # input FFT\n in_fft_n_layers=args.in_fft_n_layers,\n in_fft_n_heads=args.in_fft_n_heads,\n in_fft_d_head=args.in_fft_d_head,\n in_fft_conv1d_kernel_size=args.in_fft_conv1d_kernel_size,\n in_fft_conv1d_filter_size=args.in_fft_conv1d_filter_size,\n in_fft_output_size=args.in_fft_output_size,\n p_in_fft_dropout=args.p_in_fft_dropout,\n p_in_fft_dropatt=args.p_in_fft_dropatt,\n p_in_fft_dropemb=args.p_in_fft_dropemb,\n # output FFT\n out_fft_n_layers=args.out_fft_n_layers,\n out_fft_n_heads=args.out_fft_n_heads,\n out_fft_d_head=args.out_fft_d_head,\n out_fft_conv1d_kernel_size=args.out_fft_conv1d_kernel_size,\n out_fft_conv1d_filter_size=args.out_fft_conv1d_filter_size,\n out_fft_output_size=args.out_fft_output_size,\n p_out_fft_dropout=args.p_out_fft_dropout,\n p_out_fft_dropatt=args.p_out_fft_dropatt,\n p_out_fft_dropemb=args.p_out_fft_dropemb,\n # duration predictor\n dur_predictor_kernel_size=args.dur_predictor_kernel_size,\n dur_predictor_filter_size=args.dur_predictor_filter_size,\n p_dur_predictor_dropout=args.p_dur_predictor_dropout,\n dur_predictor_n_layers=args.dur_predictor_n_layers,\n # pitch predictor\n pitch_predictor_kernel_size=args.pitch_predictor_kernel_size,\n pitch_predictor_filter_size=args.pitch_predictor_filter_size,\n p_pitch_predictor_dropout=args.p_pitch_predictor_dropout,\n pitch_predictor_n_layers=args.pitch_predictor_n_layers,\n # pitch conditioning\n pitch_embedding_kernel_size=args.pitch_embedding_kernel_size,\n # speakers parameters\n n_speakers=args.n_speakers,\n speaker_emb_weight=args.speaker_emb_weight,\n # energy predictor\n energy_predictor_kernel_size=args.energy_predictor_kernel_size,\n energy_predictor_filter_size=args.energy_predictor_filter_size,\n p_energy_predictor_dropout=args.p_energy_predictor_dropout,\n energy_predictor_n_layers=args.energy_predictor_n_layers,\n # energy conditioning\n energy_conditioning=args.energy_conditioning,\n energy_embedding_kernel_size=args.energy_embedding_kernel_size,\n )\n return model_config\n\n else:\n raise NotImplementedError(model_name)", "def get_config(self, config_path):\n with open(config_path, 'r') as json_fh:\n config_dict = 
json.load(json_fh)\n self.task2model_dicts = config_dict['task2models']\n agg_method = config_dict['aggregation_method']\n if agg_method == 'max':\n self.aggregation_fn = torch.max\n elif agg_method == 'mean':\n self.aggregation_fn = torch.mean\n else:\n raise ValueError('Invalid configuration: {} = {} (expected \"max\" or \"mean\")'.format('aggregation_method', agg_method))", "def get_config(self):\n config = {\n }\n base_config = super(MatrixConcat, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))", "def test_config():\n import yaml\n import astropy.units as u\n from tqdm import tqdm\n # Need these for `eval` below\n from numpy import array\n\n # Same test suite as used in test_imsim above.\n # This time, we just use this for the det names.\n with open(DATA_DIR / \"wcs_466749.yaml\", 'r') as f:\n wcss = yaml.safe_load(f)\n\n cmds = {}\n with open(DATA_DIR / \"phosim_cat_466749.txt\", 'r') as f:\n for line in f:\n k, v = line.split()\n try:\n v = int(v)\n except ValueError:\n try:\n v = float(v)\n except ValueError:\n pass\n cmds[k] = v\n\n # Values below (and others) from phosim_cat_466749.txt\n rc = cmds['rightascension']\n dc = cmds['declination']\n boresight = galsim.CelestialCoord(\n rc*galsim.degrees,\n dc*galsim.degrees\n )\n obstime = Time(cmds['mjd'], format='mjd', scale='utc')\n obstime -= 15*u.s\n band = \"ugrizy\"[cmds['filter']]\n wavelength_dict = dict(\n u=365.49,\n g=480.03,\n r=622.20,\n i=754.06,\n z=868.21,\n y=991.66\n )\n wavelength = wavelength_dict[band]\n camera = imsim.get_camera()\n\n rotTelPos = cmds['rottelpos'] * galsim.degrees\n telescope = imsim.load_telescope(f\"LSST_{band}.yaml\", rotTelPos=rotTelPos)\n # Non-default values.\n temperature = 293.\n pressure = 69.0\n H2O_pressure = 2.0\n\n factory = imsim.BatoidWCSFactory(\n boresight, obstime, telescope, wavelength,\n camera,\n temperature=temperature,\n pressure=pressure,\n H2O_pressure=H2O_pressure\n )\n\n config = {\n 'input': {\n 'telescope': {\n 'file_name':f\"LSST_{band}.yaml\",\n 'rotTelPos': rotTelPos\n }\n },\n 'image': {\n 'wcs': {\n 'type': 'Batoid',\n 'boresight': boresight,\n 'camera': 'LsstCam',\n 'obstime': obstime,\n 'wavelength': wavelength,\n 'temperature': temperature,\n 'pressure': pressure,\n 'H2O_pressure': H2O_pressure,\n 'order': 2,\n }\n }\n }\n\n rng = np.random.default_rng(1234)\n for k in tqdm(wcss.keys()):\n name = k[18:25].replace('-', '_')\n det = camera[name]\n\n wcs1 = factory.getWCS(det, order=2)\n config['image']['wcs']['det_name'] = name\n galsim.config.RemoveCurrent(config['image']['wcs'])\n galsim.config.ProcessInput(config)\n wcs2 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n\n # Test points\n xs = rng.uniform(0, 4000, 100)\n ys = rng.uniform(0, 4000, 100)\n ra1, dec1 = wcs1.xyToradec(xs, ys, units='radians')\n ra2, dec2 = wcs2.xyToradec(xs, ys, units='radians')\n np.testing.assert_allclose(ra1, ra2)\n np.testing.assert_allclose(dec1, dec2)\n\n # Test == when identical\n galsim.config.RemoveCurrent(config['image']['wcs'])\n wcs3 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n assert wcs3 == wcs2\n\n # Test that pressure and temperature matter.\n config['image']['wcs']['temperature'] = 250\n galsim.config.RemoveCurrent(config['image']['wcs'])\n wcs4 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n assert wcs4 != wcs2\n\n config['image']['wcs']['temperature'] = temperature\n config['image']['wcs']['pressure'] = 55\n galsim.config.RemoveCurrent(config['image']['wcs'])\n wcs5 = 
galsim.config.BuildWCS(config['image'], 'wcs', config)\n assert wcs5 != wcs2\n\n config['image']['wcs']['pressure'] = pressure\n config['image']['wcs']['H2O_pressure'] = 10\n galsim.config.RemoveCurrent(config['image']['wcs'])\n wcs6 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n assert wcs6 != wcs2\n\n # Test defaults\n del config['image']['wcs']['temperature']\n del config['image']['wcs']['pressure']\n del config['image']['wcs']['H2O_pressure']\n galsim.config.RemoveCurrent(config['image']['wcs'])\n config = galsim.config.CleanConfig(config)\n galsim.config.ProcessInput(config)\n wcs7 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n default_pressure = 101.325 * (1-2.25577e-5*2715)**5.25588\n wcs7a = imsim.BatoidWCSFactory(\n boresight, obstime, telescope, wavelength, camera,\n temperature=280,\n pressure=default_pressure,\n H2O_pressure=1.0,\n ).getWCS(det, order=2)\n assert wcs7 == wcs7a\n\n # Default wavelength from bandpass\n del config['image']['wcs']['wavelength']\n config['bandpass'] = imsim.RubinBandpass('r')\n galsim.config.RemoveCurrent(config['image']['wcs'])\n config = galsim.config.CleanConfig(config)\n galsim.config.ProcessInput(config)\n wcs8 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n wcs8a = imsim.BatoidWCSFactory(\n boresight, obstime, telescope,\n wavelength=config['bandpass'].effective_wavelength,\n camera=camera,\n temperature=280,\n pressure=default_pressure,\n H2O_pressure=1.0,\n ).getWCS(det, order=2)\n assert wcs8 == wcs8a\n\n del config['bandpass']\n config['image']['bandpass'] = {\n 'type': 'RubinBandpass',\n 'band' : 'r',\n }\n galsim.config.RemoveCurrent(config['image']['wcs'])\n config = galsim.config.CleanConfig(config)\n galsim.config.ProcessInput(config)\n wcs8b = galsim.config.BuildWCS(config['image'], 'wcs', config)\n assert wcs8b == wcs8a\n\n # Obstime can be a string\n print('obstime = ',obstime.to_value('iso'), type(obstime.to_value('iso')))\n config['image']['wcs']['obstime'] = obstime.to_value('iso')\n # Doesn't quite roundtrip perfectly. 
But within a millisecond.\n obstime = Time(obstime.to_value('iso'), scale='tai')\n print('obstime => ',obstime)\n galsim.config.RemoveCurrent(config['image']['wcs'])\n config = galsim.config.CleanConfig(config)\n galsim.config.ProcessInput(config)\n wcs9 = galsim.config.BuildWCS(config['image'], 'wcs', config)\n wcs9a = imsim.BatoidWCSFactory(\n boresight, obstime, telescope,\n wavelength=config['bandpass'].effective_wavelength,\n camera=camera,\n temperature=280,\n pressure=default_pressure,\n H2O_pressure=1.0,\n ).getWCS(det, order=2)\n assert wcs9 == wcs9a", "def _config_bay(bay, baymodel, cfg_dir, force=False):\n if baymodel.coe == 'kubernetes':\n return _config_bay_kubernetes(bay, baymodel, cfg_dir, force)\n elif baymodel.coe == 'swarm':\n return _config_bay_swarm(bay, baymodel, cfg_dir, force)", "def configure_parameters(self):\n optim = Adam(self.parameters(),lr=1e-3)\n criterion = nn.BCELoss()\n return (optim,criterion)", "def _read_config(self):\n if not os.path.exists(self.config_file):\n raise Exception(\"Can't read the SGDM config file\")\n config = toml.load(self.config_file)\n\n self.initial_model = config[\"initial_model\"]\n self.alpha = config[\"alpha\"]\n self.beta = config[\"beta\"] # decay factor for first moments\n self.smoothing_timestep = config[\"smoothing_timestep\"]\n\n # Perturbation decay per iteration as a percentage of the relative\n # deviation to the initial model\n self.perturbation_decay = config[\"perturbation_decay\"]\n self.roughness_decay_type = config[\"roughness_decay_type\"]\n if self.roughness_decay_type not in [\"relative_perturbation\", \"absolute\"]:\n raise Exception(\n \"Roughness decay type should be either \"\n \"'relative_perturbation' or 'absolute'\"\n )\n self.update_smoothing_length = config[\"update_smoothing_length\"]\n self.roughness_decay_smoothing_length = config[\n \"roughness_decay_smoothing_length\"\n ]\n\n # Gradient scaling factor to avoid issues with floats, this should be constant throughout the inversion\n self.grad_scaling_fac = config[\"gradient_scaling_factor\"]\n # Regularization parameter to avoid dividing by zero\n if \"max_iterations\" in config.keys():\n self.max_iterations = config[\"max_iterations\"]\n else:\n self.max_iterations = None", "def __init__(self, \n num_vars, \n num_hidden,\n training_inputs = None,\n algorithm = None,\n algorithm_dict = None,\n batch_size = None,\n use_momentum = None,\n W0= None, \n b0= None, \n bhid0 = None,\n zero_diag = True,\n symmetric = True,\n report_p_tilda =False,\n learn_biases = True,\n test_mode= False,\n training = True):\n \n self.num_vars = num_vars\n \n self.num_hidden = num_hidden\n \n self.batch_size = batch_size\n \n self.zero_diag = zero_diag\n \n self.algorithm = algorithm\n \n self.num_samples = 0\n \n self.num_u_gibbs = 0\n \n self.gibbs_steps = 0\n \n self.resample = False\n \n self.uniform = False\n \n self.mixture = False\n \n self.mix_params = []\n \n self.m_params = []\n \n self.mf_steps = 0\n \n self.alpha = 0\n \n self.learn_biases = learn_biases\n \n if isinstance(algorithm_dict, dict):\n \n for param in algorithm_dict.keys():\n \n if param == 'resample':\n \n self.resample = algorithm_dict[param]\n \n if param == 'mf_steps':\n \n self.mf_steps = algorithm_dict[param]\n \n if param == \"gibbs_steps\":\n \n self.gibbs_steps = algorithm_dict[param]\n \n if param == \"num_samples\":\n \n self.num_samples = algorithm_dict[param]\n \n if param == \"num_u_gibbs\":\n \n self.num_u_gibbs = algorithm_dict[param]\n \n if param == \"uniform\":\n \n self.uniform = 
algorithm_dict[param] \n \n if param == \"mixture\":\n \n self.mixture = algorithm_dict[param] \n \n if param == \"mix_params\":\n \n self.mix_params = algorithm_dict[param] \n \n if param == \"alpha\" and algorithm_dict[param] != None:\n #### alpha defines transition rate from\n #### uniform to mean-field distribution\n self.alpha = algorithm_dict[param] \n \n self.m_params = (1-self.alpha)*0.5*np.ones([1,self.num_vars])+\\\n self.alpha*np.mean(training_inputs,0)\n \n self.use_momentum = use_momentum\n \n self.report_p_tilda = report_p_tilda\n \n self.side = int(np.sqrt(self.num_vars))\n \n self.np_rand_gen = np.random.RandomState(1234)\n \n self.theano_rand_gen =\\\n theano.sandbox.rng_mrg.MRG_RandomStreams(self.np_rand_gen.randint(2**30))\n \n #self.theano_rand_gen =\\\n #T.shared_randomstreams.RandomStreams(self.np_rand_gen.randint(2**30))\n \n theano.config.exception_verbosity = 'high'\n \n self.node_indices = \\\n theano.shared(np.arange(self.num_vars), name=\"node_indices\")\n \n self.x = T.matrix('x')\n \n self.x_tilda = T.matrix('x_tilda')\n \n self.sampler_theta = T.matrix('sampler_theta')\n \n self.symmetric = symmetric\n \n if training:\n \n if self.num_hidden ==0:\n \n self.num_x2 = self.num_vars\n \n elif self.num_hidden > 0 :\n \n self.num_x2 = self.num_hidden\n \n self.updates = OrderedDict()\n \n self.N_train = training_inputs.shape[0]\n \n self.train_inputs = theano.shared(np.asarray(training_inputs,\n dtype=theano.config.floatX),\n borrow= True)\n \n self.learning_rate = T.dscalar('learning_rate')\n \n if self.mixture:\n \n print(\"Importance distribution was specified as mixture\"+\\\n \" of Bernoulli products\")\n \n if self.mix_params == []:\n print(\"Error: parameters defining mixture means were\"+\\\n \" not provided\")\n sys.exit()\n \n self.set_mixture_means(inputs = training_inputs)\n \n if use_momentum:\n \n print(\"Will add momentum term to gradient computations\")\n \n self.momentum = T.dscalar('learning_rate')\n \n self.grad_vec = {}\n \n self.grad_vec['W'] = theano.shared(np.zeros([self.num_vars, self.num_x2],\n dtype = theano.config.floatX), name = 'W_momentum', borrow = True)\n \n if self.num_hidden > 0:\n \n self.grad_vec['bhid'] = theano.shared(np.zeros([self.num_x2],\n dtype = theano.config.floatX), name = 'b_momentum', borrow = True)\n \n self.grad_vec['b'] = theano.shared(np.zeros([self.num_vars],\n dtype = theano.config.floatX), name = 'b_momentum', borrow = True)\n \n if test_mode:\n \n b_init =self.np_rand_gen.uniform(0,1, num_vars)\n \n W_init =self.np_rand_gen.uniform(0,1, size = (num_vars, num_vars))\n \n # also tested ones\n # b_init = np.ones(num_vars)\n \n # W_init = np.ones([num_vars, num_vars])\n \n self.b_init= np.asarray(b_init, dtype = theano.config.floatX)\n \n self.W_init= np.asarray(W_init, dtype = theano.config.floatX)\n \n self.b = theano.shared(self.b_init, name='b', borrow = False)\n \n self.W = theano.shared(self.W_init, name='W', borrow = False)\n \n print(\"Initialized with test mode\")\n \n else:\n \n if W0 is None:\n \n if self.num_hidden > 0:\n \n W0_init =\\\n self.np_rand_gen.uniform(\n -4*np.sqrt(6.0/(self.num_vars+self.num_hidden)),\\\n 4*np.sqrt(6.0 /(self.num_vars + self.num_hidden)), \n size = (num_vars, self.num_hidden)\n )\n \n W0 = np.asarray(W0_init, dtype = theano.config.floatX) \n \n if self.num_hidden == 0:\n \n # different W initializations: \n \n # W0_init =\\\n # self.np_rand_gen.uniform(-np.sqrt(3.0/(num_vars)),\\\n # np.sqrt(3.0 / (num_vars)), size = (num_vars, num_vars))\n \n # W0_init =\\\n # 
self.np_rand_gen.uniform(-0.00000001,\\\n # 0.00000001, size = (num_vars, num_vars))\n \n W0_init = 0.00000001*\\\n self.np_rand_gen.normal(size = (num_vars, self.num_x2)) \n \n W0 = np.asarray(W0_init, dtype = theano.config.floatX)\n \n if self.symmetric:\n \n W0 = (W0 + np.transpose(W0))/2.0\n \n if self.zero_diag:\n \n W0 = W0 - np.diag(np.diag(W0))\n \n self.W = theano.shared(value= W0, name='W', borrow=True)\n \n if self.num_hidden == 0:\n \n test_W = self.W.get_value() \n \n assert sum(np.diag(test_W)) == 0.0\n \n assert (test_W == np.transpose(test_W)).all() == True\n \n else:\n print(\"W is initialized with provided array\")\n self.W = theano.shared(value= W0, name='W', borrow=True)\n \n if b0 is None:\n \n bias_init = np.zeros(num_vars, dtype = theano.config.floatX)\n \n self.b = theano.shared(value= bias_init, name='b', borrow=True)\n \n else:\n print(\"b vector is initialized with provided vector\")\n self.b = theano.shared(value= b0, name='b', borrow=True)\n \n if bhid0 is None and self.num_hidden > 0:\n \n hbias_init = np.zeros(self.num_hidden, dtype = theano.config.floatX)\n \n self.bhid = theano.shared(value= hbias_init, name='bhid', borrow=True)\n \n elif (bhid0 is not None) and (self.num_hidden > 0):\n print(\"bhid vector is initialized with provided vector\") \n self.bhid = theano.shared(value= bhid0, name='bhid', borrow=True)\n \n self.theta = [self.W, self.b]\n \n if self.num_hidden > 0 :\n \n self.theta.append(self.bhid)\n \n self.train_set = set(range(self.N_train))\n \n self.minibatch_set = T.ivector('minibatch_set')\n \n self.sample_set = T.ivector('sample_set')\n \n if \"CD\" in self.algorithm and self.num_hidden ==0:\n \n self.x_gibbs= theano.shared(np.ones([self.batch_size,self.num_vars],\n dtype=theano.config.floatX),\n borrow = True, name= \"x_gibbs\")\n \n if \"CD\" in self.algorithm and self.num_hidden > 0:\n \n self.persistent_gibbs =\\\n theano.shared(np.ones([self.batch_size,self.num_hidden],\n dtype=theano.config.floatX),\n borrow = True, \n name= \"persistent_gibbs\")\n \n if \"CSS\" in self.algorithm and self.mf_steps > 0:\n \n init_mf_vis = self.np_rand_gen.uniform(0, \n 1, \n size =(self.num_vars,1))\n \n init_mf_vis = np.asarray(init_mf_vis, dtype = theano.config.floatX)\n \n self.mf_vis_p = theano.shared(init_mf_vis, \n name= \"mf_vis_p\", \n borrow= True)\n \n if self.num_hidden > 0:\n \n init_mf_hid = \\\n self.np_rand_gen.uniform(0, 1, size =(self.num_hidden,1))\n \n init_mf_hid = np.asarray(init_mf_hid, \n dtype = theano.config.floatX)\n \n self.mf_hid_p = theano.shared(init_mf_hid, \n name= \"mf_hid_p\", \n borrow= True)\n \n elif \"CSS\" in self.algorithm and self.gibbs_steps > 0: \n \n if self.num_hidden ==0: \n self.x_gibbs= theano.shared(np.ones([self.batch_size,self.num_vars],\n dtype=theano.config.floatX),\n borrow = True, name= \"x_gibbs\")", "def init_parameters(obj, hyperparameters):\n # Initialize Global Configuration Parameter\n params = hyperparameters['global']\n setattr(obj, 'param', params)\n\n # Initialize Attributes (Pre-Checked Parameters)\n setattr(obj, 'learning_rate', params['learning_rate'])\n setattr(obj, 'loss', params['loss'])\n setattr(obj, 'max_iter', params['max_iter'])\n\n if params['loss'] == 'least_squares':\n setattr(obj, 'num_classes', 1)\n elif params['loss'] in ['binary_crossentropy', 'categorical_crossentropy', 'auto']:\n setattr(obj, 'num_classes', params['num_classes'])\n\n # Initialize Attributes (Optional Values - Based on Default Parameters)\n if 'l2_regularization' not in params or 
params['l2_regularization'] is None:\n setattr(obj, 'l2_regularization', 0)\n else:\n setattr(obj, 'l2_regularization', params['l2_regularization'])\n\n if 'max_bins' not in params:\n setattr(obj, 'max_bins', 255)\n else:\n setattr(obj, 'max_bins', params['max_bins'])\n\n if 'max_depth' not in params or params['max_depth'] is None:\n setattr(obj, 'max_depth', None)\n else:\n setattr(obj, 'max_depth', params['max_depth'])\n\n if 'max_leaf_nodes' not in params or params['max_leaf_nodes'] is None:\n setattr(obj, 'max_leaf_nodes', 31)\n else:\n setattr(obj, 'max_leaf_nodes', params['max_leaf_nodes'])\n\n if 'min_samples_leaf' not in params or params['min_samples_leaf'] is None:\n setattr(obj, 'min_samples_leaf', 20)\n else:\n setattr(obj, 'min_samples_leaf', params['min_samples_leaf'])\n\n if 'random_state' in params:\n setattr(obj, 'random_state', params['random_state'])\n else:\n setattr(obj, 'random_state', None)\n\n if 'scoring' in params:\n setattr(obj, 'scoring', params['scoring'])\n else:\n setattr(obj, 'scoring', None)\n\n if 'verbose' not in params or params['verbose'] is None:\n setattr(obj, 'verbose', False)\n else:\n setattr(obj, 'verbose', True)\n\n return obj", "def evaluate_config(rnd: int):\n val_steps = 5 if rnd < 4 else 10\n return {\"val_steps\": val_steps}", "def get_config():\n config = ml_collections.ConfigDict()\n config.seed = 42\n\n config.eval_num = 30000\n config.eval_avg_num = 3\n config.num_train_steps = -1\n config.log_loss_every_steps = 1000\n config.eval_every_steps = 1000\n config.checkpoint_every_steps = 5000\n\n config.dataset = \"mscoco\"\n config.coco_version = \"2014\"\n config.data_dir = \"data/\"\n config.return_text = False\n config.return_filename = False\n\n config.trial = 0 # dummy for repeated runs.\n config.beta1 = 0.5\n config.beta2 = 0.999\n config.d_lr = 0.0004\n config.g_lr = 0.0001\n config.polyak_decay = 0.999\n config.show_num = 64\n config.shuffle_buffer_size = 1000\n config.batch_norm_group_size = -1\n config.dtype = \"bfloat16\"\n config.train_shuffle = True\n\n config.image_size = 128\n config.batch_size = 56\n config.eval_batch_size = 7\n\n config.df_dim = 96\n config.gf_dim = 96\n config.z_dim = 128\n config.num_epochs = 500\n config.model_name = \"xmc\"\n config.d_step_per_g_step = 2\n config.g_spectral_norm = False\n config.d_spectral_norm = True\n config.architecture = \"xmc_net\"\n config.gamma_for_g = 15\n config.word_contrastive = True\n config.sentence_contrastive = True\n config.image_contrastive = True\n config.pretrained_image_contrastive = True\n config.cond_size = 16\n\n return config", "def manipulator(self):\n m = manipulator.ConfigurationManipulator()\n self.goals={}\n self.trace = {}\n self.weight = {}\n weightTotal = 0\n for n in range(self.args.num_params):\n pname = 'P%03d' % (n)\n randbool = random.randint(0,1) == 0\n if self.args.use_enum_param:\n m.add_parameter(manipulator.EnumParameter(pname, ['on', 'off']))\n self.goals[pname] = 'off' if randbool else 'on'\n elif self.args.use_int_param:\n m.add_parameter(manipulator.IntegerParameter(pname, 256, 257))\n self.goals[pname] = 256 if randbool else 257\n else:\n m.add_parameter(manipulator.BooleanParameter(pname))\n self.goals[pname] = randbool \n self.trace[pname] = {}\n weight = n*n + 1\n self.weight[pname] = weight\n weightTotal += weight\n \n self.maxscore = weightTotal + 1\n self.best = self.maxscore\n self.bestcfg = None\n if False:\n print 'goals = ',\n pprint(self.goals)\n \n return m", "def _run_single_config(self, train_ratio, config):\n X_train, X_test = 
self._generate_embeddings(config)\n model_str = config[\"model\"]\n model_tuples = [(model_str, MODELS[model_str])]\n is_multi = (self.class_label == \"Multiclass\")\n metrics_df = evaluate_classifiers(model_tuples, X_train, self.tr_label, X_test, self.te_label, multiclass=is_multi, show_confusion_matrix=True, verbose=False)\n metrics_df[\"class\"] = self.class_label\n #append parameter values to dataframe\n for key, value in config.items():\n metrics_df[key] = value\n return metrics_df", "def from_config(config: dict):\n pass", "def getActualConfig(self) -> object:\n if not self.debug:\n ntraces = int(self.getNumberOfTraces())\n traces = []\n for i in range(1,ntraces+1):\n self.selectTrace(i)\n data = self.getData()\n if i == 1:\n title = \"S11\"\n elif i == 2:\n title = \"S21\"\n elif i == 3:\n title = \"S12\"\n else:\n title = \"S22\"\n trace={\n 'number': i,\n 'xMin': self.getStartFrequency(),\n 'xMax': self.getStopFrequency(),\n 'yMin': self.getmindbm(i),#min([x['y'] for x in data]), #getmindbm(),\n 'yMax': self.getmaxdbm(i),#max([x['y'] for x in data]), #getmaxdbm(),\n 'xScale': \"linear\",#self.getxscale()\n 'yScale': \"linear\",#self.getyscale(),\n 'type': \"bode\",#self.getTypeFormat(),\n 'title': title,#self.getTraceTitle(i),\n 'xLabel': \"Freq [Hz]\",#getxLabel(),\n 'yLabel': \"dBm\", #getyLabel()\n 'data': data,\n 'yPDiv': self.getYPDiv(i)\n }\n traces.append(trace) \n ret = {\n 'traces': traces, \n 'sweepResolution': self.getSweepResolution(),\n 'IFBW': self.getIFBW() \n }\n else:\n trace1 = {\n 'number': 1,\n 'xMin': 100,\n 'xMax': 1000,\n 'yMin': 100,\n 'yMax': 1000,\n 'xScale': 'logarithmic',\n 'yScale': 'logarithmic',\n 'type': 'bode',\n 'title': 'S11',\n 'xLabel': 'Freq',\n 'yLabel': 'dBm',\n 'yPDiv': 10,\n 'data': [\n {'x': 100,'y': 100},\n {'x': 200,'y': 150},\n {'x': 500,'y': 300},\n {'x': 1000,'y': 800}\n ]\n }\n trace2 = {\n 'number': 2,\n 'xMin': 1,\n 'xMax': 100,\n 'yMin': 1,\n 'yMax': 1000,\n 'xScale': 'linear',\n 'yScale': 'linear',\n 'type': 'bode',\n 'title': 'S21',\n 'xLabel': 'Freq',\n 'yLabel': 'dBm',\n 'yPDiv': 10,\n 'data': [\n {'x': 1,'y': 100},\n {'x': 20,'y': 250},\n {'x': 50,'y': 200},\n {'x': 100,'y': 600}\n ]\n }\n trace3 = {\n 'number': 3,\n 'xMin': 500,\n 'xMax': 10000,\n 'yMin': 100,\n 'yMax': 10000,\n 'xScale': 'linear',\n 'yScale': 'logarithmic',\n 'type': 'bode',\n 'title': 'S12',\n 'xLabel': 'Freq',\n 'yLabel': 'dBm',\n 'yPDiv': 10,\n 'data': [\n {'x': 500,'y': 100},\n {'x': 2000,'y': 1000},\n {'x': 5000,'y': 3000},\n {'x': 10000,'y': 8000}\n ]\n }\n trace4 = {\n 'number': 4,\n 'xMin': 100,\n 'xMax': 10000,\n 'yMin': 500,\n 'yMax': 10000,\n 'xScale': 'logarithmic',\n 'yScale': 'linear',\n 'type': 'bode',\n 'title': 'S22',\n 'xLabel': 'Freq',\n 'yLabel': 'dBm',\n 'yPDiv': 10,\n 'data': [\n {'x': 100,'y': 500},\n {'x': 2000,'y': 5000},\n {'x': 5000,'y': 2000},\n {'x': 10000,'y': 4000}\n ]\n }\n ret = {\n 'traces': [ trace1, trace2, trace3, trace4 ], \n 'sweepResolution': 401,\n 'IFBW': 10000 \n }\n return ret", "def run(bench, budget):\n\n # Get the set of hypeparameter configuration space possible in this benchmark\n cs = bench.get_configuration_space()\n\n ##############################################################################\n # Begin implementation\n ##############################################################################\n popsize=5\n cmaes = CMAES(cs, pop=popsize)\n\n for i in range(int(budget/popsize)):\n pool = cmaes.sample()\n evals = []\n for i in pool:\n eval = bench.objective_function(cs[i])\n evals.append(eval)\n 
print(\"Sample:\", eval)\n cmaes.fit_predict(evals)\n\n ##############################################################################\n # End implementation\n ##############################################################################\n # This needs to be called at the end of a run\n bench.done()", "def __init__(self, config):\n self.config = config\n self.observation_space = spaces.Box(\n low=np.array([-15, -15, -15, -15, -15]),\n high=np.array([15, 15, 15, 15, 15])\n )\n self.action_space = spaces.Box(\n low=np.array([0, 0]),\n high=np.array([15, 5])\n )\n if config[\"backend\"] == \"EH\":\n self.params = {\n \"a_1\": 1,\n \"b_1\": 3,\n \"c_1\": 1,\n \"d_1\": 5,\n \"I_ext1\": 3.1,\n \"m\": 0,\n \"a_2\": 6,\n \"tau_2\": 10,\n \"I_ext2\": .45,\n \"gamma\": .01,\n \"r\": .00035,\n \"s\": 4,\n \"x0\": -1.6,\n }\n self.sim = EHSim()\n else:\n self.params = {\n \"x0\": -1.6,\n \"y0\": 1,\n \"tau0\": 2857,\n \"tau1\": 1,\n \"tau2\": 10,\n \"I_rst1\": 3.1,\n \"I_rst2\": .45,\n \"gamma\": .01,\n }\n self.sim = JSim()\n self.x1 = 0\n self.y1 = -5\n self.z = 5.5\n self.x2 = 0\n self.y2 = 0\n self.frame = 0\n self.curr_stim = []\n self.reset()\n self.history = list()", "def update_config(config, args):\n if args.cfg:\n _update_config_from_file(config, args.cfg)\n config.defrost()\n if args.dataset:\n config.DATA.DATASET = args.dataset\n if args.batch_size:\n config.DATA.BATCH_SIZE = args.batch_size\n config.DATA.BATCH_SIZE_EVAL = args.batch_size\n if args.batch_size_eval:\n config.DATA.BATCH_SIZE_EVAL = args.batch_size_eval\n if args.image_size:\n config.DATA.IMAGE_SIZE = args.image_size\n if args.accum_iter:\n config.TRAIN.ACCUM_ITER = args.accum_iter\n if args.data_path:\n config.DATA.DATA_PATH = args.data_path\n if args.output:\n config.SAVE = args.output\n if args.eval:\n config.EVAL = True\n if args.pretrained:\n config.MODEL.PRETRAINED = args.pretrained\n if args.resume:\n config.MODEL.RESUME = args.resume\n if args.last_epoch:\n config.TRAIN.LAST_EPOCH = args.last_epoch\n if args.amp: # only for training\n config.AMP = not config.EVAL\n config.freeze()\n return config", "def get_config():\n config = dict(\n name=\"defaults\",\n # Either use geometric, zipf, or uniform i.e., data variable\n # can take one of \"geometric\", \"zipf\", \"uniform\".\n distribution=\"zipf\",\n lbd_geometric=0.8,\n degree_zipf=1.0,\n # Flags to indicate which methods to compare.\n run_approx_miracle=False,\n run_miracle=False,\n run_modified_miracle=True,\n run_ss=True,\n run_rhr=True,\n encoding_type=\"fast\", # Can take either fast or normal\n # Common parameters.\n num_itr=1,\n coding_cost=14,\n coding_cost_multiplier=1,\n approx_coding_cost_multiplier=3,\n approx_t=6,\n # Specific parameters (leave them as they are for now).\n delta=10**(-6),\n alpha=1.0,\n # Variation.\n vary=\"eps\", # Can take one of \"cc\", \"k\", \"n\", \"eps\".\n cc_space=[6, 8, 10, 12, 14],\n k_space=[200, 400, 600, 800, 1000],\n n_space=[2000, 4000, 6000, 8000, 10000],\n eps_space=list(range(1, 9)),\n # Defaults.\n n=5000,\n k=500,\n t=3,\n epsilon_target=6,\n )\n config = config_dict.ConfigDict(config)\n config.lock() # Prevent addition of new fields.\n return config", "def __init__(self, shape, config, dropout_probability=0.0):\n self.l_rate_bound = config['learning_rate_bounds']\n self.l_rate = self.l_rate_bound[1]\n self.decay_rate = config['decay_rate']\n self.default_dropout_chance = dropout_probability\n self.dropout_probability = self.default_dropout_chance\n self.momentum_parameter = config['momentum_parameter']\n\n 
self.epochs = config['epochs']\n self.loss_function = m.select_loss(config['loss'])\n self.batch_size = config['batch_size']\n\n self.batch_loss = 0.0\n\n # create input and output layers\n input_layer = InputLayer(shape[\"input\"], self.l_rate)\n output_layer = OutputLayer(shape[\"output\"], self.l_rate, loss=self.loss_function)\n\n # predictions\n self.predicts = []\n self.hit_count = 0.0\n\n # create hidden layers\n self.network = [input_layer]\n for layer in range(1, len(shape)-1):\n self.network.append(Layer(shape[\"hidden_\"+str(layer)], self.l_rate))\n self.network.append(output_layer)\n\n self.in_layer = self.network[0]\n self.out_layer = self.network[-1]\n\n # attach input and output\n self.in_layer.attach(None, self.network[1])\n self.out_layer.attach(self.network[-2], None)\n\n # attach the hidden layers\n for layer in range(1, len(self.network) - 1):\n self.network[layer].attach(self.network[layer - 1], self.network[layer + 1])", "def configure(self, config: ConfigParams):\n parameters = config.get_section(\"parameters\")\n if len(parameters) > 0:\n self.__parameters = parameters", "def _preprocess_config(self, config: Dict[str, Any]) -> Dict[str, Any]:\n return cast_config_values(\n {k: v for k, v in config.items() if k in self._hyperparameter_keys},\n config_space=self.config_space,\n )", "def config():", "def config():", "def copy_config(cfg):\n res= dict(cfg)\n #model_param = dict(cfg['model_param'])\n model_param = dict(cfg.get('model_param', {}))\n res['model_param'] = model_param\n return res", "def from_config(self, config):\n\n co = configobj.ConfigObj(config)\n try:\n if hasattr(config, 'filename') and config.filename is not None:\n if os.path.isfile(config.filename):\n log.info(\"Setting parameters from configuration \"\n \"file: {}\".format(\n os.path.abspath(config.filename)))\n else:\n log.info(\"Setting parameters from configuration \"\n \"input: {}\".format(config.filename))\n except (AttributeError, TypeError):\n pass\n for key in co:\n step = [s.strip() for s in key.split(':')]\n try:\n idx = int(step[0]) - 1\n name = step[1]\n if idx < 0 or idx >= len(self.stepnames) or \\\n self.stepnames[idx] != name:\n step = [name]\n raise ValueError(\"Parameter set and recipe do not match\")\n except (ValueError, KeyError, IndexError):\n name = step[0].strip()\n try:\n idx = self.stepnames.index(name)\n except ValueError:\n idx = None\n if idx is not None and 0 <= idx < len(self.stepnames):\n log.debug(\"Modifying parameters for \"\n \"step {} ({})\".format(idx, name))\n pset = self.current[idx]\n for pkey, pval in co[key].items():\n if pkey in pset:\n pval = self.fix_param_type(pval, pset[pkey]['dtype'])\n pset.set_value(pkey, pval)", "def config(self) -> InstrumentConfig:\n ...", "def get_config(seed, shot):\n if args.coco:\n # COCO\n assert args.two_stage, 'Only supports novel weights for COCO now'\n\n if args.novel_finetune:\n # Fine-tune novel classifier\n ITERS = {\n 1: (10000, 500),\n 2: (10000, 1500),\n 3: (10000, 1500),\n 5: (10000, 1500),\n 10: (10000, 2000),\n 30: (10000, 6000),\n }\n mode = 'novel'\n\n assert not args.fc and not args.unfreeze\n else:\n # Fine-tune entire classifier\n ITERS = {\n 1: (14400, 16000),\n 2: (28800, 32000),\n 3: (43200, 48000),\n 5: (72000, 80000),\n 10: (144000, 160000),\n 30: (216000, 240000),\n }\n mode = 'all'\n split = temp_split = ''\n temp_mode = mode\n\n config_dir = 'configs/COCO-detection'\n ckpt_dir = 'checkpoints/coco/faster_rcnn'\n base_cfg = '../../Base-RCNN-FPN.yaml'\n else:\n # PASCAL VOC\n assert not 
args.two_stage, 'Only supports random weights for PASCAL now'\n\n ITERS = {\n 1: (3500, 4000),\n 2: (7000, 8000),\n 3: (10500, 12000),\n 5: (17500, 20000),\n 10: (35000, 40000),\n }\n split = 'split{}'.format(args.split)\n mode = 'all{}'.format(args.split)\n # temp_split = 'split1'\n # temp_mode = 'all1'\n temp_split=split\n temp_mode = mode\n\n config_dir = 'configs/PascalVOC-detection'\n ckpt_dir = 'checkpoints/voc/faster_rcnn'\n base_cfg = '../../../Base-RCNN-FPN.yaml'\n\n seed_str = 'seed{}'.format(seed) if seed != 0 else ''\n fc = '_fc' if args.fc else ''\n unfreeze = '_unfreeze' if args.unfreeze else ''\n # Read an example config file for the config parameters\n temp = os.path.join(\n temp_split, 'faster_rcnn_R_101_FPN_ft{}_{}_1shot{}'.format(\n fc, temp_mode, unfreeze)\n )\n print('temp_file:', temp)\n config = os.path.join(args.root, config_dir, temp + '.yaml')\n print('config_file:', config)\n\n prefix = 'faster_rcnn_R_101_FPN_ft{}_{}_{}shot{}{}'.format(\n fc, mode, shot, unfreeze, args.suffix)\n print('prefix_file:', prefix)\n\n output_dir = os.path.join(args.root, ckpt_dir, seed_str)\n print('output_dir',output_dir)\n os.makedirs(output_dir, exist_ok=True)\n \n save_dir = os.path.join(\n args.root, config_dir, split, seed_str,\n )\n print('save_dir',save_dir)\n os.makedirs(save_dir, exist_ok=True)\n save_file = os.path.join(save_dir, prefix + '.yaml')\n print('save_file' , save_file)\n\n configs = load_yaml_file(config)\n print('reading from this config file ',config)\n configs['_BASE_'] = base_cfg\n configs['DATASETS']['TRAIN'] = make_tuple(configs['DATASETS']['TRAIN'])\n configs['DATASETS']['TEST'] = make_tuple(configs['DATASETS']['TEST'])\n if args.coco and not args.novel_finetune:\n ckpt_path = os.path.join(output_dir, prefix, 'model_reset_combine.pth')\n if not os.path.exists(ckpt_path):\n src2 = os.path.join(\n output_dir, 'faster_rcnn_R_101_FPN_ft_novel_{}shot{}'.format(\n shot, args.suffix),\n 'model_final.pth',\n )\n if not os.path.exists(src2):\n print('Novel weights do not exist. 
Please run with the ' + \\\n '--novel-finetune flag first.')\n assert False\n combine_cmd = 'python tools/ckpt_surgery.py --coco --method ' + \\\n 'combine --src1 checkpoints/coco/faster_rcnn/faster_rcnn' + \\\n '_R_101_FPN_base/model_final.pth --src2 {}'.format(src2) + \\\n ' --save-dir {}'.format(os.path.join(output_dir, prefix))\n run_cmd(combine_cmd)\n assert os.path.exists(ckpt_path)\n configs['MODEL']['WEIGHTS'] = ckpt_path\n elif not args.coco:\n configs['MODEL']['WEIGHTS'] = configs['MODEL']['WEIGHTS'].replace(\n 'base1', 'base' + str(args.split))\n for dset in ['TRAIN', 'TEST']:\n configs['DATASETS'][dset] = (\n configs['DATASETS'][dset][0].replace(\n temp_mode, 'all' + str(args.split)),\n )\n configs['DATASETS']['TRAIN'] = (\n configs['DATASETS']['TRAIN'][0].replace(\n '1shot', str(shot) + 'shot'\n ) + ('_{}'.format(seed_str) if seed_str != '' else ''),\n )\n configs['SOLVER']['BASE_LR'] = args.lr\n configs['SOLVER']['MAX_ITER'] = ITERS[shot][1]\n configs['SOLVER']['STEPS'] = (ITERS[shot][0],)\n configs['SOLVER']['CHECKPOINT_PERIOD'] = ITERS[shot][1] // args.ckpt_freq\n configs['OUTPUT_DIR'] = os.path.join(output_dir, prefix)\n\n if seed != 0:\n with open(save_file, 'w') as fp:\n yaml.dump(configs, fp)\n\n return save_file, configs", "def _init_config(self):\n self.config = self.config_template.specialize()\n print('MMH CONFIG:\\n' + str(self.config))", "def suggest_parameters(trial, config):\n\n # Get parameters from config\n parameters = config['params_' + config['model_name']]\n # Init parameters for optuna\n optuna_parameters = dict()\n for key in parameters.keys():\n if parameters[key][0] == 'int':\n optuna_parameters[key] = trial.suggest_int(key, parameters[key][1], parameters[key][2])\n elif parameters[key][0] == 'uniform':\n optuna_parameters[key] = trial.suggest_uniform(key, parameters[key][1], parameters[key][2])\n elif parameters[key][0] == 'categorical':\n optuna_parameters[key] = trial.suggest_categorical(key, parameters[key][1])\n elif parameters[key][0] == 'loguniform':\n optuna_parameters[key] = trial.suggest_loguniform(key, parameters[key][1], parameters[key][2])\n return optuna_parameters", "def init_input_pipeline(self, config):\n\n ######################\n # Calibrate parameters\n ######################\n\n print('Initiating input pipelines')\n\n # Update num classes in config\n config.num_classes = self.num_classes - len(self.ignored_labels)\n config.ignored_label_inds = [self.label_to_idx[ign_label] for ign_label in self.ignored_labels]\n\n print('ignored_label_inds:')\n print(config.ignored_label_inds)\n\n # Update network model in config\n config.network_model = self.network_model\n\n print('network_model:')\n print(config.network_model)\n\n # Calibrate generators to batch_num\n print('Calibrate generators to batch_num')\n self.batch_limit = self.calibrate_batches(config)\n\n # From config parameter, compute higher bound of neighbors number in a neighborhood\n hist_n = int(np.ceil(4 / 3 * np.pi * (config.density_parameter + 1) ** 3))\n\n # Initiate neighbors limit with higher bound\n print('Initiate neighbors limit with higher bound')\n self.neighborhood_limits = np.full(config.num_layers, hist_n, dtype=np.int32)\n\n # Calibrate max neighbors number\n print('Calibrate max neighbors number')\n self.calibrate_neighbors(config)\n\n ################################\n # Initiate tensorflow parameters\n ################################\n\n # Reset graph\n print('Reset graph')\n tf.reset_default_graph()\n\n # Set random seed (You also have to set it in 
network_architectures.weight_variable)\n #np.random.seed(42)\n #tf.set_random_seed(42)\n\n # Get generator and mapping function\n print('Get generator')\n gen_function, gen_types, gen_shapes = self.get_batch_gen('training', config)\n gen_function_val, _, _ = self.get_batch_gen('validation', config)\n print('Get mapping function')\n map_func = self.get_tf_mapping(config)\n\n ##################\n # Training dataset\n ##################\n\n # Create batched dataset from generator\n self.train_data = tf.data.Dataset.from_generator(gen_function,\n gen_types,\n gen_shapes)\n\n self.train_data = self.train_data.map(map_func=map_func, num_parallel_calls=self.num_threads)\n\n # Prefetch data\n self.train_data = self.train_data.prefetch(10)\n\n ##############\n # Test dataset\n ##############\n\n # Create batched dataset from generator\n self.val_data = tf.data.Dataset.from_generator(gen_function_val,\n gen_types,\n gen_shapes)\n\n # Transform inputs\n self.val_data = self.val_data.map(map_func=map_func, num_parallel_calls=self.num_threads)\n\n # Prefetch data\n self.val_data = self.val_data.prefetch(10)\n\n #################\n # Common iterator\n #################\n\n # create a iterator of the correct shape and type\n iter = tf.data.Iterator.from_structure(self.train_data.output_types, self.train_data.output_shapes)\n self.flat_inputs = iter.get_next()\n\n # create the initialisation operations\n self.train_init_op = iter.make_initializer(self.train_data)\n self.val_init_op = iter.make_initializer(self.val_data)", "def __init__(self, input_dim=(1, 28, 28), num_classes=10):\n self.params = {}\n\n #######################################################################\n # TODO: Initialize weights and biases for the convolutional neural #\n # network. Weights should be initialized from a Gaussian distribution;#\n # biases should be initialized to zero. All weights and biases should #\n # be stored in the dictionary self.params. 
#\n #######################################################################\n\n filter_size = 5\n weight_scale = 1e-2\n num_filters = 6\n hidden_dim = 784\n\n #****** THIS WAS TO TEST OUT FASTER NETWORKS *******\n\n self.params['W1'] = np.random.normal(scale=weight_scale, size=(num_filters, input_dim[0], filter_size, filter_size))\n # self.params['W2'] = np.random.normal(scale=weight_scale, size=(num_filters, 6, filter_size, filter_size))\n self.params['W3'] = np.random.normal(scale=weight_scale, size=(864, num_classes))\n\n # self.params['W3'] = np.random.normal(scale=weight_scale, size=(hidden_dim, num_classes))\n # self.params['W4'] = np.random.normal(scale=weight_scale, size=(hidden_dim, num_classes))\n\n self.params['b1'] = np.zeros(num_filters)\n # self.params['b2'] = np.zeros(num_filters)\n self.params['b3'] = np.zeros(num_classes)\n\n # self.params['b3'] = np.zeros(num_classes)\n # self.params['b4'] = np.zeros(num_classes)", "def sample(params):\n\n config = {}\n\n for param, value in params.items():\n if hasattr(value, 'rvs'):\n # this is a scipy.stats distribution\n config[param] = value.rvs()\n else:\n # this is a tuple\n config[param] = random.choice(value)\n\n return config", "def train_config(parser, input_argv=None):\n\n data(parser)\n token(parser)\n model(parser)\n if nsml.IS_ON_NSML:\n nsml_for_internal(parser)\n trainer(parser)\n\n # Use from config file\n base_config(parser)\n\n config = parser.parse_args(input_argv, namespace=NestedNamespace())\n\n use_base_config = config.base_config\n # use pre-defined base_config\n if use_base_config:\n base_config_path = os.path.join(\"base_config\", config.base_config)\n base_config_path = utils.add_config_extension(base_config_path)\n defined_config = utils.read_config()\n # config.overwrite(defined_config)\n\n config = NestedNamespace()\n config.load_from_json(defined_config)\n\n # overwrite input argument when base_config and arguments are provided.\n # (eg. 
--base_config bidaf --learning_rate 2) -> set bidaf.json then overwrite learning_rate 2)\n input_args = get_input_arguments(parser, input_argv)\n for k, v in input_args.items():\n setattr(config, k, v)\n\n if not use_base_config:\n config = optimize_config(config)\n\n set_gpu_env(config)\n set_batch_size(config)\n return config", "def reproduct_config(config):\r\n\r\n config.dim_emb = 256\r\n config.dim_phone = 128\r\n config.dim_phone_emb = 128\r\n config.dim_pre = 512\r\n config.use_drop = True\r\n config.num_emo_classes = 4\r\n \r\n config.len_crop = 96\r\n config.num_mels = 80\r\n config.wav2vec_feat_len = 1024\r\n\r\n config.batch_size = 2\r\n config.num_iters = 1000000\r\n config.checkpoint_step = 1000001 # do not save checkpoints\r\n\r\n ## save checkpoints every 50k iterations\r\n # config.num_iters = 500000\r\n # config.checkpoint_step = 50000\r\n \r\n config.speech_input = \"wav2vec\"\r\n\r\n if config.reproduct_mode == \"small\":\r\n config.dim_neck = 8\r\n config.freq = 48\r\n elif config.reproduct_mode == \"large\":\r\n config.dim_neck = 128\r\n config.freq = 2\r\n elif config.reproduct_mode == \"spec\":\r\n config.speech_input = \"spec\"\r\n config.dim_neck = 8\r\n config.freq = 48\r\n\r\n return config", "def configuration():", "def generate_advantageous_configs(n_not_dealt, threshold):\n total = 0\n for _ in combinations_with_replacement(range(1, 11), n_not_dealt):\n total += 1\n total *= 100\n count = 0\n progress = min(10000, total / 10)\n print \"total configs \" + str(total)\n for not_dealt in combinations_with_replacement(range(1, 11), n_not_dealt):\n ua = 1\n for ub in range(1, 11):\n for ud in range(1, 11):\n shoe = {}\n for card in not_dealt:\n shoe[card] = shoe.get(card, 0) + 1\n shoe[ua] = shoe.get(ua, 0) + 1\n shoe[ub] = shoe.get(ub, 0) + 1\n shoe[ud] = shoe.get(ud, 0) + 1\n C = get_C_matrix(ua, ub, ud, shoe)\n if possible_advantage(C):\n nonzero_rows = C[np.any(C, axis=1)]\n nonzero = nonzero_rows[:,~np.all(nonzero_rows == 0, axis=0)]\n\n biased_S, biased_alpha, biased_D, biased_X, biased_Y = get_biased_hyperbit_sdp_discrete(\n nonzero)\n biased_obj = get_payout(nonzero, biased_S)\n\n classical_S, classical_p, classical_alpha, classical_beta = get_classical_discrete(\n nonzero)\n\n classical_obj = get_payout(nonzero, classical_S)\n\n if biased_obj - classical_obj > threshold:\n yield ua, ub, ud, not_dealt, biased_obj, classical_obj, biased_obj - classical_obj\n count += 1\n if count % progress == 0:\n print \"finished \" + str(count) + \" out of \" + str(total) + \" configurations \"", "def on_game_start(self, config):\n gamelib.debug_write('Configuring your custom algo strategy...')\n self.config = config\n global FILTER, ENCRYPTOR, DESTRUCTOR, PING, EMP, SCRAMBLER\n FILTER = config[\"unitInformation\"][0][\"shorthand\"]\n ENCRYPTOR = config[\"unitInformation\"][1][\"shorthand\"]\n DESTRUCTOR = config[\"unitInformation\"][2][\"shorthand\"]\n PING = config[\"unitInformation\"][3][\"shorthand\"]\n EMP = config[\"unitInformation\"][4][\"shorthand\"]\n SCRAMBLER = config[\"unitInformation\"][5][\"shorthand\"]\n self.structureInPlace = False\n self.destructorsLeft = 0\n self.destructorsMiddle = 0\n self.juicyTargets = 0\n self.juicyCorner = False\n self.floodGatesOpen = True\n self.defenseRating = 0\n self.defenseCost = 0\n self.attackedFromLeft = 0\n\n self.mainStructure = [[ 25, 13],[ 24, 12],[ 23, 11],[ 22, 10],[ 21, 9],[ 20, 8],[ 19, 7],[ 18, 6],[ 17, 5],[ 16, 4],[ 15, 3],[ 14, 2],[ 13, 1]]\n\n\n self.filter0 =[[ 0, 13],[ 1, 13],[ 2, 13],[ 3, 13],[ 4, 13],[ 5, 
13],[ 6, 13],[ 7, 13],[ 8, 13],\\\n [ 9, 13],[ 10, 13],[ 17, 13],[ 18, 13],[ 19, 13],[ 20, 13],[ 21, 13],[ 22, 13],[ 23, 13],[ 24, 13],[ 25, 13],[ 26, 13],[ 27, 13]] \n self.filter1 = [[ 0, 13],[ 1, 13],[ 2, 13],[ 3, 13],[ 4, 13],[ 5, 13],[ 6, 13],[ 7, 13],[ 8, 13],[ 9, 13],[ 10, 13],[ 17, 13],\\\n [ 18, 13],[ 19, 13],[ 20, 13],[ 21, 13],[ 22, 13],[ 23, 13],[ 24, 13],[ 25, 13],[ 26, 13],[ 27, 13],[ 2, 12],[ 25, 12],[ 3, 11],[ 24, 11],[ 4, 10]]\n self.filter2 = [[ 0, 13],[ 1, 13],[ 2, 13],[ 3, 13],[ 4, 13],[ 5, 13],[ 6, 13],[ 7, 13],[ 8, 13],[ 9, 13],[ 10, 13],[ 17, 13],\\\n [ 18, 13],[ 19, 13],[ 20, 13],[ 21, 13],[ 22, 13],[ 23, 13],[ 24, 13],[ 25, 13],[ 26, 13],[ 27, 13],[ 2, 12],[ 25, 12],[ 3, 11],[ 24, 11],[ 4, 10]]\n self.filter3 = [[ 4, 13],[ 5, 13],[ 6, 13],[ 7, 13]]\n\n self.destructor0 = [[ 13, 13]]\n self.destructor1 = [[ 13, 13],[ 14, 13]]\n self.destructor2 = [[ 13, 13],[ 14, 13]]\n self.destructor3 = [[ 13, 13],[ 14, 13]]\n\n self.initExclusionList = [[0,0]]\n self.exclusionList = [[0,0]]", "def get_config_template() -> dict:\n return {\n VENE_PAYMENTS_BAMBORA_API_URL: (str, \"https://payform.bambora.com/pbwapi\"),\n VENE_PAYMENTS_BAMBORA_API_KEY: str,\n VENE_PAYMENTS_BAMBORA_API_SECRET: str,\n VENE_PAYMENTS_BAMBORA_PAYMENT_METHODS: list,\n }", "def get_test_config():\n config = get_config()\n config.batch_size = 2\n config.eval_batch_size = 2\n config.eval_num = 2\n config.eval_avg_num = 1\n config.num_train_steps = 2\n config.log_loss_every_steps = 1\n config.eval_every_steps = 1\n config.checkpoint_every_steps = 1\n config.df_dim = 16\n config.gf_dim = 16\n config.z_dim = 8\n config.show_num = 4\n config.num_epochs = 1\n config.shuffle_buffer_size = 10\n return config" ]
[ "0.69010735", "0.66355693", "0.6371017", "0.6329762", "0.621126", "0.62031883", "0.616896", "0.60010886", "0.5989587", "0.59527737", "0.5929536", "0.5908079", "0.5880665", "0.58574", "0.58471894", "0.58373", "0.5830637", "0.5825823", "0.58240813", "0.57930523", "0.575335", "0.5748764", "0.5748638", "0.5737342", "0.57276154", "0.57188576", "0.5694517", "0.5681903", "0.5675123", "0.5673341", "0.56579554", "0.5649532", "0.5648167", "0.563849", "0.56368256", "0.5635191", "0.56162906", "0.5616142", "0.56094295", "0.5599601", "0.55986416", "0.55982584", "0.55925447", "0.5591508", "0.55873466", "0.5584877", "0.55831164", "0.55709285", "0.55622137", "0.55603266", "0.5558856", "0.55569565", "0.5554924", "0.55460095", "0.55458724", "0.55387354", "0.55385226", "0.5529164", "0.5526152", "0.55215925", "0.5520482", "0.55186534", "0.55134195", "0.5508184", "0.55075294", "0.550077", "0.5496459", "0.5496065", "0.5494045", "0.54879934", "0.5484857", "0.5476913", "0.5472543", "0.5472529", "0.5459977", "0.5455739", "0.5452402", "0.5449303", "0.5448712", "0.5439025", "0.54386556", "0.54378116", "0.54285276", "0.54162484", "0.54162484", "0.5410904", "0.541054", "0.5410248", "0.54102427", "0.5407865", "0.54054075", "0.5405013", "0.5389812", "0.53890514", "0.5388662", "0.5387547", "0.53849053", "0.53837043", "0.53828424", "0.5382358", "0.5379729" ]
0.0
-1
It builds the configuration space with the needed hyperparameters. Different types of hyperparameters can be implemented easily: besides float hyperparameters on a log scale, it can also handle categorical input parameters.
def get_configspace():
    cs = CS.ConfigurationSpace()

    # Learning rate hyperparameter
    lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value='1e-2', log=True)

    # Stochastic gradient descent momentum as parameter.
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)

    cs.add_hyperparameters([lr, sgd_momentum])

    # Optimizer hyperparameters.
    #optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])
    #cs.add_hyperparameters([optimizer])

    # Only add the sgd_momentum hyperparameter if the optimizer is stochastic gradient descent. Otherwise, it doesn't make sense.
    #cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')
    #cs.add_condition(cond)

    ''' The below is commented out because we're not fiddling with architecture in this optimization.'''
    #num_new_fc_layers = CSH.UniformIntegerHyperparameter('num_new_fc_layers', lower=0, upper=3, default_value=0, log=False)
    #num_els_new_1 = CSH.UniformIntegerHyperparameter('num_els_new_1', lower=128, upper=4096, default_value = 1000, log=True)
    #num_els_new_2 = CSH.UniformIntegerHyperparameter('num_els_new_2', lower=128, upper=4096, default_value = 1000, log=True)
    #num_els_new_3 = CSH.UniformIntegerHyperparameter('num_els_new_3', lower=128, upper=4096, default_value = 1000, log=True)
    #freeze0_old = CSH.UniformIntegerHyperparameter('freeze0_cat', lower = 0, upper = 1, default_value = 1, log=False)
    #freeze1_old = CSH.UniformIntegerHyperparameter('freeze1_cat', lower=0, upper=1, default_value=1, log=False)
    #cs.add_hyperparameters([num_new_fc_layers, num_els_new_1, num_els_new_2, num_els_new_3, freeze0_old, freeze1_old, batchsize])

    dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)

    cs.add_hyperparameters([dropout_rate])

    return cs
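A minimal usage sketch for the snippet above (added for illustration, not part of the original record): it assumes the standard ConfigSpace imports implied by the CS / CSH aliases and only shows how the returned space would be sampled.

    import ConfigSpace as CS
    import ConfigSpace.hyperparameters as CSH

    cs = get_configspace()
    config = cs.sample_configuration()  # draw one random configuration from the space
    print(config['lr'], config['sgd_momentum'], config['dropout_rate'])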
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_configspace():\r\n cs = CS.ConfigurationSpace()\r\n\r\n lr = CSH.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value='1e-2', log=True)\r\n\r\n # For demonstration purposes, we add different optimizers as categorical hyperparameters.\r\n # To show how to use conditional hyperparameters with ConfigSpace, we'll add the optimizers 'Adam' and 'SGD'.\r\n # SGD has a different parameter 'momentum'.\r\n optimizer = CSH.CategoricalHyperparameter('optimizer', ['Adam', 'SGD'])\r\n\r\n sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', lower=0.0, upper=0.99, default_value=0.9, log=False)\r\n\r\n cs.add_hyperparameters([lr, optimizer, sgd_momentum])\r\n\r\n\r\n\r\n num_conv_layers = CSH.UniformIntegerHyperparameter('num_conv_layers', lower=1, upper=3, default_value=2)\r\n\r\n num_filters_1 = CSH.UniformIntegerHyperparameter('num_filters_1', lower=4, upper=64, default_value=16, log=True)\r\n num_filters_2 = CSH.UniformIntegerHyperparameter('num_filters_2', lower=4, upper=64, default_value=16, log=True)\r\n num_filters_3 = CSH.UniformIntegerHyperparameter('num_filters_3', lower=4, upper=64, default_value=16, log=True)\r\n\r\n cs.add_hyperparameters([num_conv_layers, num_filters_1, num_filters_2, num_filters_3])\r\n\r\n\r\n dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate', lower=0.0, upper=0.9, default_value=0.5, log=False)\r\n num_fc_units = CSH.UniformIntegerHyperparameter('num_fc_units', lower=8, upper=256, default_value=32, log=True)\r\n\r\n cs.add_hyperparameters([dropout_rate, num_fc_units])\r\n\r\n\r\n # The hyperparameter sgd_momentum will be used,if the configuration\r\n # contains 'SGD' as optimizer.\r\n cond = CS.EqualsCondition(sgd_momentum, optimizer, 'SGD')\r\n cs.add_condition(cond)\r\n\r\n # You can also use inequality conditions:\r\n cond = CS.GreaterThanCondition(num_filters_2, num_conv_layers, 1)\r\n cs.add_condition(cond)\r\n\r\n cond = CS.GreaterThanCondition(num_filters_3, num_conv_layers, 2)\r\n cs.add_condition(cond)\r\n\r\n return cs", "def _build_space(self, param_grid):\n if self.verbose>9:\n 'Building param space...'\n \n _warnings.filterwarnings('ignore')\n \n param_grid = param_grid.copy()\n space = {}\n for key in param_grid.keys():\n params = param_grid[key]\n \n if self.verbose>9:\n print('\\tinput:',key, params)\n \n type_str = str(type(params[0]))\n\n if 'float' in type_str or 'int' in type_str:\n \n min_ = min(params)\n max_ = max(params)\n log10_min_ = _np.log10(min_)\n log10_max_ = _np.log10(max_)\n\n if round(log10_max_)-round(log10_min_)>1 and round(log10_max_)-round(log10_min_)!=_np.inf: # use uniform distribution on log spacing \n \n space['log10.'+key] = _hyperopt.hp.uniform(key, log10_min_, log10_max_)\n \n if self.verbose>9:\n print('\\toutput:','log10.'+key, 'uniform', log10_min_, log10_max_)\n \n else:\n if 'int' in type_str:\n space[key] = _hyperopt.hp.quniform(key, min_, max_, 1)\n \n if self.verbose>9:\n print('\\toutput:',key, 'quniform', min_, max_)\n \n elif 'float' in type_str:\n space[key] = _hyperopt.hp.uniform(key, min_, max_)\n \n if self.verbose>9:\n print('\\toutput:',key, 'uniform', min_, max_)\n \n \n elif 'str' in type_str:\n space[key] = _hyperopt.hp.choice(key, [i for i in range(len(params))])\n \n if self.verbose>9:\n print('\\toutput:',key, 'choice', [i for i in range(len(params))])\n\n else:\n raise Exception('type(params[0]) is '+type_str+'. 
This type of hyperparameter is not yet supported.')\n\n assert(len(space.keys())==len(param_grid.keys())), 'len(space.keys())='+str(len(space.keys()))+', which is not equal to len(param_grid.keys())='+str(len(param_grid.keys()))\n \n if self.verbose>9:\n print('...finished building space')\n \n _warnings.filterwarnings('default')\n\n return space", "def get_configspace() -> CS.Configuration:\n cs = CS.ConfigurationSpace(seed=0)\n # START TODO ################\n lr_hp = CS.UniformFloatHyperparameter('lr', lower=1e-6, upper=1e-1, default_value=1e-2, log=True)\n optimizer_hp = CSH.CategoricalHyperparameter(name='optimizer', choices=['Adam', 'SGD', 'RMSprop'])\n sgd_momentum_hp = CS.UniformFloatHyperparameter('sgd_momentum', lower=0.00, upper=0.99, default_value=0.9)\n\n rms_momentum_hp = CS.UniformFloatHyperparameter('rms_momentum', lower=0.00, upper=0.99, default_value=0.9)\n rms_alpha_hp = CS.UniformFloatHyperparameter('rms_alpha', lower=0.00, upper=0.99, default_value=0.99)\n\n scheduler_hp = CSH.CategoricalHyperparameter(name='scheduler',\n choices=['CosineAnnealingLR', 'CosineAnnealingWarmRestarts'])\n cosine_max_t_hp = CS.UniformIntegerHyperparameter(name='cosine_max_t', lower=50, upper=300, default_value=150)\n cosine_warm_hp = CS.UniformIntegerHyperparameter(name='warm_t_0', lower=50, upper=300, default_value=150)\n\n sgd_cond = CS.EqualsCondition(sgd_momentum_hp, optimizer_hp, 'SGD')\n rms_cond1 = CS.EqualsCondition(rms_momentum_hp, optimizer_hp, 'RMSprop')\n rms_cond2 = CS.EqualsCondition(rms_alpha_hp, optimizer_hp, 'RMSprop')\n cosine_warm_cond = CS.EqualsCondition(cosine_warm_hp, scheduler_hp, 'CosineAnnealingWarmRestarts')\n cosine_cond = CS.EqualsCondition(cosine_max_t_hp, scheduler_hp, 'CosineAnnealingLR')\n cs.add_hyperparameters([lr_hp, optimizer_hp, sgd_momentum_hp, rms_momentum_hp,\n rms_alpha_hp, scheduler_hp, cosine_max_t_hp, cosine_warm_hp])\n cs.add_conditions([sgd_cond, rms_cond1, rms_cond2, cosine_cond, cosine_warm_cond])\n # END TODO ################\n return cs", "def get_hyper_params():\n #################################\n ##### INSERT YOUR CODE HERE #####\n layers_size = [4096, 4096, 10]\n activation = 'relu'\n lr = 1e-7\n epochs = 30\n dropout_rate = 0.2\n init_kind = 'xavier'\n ##### END YOUR CODE HERE ########\n #################################\n hyper_params = {\n 'layers_size': layers_size,\n 'activation': activation,\n 'lr': lr,\n 'epochs': epochs,\n 'init_kind': init_kind,\n 'dropout_rate': dropout_rate,\n }\n return hyper_params", "def get_hyper_params():\n #################################\n ##### INSERT YOUR CODE HERE #####\n layers_size = [4096, 4096, 10]\n activation = 'relu'\n lr = 0.3\n epochs = 30\n dropout_rate = 0.2\n init_kind = 'xavier'\n ##### END YOUR CODE HERE ########\n #################################\n hyper_params = {\n 'layers_size': layers_size,\n 'activation': activation,\n 'lr': lr,\n 'epochs': epochs,\n 'init_kind': init_kind,\n 'dropout_rate': dropout_rate,\n }\n return hyper_params", "def get_hyper_params():\n #################################\n ##### INSERT YOUR CODE HERE #####\n layers_size = [4096, 4096, 10]\n activation = 'relu'\n lr = 5e-4\n epochs = 30\n dropout_rate = 0.2\n init_kind = 'xavier'\n ##### END YOUR CODE HERE ########\n #################################\n hyper_params = {\n 'layers_size': layers_size,\n 'activation': activation,\n 'lr': lr,\n 'epochs': epochs,\n 'init_kind': init_kind,\n 'dropout_rate': dropout_rate,\n }\n return hyper_params", "def get_hyper_params():\n #################################\n 
##### INSERT YOUR CODE HERE #####\n layers_size = [4096, 4096, 10]\n activation = 'relu'\n lr = 5e-4\n epochs = 30\n dropout_rate = 0.2\n init_kind = 'xavier'\n ##### END YOUR CODE HERE ########\n #################################\n hyper_params = {\n 'layers_size': layers_size,\n 'activation': activation,\n 'lr': lr,\n 'epochs': epochs,\n 'init_kind': init_kind,\n 'dropout_rate': dropout_rate,\n }\n return hyper_params", "def get_configspace():\n configspace = cs.ConfigurationSpace()\n\n memory = cs.hyperparameters.UniformIntegerHyperparameter(name='memory', lower=2, upper=25)\n configspace.add_hyperparameter(hyperparameter=memory)\n\n batch_size = cs.hyperparameters.UniformIntegerHyperparameter(\n name='batch_size', lower=32, upper=8192, log=True\n )\n configspace.add_hyperparameter(hyperparameter=batch_size)\n\n frequency = cs.hyperparameters.UniformFloatHyperparameter(\n name='frequency', lower=3e-2, upper=1.0, log=True\n )\n configspace.add_hyperparameter(hyperparameter=frequency)\n\n learning_rate = cs.hyperparameters.UniformFloatHyperparameter(\n name='learning_rate', lower=1e-5, upper=3e-2, log=True\n )\n configspace.add_hyperparameter(hyperparameter=learning_rate)\n\n horizon = cs.hyperparameters.UniformIntegerHyperparameter(\n name='horizon', lower=1, upper=50\n )\n configspace.add_hyperparameter(hyperparameter=horizon)\n\n discount = cs.hyperparameters.UniformFloatHyperparameter(\n name='discount', lower=0.8, upper=1.0, log=True\n )\n configspace.add_hyperparameter(hyperparameter=discount)\n\n ratio_based = cs.hyperparameters.CategoricalHyperparameter(\n name='ratio_based', choices=('no', 'yes')\n )\n configspace.add_hyperparameter(hyperparameter=ratio_based)\n\n clipping_value = cs.hyperparameters.UniformFloatHyperparameter(\n name='clipping_value', lower=0.05, upper=0.5\n )\n configspace.add_hyperparameter(hyperparameter=clipping_value)\n\n baseline = cs.hyperparameters.CategoricalHyperparameter(\n name='baseline',\n choices=('no', 'auto', 'same-network', 'same-policy', 'same-policy-noopt')\n )\n configspace.add_hyperparameter(hyperparameter=baseline)\n\n baseline_learning_rate = cs.hyperparameters.UniformFloatHyperparameter(\n name='baseline_learning_rate', lower=1e-5, upper=3e-2, log=True\n )\n configspace.add_hyperparameter(hyperparameter=baseline_learning_rate)\n\n estimate_advantage = cs.hyperparameters.CategoricalHyperparameter(\n name='estimate_advantage', choices=('no', 'yes')\n )\n configspace.add_hyperparameter(hyperparameter=estimate_advantage)\n\n entropy_regularization = cs.hyperparameters.UniformFloatHyperparameter(\n name='entropy_regularization', lower=1e-5, upper=1.0, log=True\n )\n configspace.add_hyperparameter(hyperparameter=entropy_regularization)\n\n configspace.add_condition(\n condition=cs.EqualsCondition(child=clipping_value, parent=ratio_based, value='yes')\n )\n\n configspace.add_condition(\n condition=cs.NotEqualsCondition(\n child=baseline_learning_rate, parent=baseline, value='no'\n )\n )\n\n configspace.add_condition(\n condition=cs.NotEqualsCondition(\n child=estimate_advantage, parent=baseline, value='no'\n )\n )\n\n return configspace", "def init_parameters(obj, hyperparameters):\n # Initialize Global Configuration Parameter\n params = hyperparameters['global']\n setattr(obj, 'param', params)\n\n # Initialize Attributes (Pre-Checked Parameters)\n setattr(obj, 'learning_rate', params['learning_rate'])\n setattr(obj, 'loss', params['loss'])\n setattr(obj, 'max_iter', params['max_iter'])\n\n if params['loss'] == 'least_squares':\n setattr(obj, 
'num_classes', 1)\n elif params['loss'] in ['binary_crossentropy', 'categorical_crossentropy', 'auto']:\n setattr(obj, 'num_classes', params['num_classes'])\n\n # Initialize Attributes (Optional Values - Based on Default Parameters)\n if 'l2_regularization' not in params or params['l2_regularization'] is None:\n setattr(obj, 'l2_regularization', 0)\n else:\n setattr(obj, 'l2_regularization', params['l2_regularization'])\n\n if 'max_bins' not in params:\n setattr(obj, 'max_bins', 255)\n else:\n setattr(obj, 'max_bins', params['max_bins'])\n\n if 'max_depth' not in params or params['max_depth'] is None:\n setattr(obj, 'max_depth', None)\n else:\n setattr(obj, 'max_depth', params['max_depth'])\n\n if 'max_leaf_nodes' not in params or params['max_leaf_nodes'] is None:\n setattr(obj, 'max_leaf_nodes', 31)\n else:\n setattr(obj, 'max_leaf_nodes', params['max_leaf_nodes'])\n\n if 'min_samples_leaf' not in params or params['min_samples_leaf'] is None:\n setattr(obj, 'min_samples_leaf', 20)\n else:\n setattr(obj, 'min_samples_leaf', params['min_samples_leaf'])\n\n if 'random_state' in params:\n setattr(obj, 'random_state', params['random_state'])\n else:\n setattr(obj, 'random_state', None)\n\n if 'scoring' in params:\n setattr(obj, 'scoring', params['scoring'])\n else:\n setattr(obj, 'scoring', None)\n\n if 'verbose' not in params or params['verbose'] is None:\n setattr(obj, 'verbose', False)\n else:\n setattr(obj, 'verbose', True)\n\n return obj", "def get_hyper_params():\n #################################\n ##### INSERT YOUR CODE HERE #####\n layers_size = [10]\n activation = 'relu'\n lr = 5e-4\n epochs = 30\n dropout_rate = 0.2\n init_kind = 'xavier'\n ##### END YOUR CODE HERE ########\n #################################\n hyper_params = {\n 'layers_size': layers_size,\n 'activation': activation,\n 'lr': lr,\n 'epochs': epochs,\n 'init_kind': init_kind,\n 'dropout_rate': dropout_rate,\n }\n return hyper_params", "def get_configspace(self):\n cd = self.cd\n sp_dict = {}\n sp_dict['epochs'] = int(cd['epochs'])\n sp_dict['gamma'] = self._get_range_uniform('gamma', cd)\n sp_dict['multilabel'] = self._get_atomic('multilabel', cd)\n sp_dict['lr'] = self._get_range_uniform('lr', cd)\n sp_dict['optimizer'] = self._get_categorical('optimizer', cd)\n sp_dict['n_latent'] = self._get_range_integer('n_latent',cd)\n sp_dict['enc_hidden_dim'] = self._get_range_integer('enc_hidden_dim', cd)\n sp_dict['batch_size'] = self._get_range_integer('batch_size', cd)\n sp_dict['coherence_loss_wt'] = self._get_range_uniform('coherence_loss_wt', cd) or 0.0\n sp_dict['redundancy_loss_wt'] = self._get_range_uniform('redundancy_loss_wt', cd) or 0.0\n sp_dict['num_enc_layers'] = self._get_range_integer('num_enc_layers', cd) or 1\n sp_dict['enc_dr'] = self._get_range_uniform('enc_dr', cd) or 0.0\n sp_dict['covar_net_layers'] = self._get_range_integer('covar_net_layers', cd) or 1\n sp_dict['classifier_dropout'] = self._get_range_uniform('classifier_dropout', cd) or 0.1\n\n embedding_types = cd['embedding']\n embedding_space = [] \n for et in embedding_types:\n if et['source'] == 'random':\n embedding_space.append(ag.space.Dict(**{'source': 'random', 'size': self._get_range_integer('size', et)}))\n else:\n fixed_assigned = et.get('fixed')\n if fixed_assigned is None:\n embedding_space.append(ag.space.Dict(**{'source': et['source'], 'fixed': ag.space.Bool()}))\n else:\n embedding_space.append(ag.space.Dict(**{'source': et['source'], 'fixed': fixed_assigned.lower()}))\n sp_dict['embedding'] = 
ag.space.Categorical(*embedding_space)\n\n latent_types = cd['latent_distribution']\n latent_space = []\n for lt in latent_types:\n dist_type = lt['dist_type']\n if dist_type == 'vmf':\n latent_space.append(ag.space.Dict(**{'dist_type': 'vmf', 'kappa': self._get_range_uniform('kappa', lt)}))\n elif dist_type == 'logistic_gaussian':\n latent_space.append(ag.space.Dict(**{'dist_type': 'logistic_gaussian', 'alpha': self._get_range_uniform('alpha', lt)}))\n else:\n latent_space.append(ag.space.Dict(**{'dist_type': 'gaussian'}))\n sp_dict['latent_distribution'] = ag.space.Categorical(*latent_space)\n return sp_dict", "def hyperparams():\n H = 6\n return Munch(N=500, H=H, D=(H // 2) ** 2, batch_size=10, precision=to.float32)", "def setup_parameters(self):\n structure = self.ctx.structure_initial_primitive\n ecutwfc = []\n ecutrho = []\n\n for kind in structure.get_kind_names():\n try:\n dual = self.ctx.protocol['pseudo_data'][kind]['dual']\n cutoff = self.ctx.protocol['pseudo_data'][kind]['cutoff']\n cutrho = dual * cutoff\n ecutwfc.append(cutoff)\n ecutrho.append(cutrho)\n except KeyError as exception:\n self.abort_nowait('failed to retrieve the cutoff or dual factor for {}'.format(kind))\n\n natoms = len(structure.sites)\n conv_thr = self.ctx.protocol['convergence_threshold'] * natoms\n\n self.ctx.inputs['parameters'] = {\n 'CONTROL': {\n 'restart_mode': 'from_scratch',\n 'tstress': self.ctx.protocol['tstress'],\n },\n 'SYSTEM': {\n 'ecutwfc': max(ecutwfc),\n 'ecutrho': max(ecutrho),\n 'smearing': self.ctx.protocol['smearing'],\n 'degauss': self.ctx.protocol['degauss'],\n 'occupations': self.ctx.protocol['occupations'],\n },\n 'ELECTRONS': {\n 'conv_thr': conv_thr,\n }\n }", "def get_param_grid():\n layer_width = [32, 64, 128, 256, 512]\n layers = [2, 3, 4, 5, 6]\n epochs = [10, 25, 50, 75, 100]\n batch_size = [32, 64, 96, 128, 160, 192, 224, 256]\n activation = ['softmax', 'softplus', 'softsign', 'relu', 'tanh', 'sigmoid', 'hard_sigmoid', 'linear']\n init_mode = ['uniform', 'lecun_uniform', 'normal', 'zero', 'glorot_normal', 'glorot_uniform', 'he_normal',\n 'he_uniform']\n dropout_rate = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5]\n optimizer = ['adam', 'sgd', 'adadelta', 'adagrad', 'adamax', 'ftrl', 'nadam', 'rmsprop']\n\n grid = {'layer_width': layer_width,\n 'layers': layers,\n 'epochs': epochs,\n 'batch_size': batch_size,\n 'activation': activation,\n 'init_mode': init_mode,\n 'dropout_rate': dropout_rate,\n 'optimizer': optimizer}\n\n return grid", "def __init__(self, input_size, hidden_size, output_size, std=1e-4):\n self.params = {}\n self.params['W1'] = std * np.random.randn(input_size, hidden_size)\n self.params['b1'] = np.zeros(hidden_size)\n self.params['W2'] = std * np.random.randn(hidden_size, output_size)\n self.params['b2'] = np.zeros(output_size)", "def get_hyperparameter_configuration(cat_hparam, num_hparam, layers_hparam, combinations, n, random_state=420):\n np.random.seed(seed=random_state)\n configuration = dict.fromkeys(range(n))\n for ind in range(n):\n configuration[ind] = {'hparams': None}\n configuration[ind]['hparams'] = dict.fromkeys(\n [*cat_hparam.keys(), *num_hparam.keys(), 'list_hidden_layer']\n )\n if len(layers_hparam['num_hidden_layer']) == 3:\n try:\n distribution = eval(\n layers_hparam['num_hidden_layer'][2].replace(\"-\", \"\"))\n num_hidden_layer = int(distribution.rvs(\n layers_hparam['num_hidden_layer'][0], layers_hparam['num_hidden_layer'][1]-layers_hparam['num_hidden_layer'][0]))\n except NameError:\n logging.warning(\n f'WARNING: Distribution 
{layers_hparam[\"num_hidden_layer\"][2]} not found, generating random number uniformly.')\n num_hidden_layer = randint.rvs(\n layers_hparam['num_hidden_layer'][0], layers_hparam['num_hidden_layer'][1]+1)\n else:\n num_hidden_layer = randint.rvs(\n layers_hparam['num_hidden_layer'][0], layers_hparam['num_hidden_layer'][1]+1)\n\n if len(layers_hparam['num_neuron']) == 3:\n try:\n distribution = eval(\n layers_hparam['num_neuron'][2].replace(\"-\", \"\"))\n configuration[ind]['hparams']['list_hidden_layer'] = distribution.rvs(\n layers_hparam['num_neuron'][0], layers_hparam['num_neuron'][1]-layers_hparam['num_neuron'][0], size=num_hidden_layer).astype(int).tolist()\n except NameError:\n logging.warning(\n f'WARNING: Distribution {layers_hparam[\"num_neuron\"][2]} not found, generating random number uniformly.')\n configuration[ind]['hparams']['list_hidden_layer'] = randint.rvs(\n layers_hparam['num_neuron'][0], layers_hparam['num_neuron'][1]+1, size=num_hidden_layer).tolist()\n else:\n configuration[ind]['hparams']['list_hidden_layer'] = randint.rvs(\n layers_hparam['num_neuron'][0], layers_hparam['num_neuron'][1]+1, size=num_hidden_layer).tolist()\n\n if len(cat_hparam):\n cat_combination_num = random.randint(\n 0, len(combinations)-1)\n for hparam in cat_hparam.keys():\n configuration[ind]['hparams'][hparam] = combinations.loc[cat_combination_num, hparam]\n\n if len(num_hparam):\n for hparam in num_hparam.keys():\n if len(num_hparam[hparam]) == 3:\n try:\n distribution = eval(\n num_hparam[hparam][2].replace(\"-\", \"\"))\n if (type(num_hparam[hparam][0]) == int) and (type(num_hparam[hparam][1]) == int):\n configuration[ind]['hparams'][hparam] = int(distribution.rvs(\n num_hparam[hparam][0], num_hparam[hparam][1]-num_hparam[hparam][0]))\n else:\n configuration[ind]['hparams'][hparam] = distribution.rvs(\n num_hparam[hparam][0], num_hparam[hparam][1]-num_hparam[hparam][0])\n except NameError:\n logging.warning(\n f'WARNING: Distribution {num_hparam[hparam][2]} not found, generating random number uniformly.')\n if (type(num_hparam[hparam][0]) == int) and (type(num_hparam[hparam][1]) == int):\n configuration[ind]['hparams'][hparam] = randint.rvs(\n num_hparam[hparam][0], num_hparam[hparam][1]+1)\n else:\n configuration[ind]['hparams'][hparam] = uniform.rvs(\n num_hparam[hparam][0], num_hparam[hparam][1]-num_hparam[hparam][0])\n else:\n if (type(num_hparam[hparam][0]) == int) and (type(num_hparam[hparam][1]) == int):\n configuration[ind]['hparams'][hparam] = randint.rvs(\n num_hparam[hparam][0], num_hparam[hparam][1]+1)\n else:\n configuration[ind]['hparams'][hparam] = uniform.rvs(\n num_hparam[hparam][0], num_hparam[hparam][1]-num_hparam[hparam][0])\n\n return configuration", "def build_param_grid(self, C_list:list=[0.1, 1, 10, 100], gamma_list:list=[1, 0.1, 0.01, 0.001], kernel_list:list=['rbf']):\n ans = {}\n ans['C'] = C_list\n ans['gamma'] = gamma_list\n ans['kernel'] = kernel_list\n self.param_grid = ans\n return ans", "def __init__(self, input_dim=(3, 32, 32), hidden_dims_CNN = ((32, 5, 1, 1), (2, 2, 2)),\n hidden_dims_FC = ((1024), (0.5)), num_classes=10, weight_scale=1e-3, \n reg=0.0, dtype=np.float32):\n self.params = {}\n self.fix_params = {}\n self.reg = reg\n self.dtype = dtype\n \n C_input, H_input, W_input = input_dim\n pre_C = C_input \n pre_H = H_input\n pre_W = W_input\n \n num_CNN = len(hidden_dims_CNN)\n num_FC = len(hidden_dims_FC)\n\n for i in range(0, num_CNN):\n W_name = \"W\" + str(i)\n b_name = \"b\" + str(i)\n conv_param_name = \"conv_param\" + str(i)\n gamma_name = 
\"gamma\" + str(i)\n beta_name = \"beta\" + str(i)\n bn_param_name = \"bn_param\" + str(i)\n pool_param_name = \"pool_param\" + str(i)\n\n if num_CNN == 1:\n num_filters, filter_size, stride, pad = hidden_dims_CNN[0] # (F, filter_size, stride, pad)\n pool_stride, pool_height, pool_width = hidden_dims_CNN[1] # (pooling_stride, pooling_size)\n else:\n num_filters, filter_size, stride, pad = hidden_dims_CNN[i][0] # (F, filter_size, stride, pad)\n pool_stride, pool_height, pool_width = hidden_dims_CNN[i][1] # (pooling_stride, pooling_size)\n \n if weight_scale == -1:\n self.params[W_name] = np.random.randn(num_filters, pre_C, filter_size, filter_size) / np.sqrt(filter_size * filter_size * pre_C)\n else: \n self.params[W_name] = np.random.randn(num_filters, pre_C, filter_size, filter_size) * weight_scale\n self.params[b_name] = np.zeros(num_filters)\n self.fix_params[conv_param_name] = {'stride': stride, 'pad': pad}\n \n self.params[gamma_name] = np.random.randn(num_filters)\n self.params[beta_name] = np.random.randn(num_filters)\n self.fix_params[bn_param_name] = {'mode': 'train'}\n\n self.fix_params[pool_param_name] = {'pool_height': pool_height, 'pool_width': pool_width, 'stride': pool_stride}\n \n pre_H, pre_W = cnn_out_shape(pre_H, pre_W, filter_size, filter_size, stride, pad)\n pre_C = num_filters \n pre_H, pre_W = pool_out_shape(pre_H, pre_W, pool_height, pool_width, pool_stride)\n\n pre_fc_dim = pre_H * pre_W * pre_C\n\n for i in range(0, num_FC):\n W_name = \"W\" + str(i + num_CNN)\n b_name = \"b\" + str(i + num_CNN)\n gamma_name = \"gamma\" + str(i + num_CNN)\n beta_name = \"beta\" + str(i + num_CNN)\n bn_param_name = \"bn_param\" + str(i + num_CNN)\n drop_name = \"drop_ratio\" + str(i + num_CNN)\n \n if num_FC == 1 :\n fc_num = hidden_dims_FC[0]\n drop_ratio = hidden_dims_FC[1]\n else:\n fc_num = hidden_dims_FC[i][0]\n drop_ratio = hidden_dims_FC[i][1]\n\n if weight_scale == -1:\n self.params[W_name] = np.random.randn(pre_fc_dim, fc_num) / np.sqrt(pre_fc_dim)\n else:\n self.params[W_name] = np.random.randn(pre_fc_dim, fc_num) * weight_scale\n self.params[b_name] = np.zeros(fc_num)\n\n self.params[gamma_name] = np.random.randn(fc_num)\n self.params[beta_name] = np.random.randn(fc_num)\n self.fix_params[bn_param_name] = {'mode': 'train'}\n\n self.fix_params[drop_name] = {'mode': 'train', 'p': drop_ratio}\n\n pre_fc_dim = fc_num\n\n total_layer = num_CNN + num_FC\n W_name = \"W\" + str(total_layer)\n b_name = \"b\" + str(total_layer)\n if weight_scale == -1:\n self.params[W_name] = np.random.randn(pre_fc_dim, num_classes) / np.sqrt(pre_fc_dim)\n else:\n self.params[W_name] = np.random.randn(pre_fc_dim, num_classes) * weight_scale\n self.params[b_name] = np.zeros(num_classes)\n\n\n self.num_CNN = num_CNN\n self.num_FC = num_FC\n self.total_layer = num_CNN + num_FC\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def get_hyperparams(self):", "def customize_experiment_config(self, config):\n # TODO: use ConfigList from Coach launcher, and share customization code.\n hyperparams_dict = json.loads(os.environ.get(\"SM_HPS\", \"{}\"))\n\n # Set output dir to intermediate\n # TODO: move this to before customer-specified so they can override\n hyperparams_dict[\"rl.training.local_dir\"] = \"/opt/ml/output/intermediate\"\n\n self.hyperparameters = ConfigurationList() # TODO: move to shared\n for name, value in hyperparams_dict.items():\n # self.map_hyperparameter(name, val) #TODO\n if name.startswith(\"rl.\"):\n # self.apply_hyperparameter(name, value) #TODO\n 
self.hyperparameters.store(name, value)\n # else:\n # raise ValueError(\"Unknown hyperparameter %s\" % name)\n\n self.hyperparameters.apply_subset(config, \"rl.\")\n return config", "def build(dynamic_hyperparams_config, is_training):\n if not isinstance(dynamic_hyperparams_config,\n hyperparams_pb2.Hyperparams):\n raise ValueError('dynamic_hyperparams_config not of type '\n 'hyperparams_pb.Hyperparams.')\n\n batch_norm = None\n batch_norm_params = None\n if dynamic_hyperparams_config.HasField('batch_norm'):\n batch_norm = slim.batch_norm\n batch_norm_params = _build_batch_norm_params(\n dynamic_hyperparams_config.batch_norm, is_training)\n\n affected_ops = [dynamic_conv2d]\n with slim.arg_scope(\n affected_ops,\n activation_fn=_build_activation_fn(dynamic_hyperparams_config.activation),\n normalizer_fn=batch_norm,\n normalizer_params=batch_norm_params) as sc:\n return sc", "def _init_hyperparam(self, **p_par):\r\n \r\n try:\r\n p_input_size = self._input_space.get_num_dim()\r\n p_output_size = self._output_space.get_num_dim()\r\n except:\r\n raise ParamError('Input size and/or output size of the network are not defined.')\r\n \r\n if 'p_update_rate' not in p_par:\r\n p_par['p_update_rate'] = 1\r\n elif p_par.get('p_update_rate') < 1:\r\n raise ParamError(\"p_update_rate must be equal or higher than 1.\")\r\n \r\n if 'p_num_hidden_layers' not in p_par:\r\n raise ParamError(\"p_num_hidden_layers is not defined.\")\r\n \r\n if 'p_output_activation_fct' not in p_par:\r\n p_par['p_output_activation_fct'] = None\r\n \r\n if 'p_optimizer' not in p_par:\r\n raise ParamError(\"p_optimizer is not defined.\")\r\n \r\n if 'p_loss_fct' not in p_par:\r\n raise ParamError(\"p_loss_fct is not defined.\")\r\n\r\n if 'p_test_data' not in p_par:\r\n p_par['p_test_data'] = 0.3\r\n\r\n if 'p_batch_size' not in p_par:\r\n p_par['p_batch_size'] = 100\r\n\r\n if 'p_seed_buffer' not in p_par:\r\n p_par['p_seed_buffer'] = 1\r\n\r\n if 'p_learning_rate' not in p_par:\r\n p_par['p_learning_rate'] = 3e-4\r\n \r\n if 'p_hidden_size' not in p_par:\r\n raise ParamError(\"p_hidden_size is not defined.\")\r\n try:\r\n if len(p_par['p_hidden_size']) != p_par['p_num_hidden_layers']:\r\n raise ParamError(\"length of p_hidden_size list must be equal to p_num_hidden_layers or an integer.\")\r\n except:\r\n p_par['p_hidden_size'] = [int(p_par['p_hidden_size'])] * int(p_par['p_num_hidden_layers'])\r\n \r\n if 'p_activation_fct' not in p_par:\r\n raise ParamError(\"p_activation_fct is not defined.\")\r\n try:\r\n if len(p_par['p_activation_fct']) != p_par['p_num_hidden_layers']:\r\n raise ParamError(\"length of p_activation_fct list must be equal to p_num_hidden_layers or a single activation function.\")\r\n except:\r\n if isinstance(p_par['p_activation_fct'], list):\r\n raise ParamError(\"length of p_activation_fct list must be equal to p_num_hidden_layers or a single activation function.\")\r\n else:\r\n p_par['p_activation_fct'] = [p_par['p_activation_fct']] * int(p_par['p_num_hidden_layers'])\r\n \r\n if 'p_weight_bias_init' not in p_par:\r\n p_par['p_weight_bias_init'] = True\r\n \r\n if p_par['p_weight_bias_init']:\r\n if 'p_weight_init' not in p_par:\r\n p_par['p_weight_init'] = torch.nn.init.orthogonal_\r\n \r\n if 'p_bias_init' not in p_par:\r\n p_par['p_bias_init'] = lambda x: torch.nn.init.constant_(x, 0)\r\n \r\n if 'p_gain_init' not in p_par:\r\n p_par['p_gain_init'] = np.sqrt(2)\r\n \r\n self._hyperparam_space.add_dim(HyperParam('p_input_size','Z'))\r\n 
self._hyperparam_space.add_dim(HyperParam('p_output_size','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_update_rate','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_num_hidden_layers','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_hidden_size','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_activation_fct'))\r\n self._hyperparam_space.add_dim(HyperParam('p_output_activation_fct'))\r\n self._hyperparam_space.add_dim(HyperParam('p_optimizer'))\r\n self._hyperparam_space.add_dim(HyperParam('p_loss_fct'))\r\n self._hyperparam_space.add_dim(HyperParam('p_test_data'))\r\n self._hyperparam_space.add_dim(HyperParam('p_batch_size'))\r\n self._hyperparam_space.add_dim(HyperParam('p_seed_buffer'))\r\n self._hyperparam_space.add_dim(HyperParam('p_learning_rate'))\r\n self._hyperparam_space.add_dim(HyperParam('p_weight_bias_init'))\r\n self._hyperparam_space.add_dim(HyperParam('p_weight_init'))\r\n self._hyperparam_space.add_dim(HyperParam('p_bias_init'))\r\n self._hyperparam_space.add_dim(HyperParam('p_gain_init'))\r\n self._hyperparam_tuple = HyperParamTuple(self._hyperparam_space)\r\n \r\n ids_ = self.get_hyperparam().get_dim_ids()\r\n self.get_hyperparam().set_value(ids_[0], p_input_size)\r\n self.get_hyperparam().set_value(ids_[1], p_output_size)\r\n self.get_hyperparam().set_value(ids_[2], p_par['p_update_rate'])\r\n self.get_hyperparam().set_value(ids_[3], p_par['p_num_hidden_layers'])\r\n self.get_hyperparam().set_value(ids_[4], p_par['p_hidden_size'])\r\n self.get_hyperparam().set_value(ids_[5], p_par['p_activation_fct'])\r\n self.get_hyperparam().set_value(ids_[6], p_par['p_output_activation_fct'])\r\n self.get_hyperparam().set_value(ids_[7], p_par['p_optimizer'])\r\n self.get_hyperparam().set_value(ids_[8], p_par['p_loss_fct'])\r\n self.get_hyperparam().set_value(ids_[9], p_par['p_test_data'])\r\n self.get_hyperparam().set_value(ids_[10], p_par['p_batch_size'])\r\n self.get_hyperparam().set_value(ids_[11], p_par['p_seed_buffer'])\r\n self.get_hyperparam().set_value(ids_[12], p_par['p_learning_rate'])\r\n self.get_hyperparam().set_value(ids_[13], p_par['p_weight_bias_init'])\r\n self.get_hyperparam().set_value(ids_[14], p_par['p_weight_init'])\r\n self.get_hyperparam().set_value(ids_[15], p_par['p_bias_init'])\r\n self.get_hyperparam().set_value(ids_[16], p_par['p_gain_init'])", "def create_hparams(experiment):\n hparams = {}\n\n # General parameters.\n hparams['batch_size'] = 64\n hparams['eval_batch_size'] = 64\n hparams['learning_rate_warmup_steps'] = 2000\n hparams['learning_rate_constant'] = 1\n hparams['learning_rate'] = 0.001\n hparams['train_epoches'] = 200\n hparams['steps_per_epoch'] = 30\n hparams['train_steps'] = 1000 * 1000\n hparams['eval_steps'] = 100\n hparams['caption_optimizer'] = 't2t'\n hparams['clip_norm'] = 5.0\n hparams['train_files'] = ''\n hparams['eval_files'] = ''\n hparams['train_buffer_size'] = 2000\n hparams['eval_buffer_size'] = 500\n hparams['train_pixel_encoder'] = True\n hparams['debug'] = False\n hparams['distribution_strategy'] = 'mirrored'\n\n # Embedding parameters.\n hparams['embedding_file'] = ''\n hparams['word_vocab_path'] = ''\n hparams['glove_trainable'] = True\n hparams['vocab_size'] = 10000\n\n # View hierarchy encoder parameters.\n hparams['max_pixel_pos'] = 100\n hparams['max_dom_pos'] = 500\n hparams['screen_encoder'] = 'pixel_transformer'\n hparams['screen_embedding_feature'] = ['text', 'type', 'pos', 'click', 'dom']\n hparams['obj_text_aggregation'] = 'max'\n hparams['synthetic_screen_noise'] = 0.\n\n # General 
parameters.\n hparams['num_hidden_layers'] = 2\n hparams['hidden_size'] = 2\n hparams['filter_size'] = 2\n hparams['num_heads'] = 2\n hparams['dropout'] = 0.2\n hparams['layer_prepostprocess_dropout'] = 0.2\n hparams['attention_dropout'] = 0.2\n hparams['relu_dropout'] = 0.2\n\n transformer_hparams = model_params.BASE_PARAMS\n\n # Add parameters from transformer model.\n hparams.update(transformer_hparams)\n\n # Rewrite all the parameters from command-line flags.\n config = screen2words_experiment_config.experiments[experiment]\n hparams.update(config)\n\n return hparams", "def __init__(self, input_size, hidden_size, output_size, weight_init_std=0.01):\n\n self.params = {}\n self.params['W1'] = weight_init_std * \\\n np.random.randn(input_size, hidden_size)\n self.params['b1'] = np.zeros(hidden_size)\n self.params['W2'] = weight_init_std * \\\n np.random.randn(hidden_size, output_size)\n self.params['b2'] = np.zeros(output_size)", "def _starting_hparams():\n hparams = contrib_training.HParams()\n hparams.add_hparam('batch_style', 'bucket')\n hparams.add_hparam('gradient_clipping_decay', 0.9999)\n hparams.add_hparam('learning_rate', 0.0005)\n hparams.add_hparam('lr_decay_rate', .997)\n hparams.add_hparam('lr_decay_steps', 1000)\n hparams.add_hparam('lr_warmup_steps', 3000)\n hparams.add_hparam('model_type', 'cnn')\n hparams.add_hparam('resnet_bottleneck_factor', 0.5)\n hparams.add_hparam('decision_threshold', 0.5)\n hparams.add_hparam('denominator_power', 1.0) # Standard mean-pooling.\n return hparams", "def set_hyperparams(self, params):", "def create_hparams(hparam_string=None):\n hparams = tf.contrib.training.HParams(\n # The name of the architecture to use.\n arch='resnet',\n lrelu_leakiness=0.2,\n batch_norm_decay=0.9,\n weight_decay=1e-5,\n normal_init_std=0.02,\n generator_kernel_size=3,\n discriminator_kernel_size=3,\n\n # Stop training after this many examples are processed\n # If none, train indefinitely\n num_training_examples=0,\n\n # Apply data augmentation to datasets\n # Applies only in training job\n augment_source_images=False,\n augment_target_images=False,\n\n # Discriminator\n # Number of filters in first layer of discriminator\n num_discriminator_filters=64,\n discriminator_conv_block_size=1, # How many convs to have at each size\n discriminator_filter_factor=2.0, # Multiply # filters by this each layer\n # Add gaussian noise with this stddev to every hidden layer of D\n discriminator_noise_stddev=0.2, # lmetz: Start seeing results at >= 0.1\n # If true, add this gaussian noise to input images to D as well\n discriminator_image_noise=False,\n discriminator_first_stride=1, # Stride in first conv of discriminator\n discriminator_do_pooling=False, # If true, replace stride 2 with avg pool\n discriminator_dropout_keep_prob=0.9, # keep probability for dropout\n\n # DCGAN Generator\n # Number of filters in generator decoder last layer (repeatedly halved\n # from 1st layer)\n num_decoder_filters=64,\n # Number of filters in generator encoder 1st layer (repeatedly doubled\n # after 1st layer)\n num_encoder_filters=64,\n\n # This is the shape to which the noise vector is projected (if we're\n # transferring from noise).\n # Write this way instead of [4, 4, 64] for hparam search flexibility\n projection_shape_size=4,\n projection_shape_channels=64,\n\n # Indicates the method by which we enlarge the spatial representation\n # of an image. 
Possible values include:\n # - resize_conv: Performs a nearest neighbor resize followed by a conv.\n # - conv2d_transpose: Performs a conv2d_transpose.\n upsample_method='resize_conv',\n\n # Visualization\n summary_steps=500, # Output image summary every N steps\n\n ###################################\n # Task Classifier Hyperparameters #\n ###################################\n\n # Which task-specific prediction tower to use. Possible choices are:\n # none: No task tower.\n # doubling_pose_estimator: classifier + quaternion regressor.\n # [conv + pool]* + FC\n # Classifiers used in DSN paper:\n # gtsrb: Classifier used for GTSRB\n # svhn: Classifier used for SVHN\n # mnist: Classifier used for MNIST\n # pose_mini: Classifier + regressor used for pose_mini\n task_tower='doubling_pose_estimator',\n weight_decay_task_classifier=1e-5,\n source_task_loss_weight=1.0,\n transferred_task_loss_weight=1.0,\n\n # Number of private layers in doubling_pose_estimator task tower\n num_private_layers=2,\n\n # The weight for the log quaternion loss we use for source and transferred\n # samples of the cropped_linemod dataset.\n # In the DSN work, 1/8 of the classifier weight worked well for our log\n # quaternion loss\n source_pose_weight=0.125 * 2.0,\n transferred_pose_weight=0.125 * 1.0,\n\n # If set to True, the style transfer network also attempts to change its\n # weights to maximize the performance of the task tower. If set to False,\n # then the style transfer network only attempts to change its weights to\n # make the transferred images more likely according to the domain\n # classifier.\n task_tower_in_g_step=True,\n task_loss_in_g_weight=1.0, # Weight of task loss in G\n\n #########################################\n # 'simple` generator arch model hparams #\n #########################################\n simple_num_conv_layers=1,\n simple_conv_filters=8,\n\n #########################\n # Resnet Hyperparameters#\n #########################\n resnet_blocks=6, # Number of resnet blocks\n resnet_filters=64, # Number of filters per conv in resnet blocks\n # If true, add original input back to result of convolutions inside the\n # resnet arch. If false, it turns into a simple stack of conv/relu/BN\n # layers.\n resnet_residuals=True,\n\n #######################################\n # The residual / interpretable model. #\n #######################################\n res_int_blocks=2, # The number of residual blocks.\n res_int_convs=2, # The number of conv calls inside each block.\n res_int_filters=64, # The number of filters used by each convolution.\n\n ####################\n # Latent variables #\n ####################\n # if true, then generate random noise and project to input for generator\n noise_channel=True,\n # The number of dimensions in the input noise vector.\n noise_dims=10,\n\n # If true, then one hot encode source image class and project as an\n # additional channel for the input to generator. This gives the generator\n # access to the class, which may help generation performance.\n condition_on_source_class=False,\n\n ########################\n # Loss Hyperparameters #\n ########################\n domain_loss_weight=1.0,\n style_transfer_loss_weight=1.0,\n\n ########################################################################\n # Encourages the transferred images to be similar to the source images #\n # using a configurable metric. 
#\n ########################################################################\n\n # The weight of the loss function encouraging the source and transferred\n # images to be similar. If set to 0, then the loss function is not used.\n transferred_similarity_loss_weight=0.0,\n\n # The type of loss used to encourage transferred and source image\n # similarity. Valid values include:\n # mpse: Mean Pairwise Squared Error\n # mse: Mean Squared Error\n # hinged_mse: Computes the mean squared error using squared differences\n # greater than hparams.transferred_similarity_max_diff\n # hinged_mae: Computes the mean absolute error using absolute\n # differences greater than hparams.transferred_similarity_max_diff.\n transferred_similarity_loss='mpse',\n\n # The maximum allowable difference between the source and target images.\n # This value is used, in effect, to produce a hinge loss. Note that the\n # range of values should be between 0 and 1.\n transferred_similarity_max_diff=0.4,\n\n ################################\n # Optimization Hyperparameters #\n ################################\n learning_rate=0.001,\n batch_size=32,\n lr_decay_steps=20000,\n lr_decay_rate=0.95,\n\n # Recomendation from the DCGAN paper:\n adam_beta1=0.5,\n clip_gradient_norm=5.0,\n\n # The number of times we run the discriminator train_op in a row.\n discriminator_steps=1,\n\n # The number of times we run the generator train_op in a row.\n generator_steps=1)\n\n if hparam_string:\n tf.logging.info('Parsing command line hparams: %s', hparam_string)\n hparams.parse(hparam_string)\n\n tf.logging.info('Final parsed hparams: %s', hparams.values())\n return hparams", "def build(self):\n return self.hyperparams.items()", "def make_params(config):\n params = copy.deepcopy(config.view.params)\n params.t2bins = np.arange(0, params.t2bin_max + 1e-4, params.t2bin_stepsize)\n params.out = make_Bunch(\"State and output of detection processing\") # outputs are not parameters, maybe separate \n return params", "def build_model(self):\n self.global_step = tf.Variable(0, name=\"global_step\", trainable=False)\n\n if self.config.optimizer == 'sgd':\n self.optimizer = tf.keras.optimizers.SGD(learning_rate=self.config.learning_rate)\n elif self.config.optimizer == 'rms':\n self.optimizer = tf.keras.optimizers.RMSprop(learning_rate=self.config.learning_rate)\n elif self.config.optimizer == 'adam':\n self.optimizer = tf.keras.optimizers.Adam(learning_rate=self.config.learning_rate)\n elif self.config.optimizer == 'adagrad':\n self.optimizer = tf.keras.optimizers.Adagrad(learning_rate=self.config.learning_rate)\n elif self.config.optimizer == 'adadelta':\n self.optimizer = tf.keras.optimizers.Adadelta(learning_rate=self.config.learning_rate)\n else:\n raise NotImplementedError(\"No support for %s optimizer\" % self.config.optimizer)\n \n if self.config.optimizer in ['rms', 'adagrad', 'adadelta']:\n with tf.device('cpu:0'):\n self.model.def_parameters()\n else:\n self.model.def_parameters()\n\n self.config.summary()\n self.config.summary_hyperparameter(self.model.model_name)", "def build_hparams(FLAGS):\n hparams = add_model_parameters(hyperparameters.params, FLAGS)\n hparams.training = True\n if FLAGS.hparams:\n hparams.parse(FLAGS.hparams)\n if FLAGS.eval_model:\n hparams.summary_frequency = 1\n hparams.test_frequency = 1\n hparams.save_frequency = 5\n hparams.training = False\n\n hparams.sdr_frequency = hparams.test_frequency * constants.AVG_SDR_ON_N_BATCHES\n # See STFT scipy doc\n hparams.waveform_size = (hparams.ntimebins - 1) * 
constants.ndiff\n\n return hparams", "def train_hyperopt(params):\n lasagne.random.set_rng(RandomState(9859295))\n\n template_name = params.pop('template_name') \n params = adjust_params_for_hyperopt(params)\n \n config_strings = create_config_strings(template_name)\n config_objects = create_config_objects(config_strings)\n templates, _ = create_templates_variants_from_config_objects(\n config_objects)\n \n \n processed_templates, params_without_template_params = process_templates(\n templates, params)\n final_params = process_parameters_by_templates(params_without_template_params, \n processed_templates)\n \n # go to directory above this source-file\n main_template_filename = os.path.dirname(os.path.abspath(os.path.dirname(\n __file__)))\n # then complete path to config\n main_template_filename = os.path.join(main_template_filename, \"configs\", \n \"eegnet_template.yaml\")\n \n with (open(main_template_filename, 'r')) as main_template_file:\n main_template_str = main_template_file.read()\n \n \n final_params['original_params'] = 'dummy'\n train_str = Template(main_template_str).substitute(final_params)\n \n def do_not_load_constructor(loader, node):\n return None\n yaml.add_constructor(u'!DoNotLoad', do_not_load_constructor)\n modified_train_str = train_str.replace('layers: ', 'layers: !DoNotLoad ')\n train_dict = yaml_parse.load(modified_train_str) \n dataset = train_dict['dataset'] \n dataset.load()\n dataset_provider = train_dict['dataset_provider']\n \n assert 'in_sensors' in train_str\n assert 'in_rows' in train_str\n assert 'in_cols' in train_str\n \n train_str = train_str.replace('in_sensors',\n str(dataset.get_topological_view().shape[1]))\n train_str = train_str.replace('in_rows',\n str(dataset.get_topological_view().shape[2]))\n train_str = train_str.replace('in_cols', \n str(dataset.get_topological_view().shape[3]))\n \n train_dict = yaml_parse.load(train_str)\n layers = train_dict['layers']\n final_layer = layers[-1]\n\n # turn off debug/info logging\n logging.getLogger(\"pylearn2\").setLevel(logging.WARN)\n logging.getLogger(\"braindecode\").setLevel(logging.WARN)\n exp = Experiment()\n exp.setup(final_layer, dataset_provider, **train_dict['exp_args'])\n exp.run()\n final_misclass = exp.monitor_chans['test_misclass'][-1]\n print(\"Result for\")\n pprint(params)\n print(\"Final Test misclass: {:5.4f}\".format(float(final_misclass)))\n return final_misclass", "def config():\n\n compared_algorithms_type: AlgorithmsType = AlgorithmsType.LinearRegression\n compared_methods: List = [] # Leave empty for using all solvers.\n numpy_distribution: NumpyDistribution = NumpyDistribution.IntelDistribution\n used_database: DatabaseType = DatabaseType.Synthetic\n experiment_type: ExperimentType = ExperimentType.RunTimeExperiment\n cross_validation_folds: int = 1\n n_alphas: int = 100\n reduction_factor: int = 1\n\n run_time_experiments_config: Dict[str, range] = {\n \"run_time_compared_data_sizes\": range(int(5000 / reduction_factor), int(15000 / reduction_factor),\n int(5000 / reduction_factor)),\n \"calc_transpose_dot_residuals\": compared_algorithms_type == AlgorithmsType.LinearRegression\n }\n number_of_alphas_experiments_config: Dict[str, range] = {\n \"alphas_range\": range(1, 221, 20)\n }\n\n synthetic_data_config: Dict[str, int] = {\n \"data_size\": int(15000 / reduction_factor),\n \"features_num\": 7\n }\n\n sketch_preconditioned_config: Dict[str, float] = {\n \"sampled_rows\": 0.005,\n \"switch_sign_probability\": 0.5,\n \"min_sampled_rows\": 100.0\n }\n resources_path: str = 
r'Resources'\n results_path: str = r'Results'\n clusters_count: int = _choose_clusters_num(used_database, synthetic_data_config[\"features_num\"])\n elastic_net_factor: float = 0.5 # Rho factor in Elastic-Net regularization.\n is_positive_definite: bool = True", "def default_kernel_config(defn):\n return [('beam', {}),\n ('hypers',\n {\n 'alpha_a': 4.0,\n 'alpha_b': 2.0,\n 'gamma_a': 3.0, \n 'gamma_b': 6.0\n }\n )]", "def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n\n self.params['W1'] = np.random.normal(size=(num_filters,\n input_dim[0],\n filter_size,\n filter_size),\n scale=weight_scale)\n self.params['W2'] = np.random.normal(size=(num_filters * \\\n input_dim[1] // 2 *\\\n input_dim[2] // 2,\n hidden_dim),\n scale=weight_scale)\n self.params['W3'] = np.random.normal(size=(hidden_dim,\n num_classes),\n scale=weight_scale)\n\n self.params['b1'] = np.zeros(num_filters)\n self.params['b2'] = np.zeros(hidden_dim)\n self.params['b3'] = np.zeros(num_classes)\n\n for k, v in self.params.items():\n self.params[k] = v.astype(dtype)", "def default_feature_hp_kernel_config(defn):\n defn = _validate_definition(defn)\n\n # hyperparams\n hparams = {}\n for i, hp in enumerate(defn.hyperpriors()):\n if not hp:\n continue\n # XXX(stephentu): we are arbitrarily picking w=0.1\n hparams[i] = {k: (fn, 0.1) for k, fn in hp.iteritems()}\n\n if not hparams:\n return []\n else:\n return [('slice_feature_hp', {'hparams': hparams})]", "def construct_parameters(self, method= \"random\", W = np.zeros(1), b = np.zeros(1), initialization=True):\n #W = np.asarray(W, dtype=object)\n #b = np.asarray(b, dtype=object)\n for i in reversed(range(1,len(self.architecture))):\n \n if initialization==True:\n if self.activations[i-1] in {'relu' , 'leakyrelu' , 'ealu'}:\n variance = np.sqrt(2/(self.architecture[i-1])) #He initialization\n elif self.activations[i-1] == 'tanh':\n variance = np.sqrt(6/(self.architecture[i-1] + self.architecture[i])) #Xavier initialization\n elif self.activations[i-1] in ('swish' , 'sigmoid'):\n variance = np.sqrt(1/(self.architecture[i-1]))\n else:\n variance = 1\n \n elif initialization == False:\n variance = 1\n \n if method == 'random':\n self.weights_and_biases[f'W{i}'] = np.random.rand(self.architecture[i-1], self.architecture[i])*variance #randomised initialisation \n self.weights_and_biases[f'b{i}'] = np.zeros(self.architecture[i])*variance\n \n elif method == 'manual': #manual initialisation using given weights and biases\n self.weights_and_biases[f'W{i}'] = W[i-1]\n self.weights_and_biases[f'b{i}'] = b[i-1] \n return self.weights_and_biases", "def _build_param_dict(self):\n self._build_common_param_dict()\n\n self._param_dict.add(Parameter.NUM_AVG_SAMPLES,\n r'ScansToAverage>([\\d]+)</ScansToAverage>',\n lambda match: int(match.group(1)),\n str,\n type=ParameterDictType.INT,\n display_name=\"Scans to Average\",\n description=\"Number of samples to average (must be even)\",\n range=INT16,\n startup_param=True,\n direct_access=False,\n default_value=4,\n visibility=ParameterDictVisibility.READ_WRITE)\n self._param_dict.add(Parameter.MIN_COND_FREQ,\n r'MinimumCondFreq>([\\d]+)</MinimumCondFreq',\n lambda match: int(match.group(1)),\n str,\n type=ParameterDictType.INT,\n display_name=\"Minimum Conductivity Frequency\",\n range=INT16,\n description=\"Minimum conductivity frequency to enable pump turn-on.\",\n startup_param=True,\n 
direct_access=False,\n default_value=500,\n units=Units.HERTZ,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.PUMP_DELAY,\n r'PumpDelay>([\\d]+)</PumpDelay',\n lambda match: int(match.group(1)),\n str,\n type=ParameterDictType.INT,\n display_name=\"Pump Delay\",\n range=INT16,\n description=\"Time to wait after minimum conductivity frequency is reached before turning pump on.\",\n startup_param=True,\n direct_access=False,\n default_value=60,\n units=Units.SECOND,\n visibility=ParameterDictVisibility.READ_WRITE)\n self._param_dict.add(Parameter.AUTO_RUN,\n r'AutoRun>(.*)</AutoRun',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Auto Run\",\n description=\"Enable automatic logging when power is applied: (true | false).\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=False,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.IGNORE_SWITCH,\n r'IgnoreSwitch>(.*)</IgnoreSwitch',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Ignore Switch\",\n description=\"Disable magnetic switch position for starting or stopping logging: (true | false)\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=True,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.OPTODE,\n r'OPTODE>(.*)</OPTODE',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Optode Attached\",\n description=\"Enable optode: (true | false)\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=True,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.VOLT1,\n r'ExtVolt1>(.*)</ExtVolt1',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Volt 1\",\n description=\"Enable external voltage 1: (true | false)\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=True,\n visibility=ParameterDictVisibility.IMMUTABLE)\n\n self._build_ctd_specific_params()", "def __init__(self, input_dim=(1, 28, 28), num_filters=32, filter_size=3,\n hidden_dim=100, num_classes=10, weight_scale=1e-3,\n dtype=np.float32):\n self.params = {}\n self.dtype = dtype\n\n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. #\n # Store weights for the convolutional layer using the keys 'W1' (here #\n # we do not consider the bias term in the convolutional layer); #\n # use keys 'W2' and 'b2' for the weights and biases of the #\n # hidden fully-connected layer, and keys 'W3' and 'b3' for the weights #\n # and biases of the output affine layer. For this question, we assume #\n # the max-pooling layer is 2x2 with stride 2. Then you can calculate the #\n # shape of features input into the hidden fully-connected layer, in terms #\n # of the input dimension and size of filter. 
#\n ############################################################################\n C, H, W = input_dim\n H_p = int((H - filter_size + 1) /2)\n W_p = int((W - filter_size + 1) /2)\n self.params['W1'] = np.random.normal(loc = 0.0, scale = weight_scale, size = (num_filters,C,filter_size,filter_size)).astype(self.dtype)\n self.params['W2'] = np.random.normal(loc = 0.0, scale = weight_scale, size = (num_filters*H_p*W_p,hidden_dim)).astype(self.dtype)\n self.params['b2'] = np.zeros(hidden_dim, dtype=self.dtype)\n self.params['W3'] = np.random.normal(loc = 0.0, scale = weight_scale, size = (hidden_dim,num_classes)).astype(self.dtype)\n self.params['b3'] = np.zeros(num_classes, dtype=self.dtype)\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.items():\n self.params[k] = v.astype(dtype)", "def __init__(self, config_data, dims, layer_num, params):\n self.use_bias = params['use_bias']\n self.in_dims = params['in_dims']\n self.out_dims = params['out_dims']\n self.use_bias = params['use_bias']\n self.num_outputs = params['num_outputs']\n self.dims = dims\n self.layer_num = layer_num\n self.activation = config_data[\"activation\"]\n self.layer_type = config_data[\"type\"]\n self.name = config_data[\"name\"]\n self.params = []\n # following two parameters not used in dense layers\n # they will be set to one in dense layers\n self.kernel_size = params['kernel_size'] # only used in conv layers\n self.stride = params['stride'] # only used in conv layers", "def config_params0(data,parameter):\n model = []\n #Range of value of p\n acf = sm.graphics.tsa.acf(data.diff().dropna())\n for i in range(len(acf)):\n acf[i] = abs(acf[i]*10)\n if (ceil(acf[i])) <= 2:\n p = range(ceil(acf[i])-1,ceil(acf[i])+2)\n break\n\n #range of value of q\n pacf = sm.graphics.tsa.pacf(data.diff().dropna())\n for i in range(len(pacf)):\n pacf[i] = abs(pacf[i]*10)\n if (ceil(pacf[i])) <= 2:\n q = range(ceil(pacf[i])-1,ceil(pacf[i])+2)\n break\n\n\t# define config lists\n p_params = p\n d_params = parameter['d']\n q_params = q\n m_params = parameter['m']\n #P_params = p\n #D_params = [0, 1]\n #Q_params = q\n \n pdq_m = list(itertools.product(p_params, d_params, q_params,m_params)) #Generate all different combinations of p, q and q triplets\n params = [[(x[0], x[1], x[2]),(x[0], x[1], x[2], x[3])] for x in pdq_m]\n return params", "def default_grid_feature_hp_kernel_config(defn):\n defn = _validate_definition(defn)\n config = {}\n\n grid = enumerate(zip(defn.models(), defn.hyperpriors()))\n\n for fi, (model, priors) in grid:\n partials = copy.deepcopy(model.default_partial_hypergrid())\n if not partials:\n continue\n\n evals = []\n for update_descs, fn in priors.iteritems():\n if not hasattr(update_descs, '__iter__'):\n update_descs = [update_descs]\n keyidxs = []\n for update_desc in update_descs:\n key, idx = _parse_descriptor(update_desc, default=None)\n keyidxs.append((key, idx))\n\n def func(raw, keyidxs):\n s = 0.\n for key, idx in keyidxs:\n if idx is None:\n s += raw[key]\n else:\n s += raw[key][idx]\n return s\n evals.append(lambda raw, keyidxs=keyidxs: func(raw, keyidxs))\n\n def jointprior(raw, evals):\n return np.array([f(raw) for f in evals]).sum()\n\n config[fi] = {\n 'hpdf': lambda raw, evals=evals: jointprior(raw, evals),\n 'hgrid': partials,\n }\n\n if not config:\n return []\n else:\n return [('grid_feature_hp', config)]", "def __init__(self, input_dim=(1, 28, 
28), num_classes=10):\n self.params = {}\n\n #######################################################################\n # TODO: Initialize weights and biases for the convolutional neural #\n # network. Weights should be initialized from a Gaussian distribution;#\n # biases should be initialized to zero. All weights and biases should #\n # be stored in the dictionary self.params. #\n #######################################################################\n\n filter_size = 5\n weight_scale = 1e-2\n num_filters = 6\n hidden_dim = 784\n\n #****** THIS WAS TO TEST OUT FASTER NETWORKS *******\n\n self.params['W1'] = np.random.normal(scale=weight_scale, size=(num_filters, input_dim[0], filter_size, filter_size))\n # self.params['W2'] = np.random.normal(scale=weight_scale, size=(num_filters, 6, filter_size, filter_size))\n self.params['W3'] = np.random.normal(scale=weight_scale, size=(864, num_classes))\n\n # self.params['W3'] = np.random.normal(scale=weight_scale, size=(hidden_dim, num_classes))\n # self.params['W4'] = np.random.normal(scale=weight_scale, size=(hidden_dim, num_classes))\n\n self.params['b1'] = np.zeros(num_filters)\n # self.params['b2'] = np.zeros(num_filters)\n self.params['b3'] = np.zeros(num_classes)\n\n # self.params['b3'] = np.zeros(num_classes)\n # self.params['b4'] = np.zeros(num_classes)", "def build_configs():", "def __init__(self, input_size, neurons):\n super().__init__()\n self.input_size = input_size\n self.neurons = neurons\n self.params[\"w\"] = np.random.randn(input_size, neurons)\n self.params[\"b\"] = np.random.randn(1, neurons)\n self.grads = {}", "def build(\n self, input_shape\n ):\n\n if isinstance(input_shape, tuple):\n expert_shapes, routing_input_shape = input_shape\n else:\n expert_shapes, routing_input_shape = input_shape, None\n num_experts = len(expert_shapes)\n # num_binary is the number of binary vars required to encode the\n # num_experts choices.\n self._num_binary = math.ceil(math.log2(num_experts))\n # Boolean to check if num_experts is a power of 2.\n self._power_of_2 = (num_experts == 2**self._num_binary)\n if routing_input_shape is None:\n # z_logits is a trainable 3D tensor used for selecting the experts.\n # Axis 0: Number of non-zero experts to select.\n # Axis 1: Dummy axis of length 1 used for broadcasting.\n # Axis 2: Each num_binary-dimensional row corresponds to a \"single-expert\"\n # selector.\n self._z_logits = self.add_weight(\n name=\"z_logits\",\n shape=(self._num_nonzeros, 1, self._num_binary),\n initializer=self._z_initializer,\n trainable=True)\n # w_logits is a trainable tensor used to assign weights to the\n # single-expert selectors. Each element of w_logits is a logit.\n self._w_logits = self.add_weight(\n name=\"w_logits\",\n shape=(self._num_nonzeros, 1),\n initializer=self._w_initializer,\n trainable=True)\n else:\n self._z_logits = tf.keras.layers.Dense(\n self._num_nonzeros * self._num_binary,\n kernel_initializer=self._z_initializer,\n bias_initializer=self._z_initializer)\n self._w_logits = tf.keras.layers.Dense(\n self._num_nonzeros,\n kernel_initializer=self._w_initializer,\n bias_initializer=self._w_initializer)\n # binary_matrix is a (num_experts, num_binary)-matrix used for binary\n # encoding. 
The i-th row contains a num_binary-digit binary encoding of the\n # integer i.\n binary_matrix = np.array([\n list(np.binary_repr(val, width=self._num_binary))\n for val in range(num_experts)\n ]).astype(bool)\n # A constant tensor = binary_matrix, with an additional dimension for\n # broadcasting.\n self._binary_codes = tf.expand_dims(\n tf.constant(binary_matrix, dtype=bool), axis=0)\n self.built = True", "def Params(cls):\n p = hyperparams.InstantiableParams(cls)\n\n p.Define('task_dict', None, 'dataset_name -> task params')\n p.Define('task_name', None, 'High level task name')\n p.Define('logdir', None, 'Log directory')\n p.Define('train_program', None, 'Train program params')\n p.Define('train_executions_per_eval', 1, '')\n p.Define('dataset_names', [], 'List of all dataset names.')\n p.Define('num_splits_per_client', None, '')\n\n p.Define('ml_perf', hyperparams.Params(), 'MlPerf configuration.')\n\n mlp = p.ml_perf\n mlp.Define('benchmark_name', None, 'Benchmark name for compliance log.')\n mlp.Define('decoder_metric_name', None,\n 'Name of the decoder metric to report for compliance log.')\n mlp.Define('decoder_metric_success_threshold', None,\n 'Benchmark run must exceed this value to succeed.')\n mlp.Define('max_steps_to_train', None,\n 'Maximum number of steps to reach target accuracy')\n mlp.Define('steps_per_epoch', None, 'Number of training steps per epoch.')\n mlp.Define('global_batch_size', None, 'Global batch size.')\n mlp.Define('max_sequence_length', None, 'Maximum sequence length.')\n mlp.Define('optimizer_name', None, 'Optimizer used.')\n mlp.Define('base_learning_rate', None, 'Base learning rate.')\n mlp.Define('warmup_steps', None, 'Number of warm-up steps.')\n\n return p", "def Params(cls):\n p = hyperparams.InstantiableParams(cls)\n p.Define('task_dict', None, 'dataset_name -> task params')\n p.Define('task_name', None, 'High level task name')\n p.Define('logdir', None, 'Log directory')\n p.Define('train_program', None, 'Train program params')\n p.Define('train_executions_per_eval', 1, '')\n p.Define('eval_programs', [], 'List of eval program params.')\n p.Define('num_splits_per_client', None, '')\n p.Define('dataset_names', [], 'List of all dataset names.')\n p.Define('emails', [], 'List of emails to send metrics.')\n p.Define('summary_exporter', None, 'The summary exporter Params.')\n p.Define('async_postprocess', True,\n 'whether to CPU postprocess asynchronously with TPU train')\n p.Define(\n 'checkpoint_to_load', None,\n 'If set, the program will initially load from this checkpoint, '\n 'ignoring train_dir. 
Typically used for oneoff decode.')\n\n # TODO(blee): Clean these up.\n p.Define('ml_perf', hyperparams.Params(), 'MlPerf configuration.')\n mlp = p.ml_perf\n mlp.Define('submission_metadata', None,\n 'A dictionary of static submission metadata')\n mlp.Define('benchmark_name', None, 'Benchmark name for compliance log.')\n mlp.Define('steps_per_epoch', None, 'Number of training steps per epoch.')\n mlp.Define('decoder_metric_name', None,\n 'Name of the decoder metric to report for compliance log.')\n mlp.Define('decoder_metric_success_threshold', None,\n 'Benchmark run must exceed this value to succeed.')\n mlp.Define('max_steps_to_train', None,\n 'Maximum number of steps to reach target accuracy')\n return p", "def run(self):\n if self.pp['net']:\n space = {\n # Qlearnnet\n 'net_lr': hp.loguniform('net_lr', np.log(5e-7), np.log(1e-4)),\n 'net_lr_decay': hp.loguniform('net_lr_decay', np.log(0.90), np.log(0.99)),\n # Singh\n # 'net_lr': hp.loguniform('net_lr', np.log(1e-7), np.log(5e-4)),\n 'beta': hp.uniform('beta', 16, 30),\n # Double\n 'net_copy_iter': hp.loguniform('net_copy_iter', np.log(5), np.log(150)),\n 'net_creep_tau': hp.loguniform('net_creep_tau', np.log(0.01),\n np.log(0.7)),\n # Exp. replay\n 'batch_size': scope.int(hp.uniform('batch_size', 8, 16)),\n 'buffer_size': scope.int(hp.uniform('buffer_size', 2000, 10000)),\n # N-step\n 'n_step': scope.int(hp.uniform('n_step', 3, 40)),\n # Policy\n 'vf_coeff': hp.uniform('vf_coeff', 0.005, 0.5),\n 'entropy_coeff': hp.uniform('entropy_coeff', 1.0, 100.0)\n }\n else:\n space = {\n 'beta': hp.uniform('beta', 7, 23),\n 'alpha': hp.uniform('alpha', 0.0001, 0.4),\n 'alpha_decay': hp.uniform('alpha_decay', 0.9999, 0.9999999),\n 'epsilon': hp.loguniform('epsilon', np.log(0.2), np.log(0.8)),\n 'epsilon_decay': hp.uniform('epsilon_decay', 0.9995, 0.9999999),\n 'gamma': hp.uniform('gamma', 0.7, 0.90),\n 'lambda': hp.uniform('lambda', 0.0, 1.0)\n }\n # Only optimize parameters specified in args\n space = {param: space[param] for param in self.pp['hopt']}\n if self.pp['hopt_fname'].startswith('mongo:'):\n self._hopt_mongo(space)\n else:\n self._hopt_pickle(space)", "def build(self):\n self.build_inputs()\n self.build_image_embeddings()\n self.build_seq_embeddings()\n self.build_encoder()\n self.build_prediction_model()\n self.setup_encoder_initializer()\n self.setup_global_step()\n self.list_trainable_variables()", "def set_params(self, config):\n params = {'n_bins', 'edges', 'classes', 'chi', 'n_params'}\n self.__dict__.update((param, np.array(value)) for param, value in config.items() if param in params)", "def __init__(self, \n input_dim=(3, 32, 32), \n num_filters = (32, 64), filter_sizes = (7, 7), conv_param = {\"stride\": 1, \"pad\": 3},\n hidden_dim= 100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32\n ):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n self.conv_param = conv_param\n self.filter_sizes = filter_sizes\n self.num_layers = 4\n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. 
#\n ############################################################################\n \n C, H, W = input_dim\n filter_size1, filter_size2 = filter_sizes\n num_filters1, num_filters2 = num_filters\n\n # conv layer 1: (N, C, H, W) -> (N, num_filters1, H, W)\n self.params['W1'] = np.random.normal(0, weight_scale, [num_filters1, C, filter_size1, filter_size1]) # square filter\n self.params['b1'] = np.zeros((num_filters1, ))\n self.params[\"sbnGamma1\"] = np.ones((num_filters1, )) # scale parameter one for each color channel during spatial batch norm\n self.params[\"sbnBeta1\"] = np.zeros((num_filters1, )) # shift parameter one for each color channel during spatial batch norm\n\n # conv layer 2: (N, num_filters1, H, W) -> (N, num_filters2, H, W)\n self.params['W2'] = np.random.normal(0, weight_scale, [num_filters2, num_filters1, filter_size2, filter_size2]) # square filter\n self.params['b2'] = np.zeros((num_filters2, ))\n self.params[\"sbnGamma2\"] = np.ones((num_filters2, ))\n self.params[\"sbnBeta2\"] = np.zeros((num_filters2, ))\n\n # (2, 2, 2) maxpool: (N, num_filters2, H, W) -> (N, num_filters2, H/2. W/2)\n # maxpool layer contributes nothing to self.params that need to be updated.\n self.maxpool_params = {\"pool_height\": 2, \"pool_width\": 2, \"stride\": 2}\n\n # affine layer 3: (N, num_filters2, H/2. W/2) -> (N, hidden_dim)\n self.params['W3'] = np.random.normal(0, weight_scale, [num_filters2 * (H / 2) * (W / 2), hidden_dim])\n self.params['b3'] = np.zeros((hidden_dim, ))\n self.params[\"bnGamma3\"] = np.ones((hidden_dim, ))\n self.params[\"bnBeta3\"] = np.zeros((hidden_dim, ))\n\n # output affine - sfmx layer 4: (N, hidden_dim) -> (N, num_classes)\n self.params['W4'] = np.random.normal(0, weight_scale, [hidden_dim, num_classes])\n self.params['b4'] = np.zeros((num_classes, ))\n\n self.bn_params = [{\"mode\": \"train\"} for _ in range(self.num_layers)]\n\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def _read_config(config_path):\n hyperparameters = {}\n\n config_df = pd.read_csv(config_path)\n for _, row in config_df.iterrows():\n # Randomly initialize a hyperparameter using the search space from the config file\n hyperparameter_name = str(row['name'])\n min_value = float(row['min'])\n max_value = float(row['max'])\n search_scale = str(row['scale'])\n\n if search_scale == 'log':\n # Sample randomly along a logarithm search scale\n min_exp = math.log(min_value, 10)\n max_exp = math.log(max_value, 10)\n random_exp = min_exp + random.random() * (max_exp - min_exp)\n hyperparameter_value = 10 ** random_exp\n elif search_scale == 'linear':\n # Sample randomly along a linear search scale\n hyperparameter_value = min_value + random.random() * (max_value - min_value)\n else:\n raise ValueError('Expected \"log\" or \"linear\" search scale, got \"{}\"'.format(search_scale))\n\n hyperparameters[hyperparameter_name] = hyperparameter_value\n\n return hyperparameters", "def tuned_for_ec():\n # TODO(theosanderson): update these to true SOTA values\n hparams = contrib_training.HParams()\n hparams.add_hparam('gradient_clipping_decay', 0.9999)\n hparams.add_hparam('batch_style', 'bucket')\n hparams.add_hparam('batch_size', 34)\n hparams.add_hparam('dilation_rate', 5)\n hparams.add_hparam('filters', 411)\n hparams.add_hparam('first_dilated_layer', 1) # This is 0-indexed\n 
hparams.add_hparam('kernel_size', 7)\n hparams.add_hparam('num_layers', 5)\n hparams.add_hparam('pooling', 'mean')\n hparams.add_hparam('resnet_bottleneck_factor', 0.88152)\n hparams.add_hparam('lr_decay_rate', 0.9977)\n hparams.add_hparam('learning_rate', 0.00028748)\n hparams.add_hparam('decision_threshold', 0.3746)\n hparams.add_hparam('denominator_power', 0.88)\n\n hparams.add_hparam('train_steps', 650000)\n return hparams", "def __init__(self, model, data, batch_size=50, num_epochs=2, optim_type=\"adam\", optim_config={'learning_rate': 1e-2,}, lr_decay=1.0, num_train_samples=100, num_val_samples=None, verbose=True):\n self.model = model\n \n self.X_train = data[\"X_train\"]\n self.y_train = data[\"y_train\"]\n self.X_val = data[\"X_val\"]\n self.y_val = data[\"y_val\"]\n\n # Setting up variables for the hyperparameters\n \n self.optim_type = optim_type\n self.optim_config = optim_config # dict containing hyperparameters related to parameter update\n self.lr_decay = lr_decay # learning rate decay rate\n self.batch_size = batch_size\n self.num_epochs = num_epochs\n self.num_train_samples = num_train_samples\n self.num_val_samples = num_val_samples\n\n self.print_every = 20\n self.verbose = verbose\n \n # Setting up some extra variables for faster convergence / book-keeping\n \n self.epoch = 0 # to keep track of number of epochs done\n self.best_val_acc = 0 # to keep track of the best val accuracy across all epochs\n self.best_params = {} # to keep track of best model across all epochs\n self.latest_loss = 0 # to keep track of loss in latest iteration\n\n # Making a copy of the optim_config for each parameter\n # for using in other functions of the solver class\n # optim_cofig contains first and second moment of gradients, if applicable, wrt 1 param and hence each parameter has its own optim_config dict\n \n self.optim_configs = {} # dictionary containing config dicts of all params\n for p in self.model.params:\n d = {k: v for k, v in self.optim_config.items()} # copying the input config dict to config dicts of all params\n self.optim_configs[p] = d", "def __init__(self, hyperparameters, total_dim, num_is):\n self._dim = total_dim # dimension of IS \\times search space\n self._num_is = num_is # Number of information sources, then including 0th IS (truth), size of hyper should be dim * (num_is+1).\n # Note: it's not (dim+1)*(num_is+1) because dimension of search space is (dim-1), plus the multiplication factor param is dim\n self.set_hyperparameters(hyperparameters)", "def __init__(self, size, parameters):\n\n self.weights = self.init_weights(size)\n self.alpha = parameters['alpha']\n self.epsilon = parameters['epsilon']\n self.gamma = parameters['gamma']\n self.value = 0.0 #np.random.random()", "def parameter_initialization(self):\n dictsize = settings.PARS.get('numBases')\n numClass = self.train_labels.shape[0] # number of objects\n Dinit = np.empty((self.train_feats.shape[0], 0)) # for C-Ksvd and D-Ksvd\n dictLabel = np.empty((numClass, 0), dtype=np.int)\n numPerClass = dictsize//numClass\n param1 = {\n 'mode': 2,\n 'K': settings.PARS.get('numBases'), # size of the dictionary\n 'lambda1': settings.PARS.get('lambda_'),\n 'lambda2': 0,\n 'iter': settings.PARS.get('iterationini')\n }\n param2 = {\n 'lambda1': settings.PARS.get('lambda_'),\n 'lambda2': 0,\n 'mode': 2\n }\n\n for classid in range(numClass):\n col_ids = np.array(np.nonzero(self.train_labels[classid, :] == 1)).ravel()\n # ensure no zero data elements are chosen\n data_ids = np.array(np.nonzero(np.sum(self.train_feats[:, 
col_ids]**2, axis=0) > 1e-6)).ravel()\n\n # Raising an error if any zero lement is found\n if col_ids.shape[0] != data_ids.shape[0]:\n raise DatasetZeroElementFound\n\n # Initilization for LC-KSVD (perform KSVD in each class)\n Dpart = self.train_feats[:, col_ids[np.random.choice(data_ids, numPerClass, replace=False)]]\n param1['D'] = Dpart # initial dictionary\n Dpart = trainDL(self.train_feats[:, col_ids[data_ids]], **param1)\n Dinit = np.c_[Dinit, Dpart]\n labelvector = np.zeros((numClass, 1), dtype=np.int)\n labelvector[classid] = 1\n dictLabel = np.c_[dictLabel, np.tile(labelvector, (1, numPerClass))]\n\n param1['D'] = np.asfortranarray(Dinit) # initial dictionary\n # RuntimeError: matrix arg 10 must be a 2d double Fortran Array\n self.train_feats = self.train_feats if np.isfortran(self.train_feats) else np.asfortranarray(self.train_feats)\n Dinit = trainDL(self.train_feats, **param1)\n Xinit = lasso(self.train_feats, Dinit, **param2)\n\n # learning linear classifier parameters\n tmp = np.linalg.inv(Xinit@Xinit.T+np.eye(*(Xinit@Xinit.T).shape))@Xinit\n Winit = tmp@self.train_labels.T\n Winit = Winit.T\n\n Q = np.zeros((dictsize, self.train_feats.shape[1])) # energy matrix\n\n for frameid in range(self.train_feats.shape[1]):\n label_training = self.train_labels[:, frameid]\n maxid1 = label_training.argmax(0)\n\n for itemid in range(Dinit.shape[1]):\n label_item = dictLabel[:, itemid]\n maxid2 = label_item.argmax(0)\n\n if maxid1 == maxid2:\n Q[itemid, frameid] = 1\n\n Tinit = tmp@Q.T\n Tinit = Tinit.T\n\n return Dinit, Winit, Tinit, Q", "def __init__(self, params):\n super().__init__(params)\n p = self.params\n assert p.input_dim, f'input_dim is {p.input_dim}'\n assert p.hidden_dim, f'hidden_dim is {p.hidden_dim}'\n assert p.num_heads > 0, f'num_heads is {p.num_heads}'\n # if proj_tpl does not have dim_per_head set, set it\n if p.proj_tpl.dim_per_head == 0:\n p.proj_tpl.dim_per_head = self.dim_per_head\n\n if p.device_mesh is not None:\n assert p.weight_split_dims_mapping is not None\n assert p.activation_split_dims_mapping is not None\n\n if isinstance(p.weight_split_dims_mapping, dict):\n qkv_weight_split_dims_mapping = p.weight_split_dims_mapping['qkv']\n post_weight_split_dims_mapping = p.weight_split_dims_mapping['post']\n else:\n qkv_weight_split_dims_mapping = p.weight_split_dims_mapping\n post_weight_split_dims_mapping = p.weight_split_dims_mapping\n\n def ProjectInput(input_dim):\n return p.proj_tpl.Copy().Set(\n input_dim=input_dim,\n num_heads=p.num_heads,\n use_bias=p.use_bias,\n device_mesh=p.device_mesh,\n weight_split_dims_mapping=qkv_weight_split_dims_mapping,\n make_output_proj_no_op=False)\n\n def ProjectInputOneStep(input_dim):\n return p.proj_tpl.Copy().Set(\n input_dim=input_dim,\n num_heads=p.num_heads,\n dim_per_head=self.dim_per_head * 3,\n use_bias=p.use_bias,\n device_mesh=p.device_mesh,\n weight_split_dims_mapping=qkv_weight_split_dims_mapping,\n make_output_proj_no_op=False,\n )\n\n if isinstance(p.input_dim, dict):\n key_input_dim = p.input_dim['key']\n value_input_dim = p.input_dim['value']\n query_input_dim = p.input_dim['query']\n assert key_input_dim, f'key_input_dim is {key_input_dim}'\n assert query_input_dim, f'query_input_dim is {query_input_dim}'\n else:\n key_input_dim = p.input_dim\n value_input_dim = p.input_dim\n query_input_dim = p.input_dim\n\n if p.enable_value_proj and p.enable_qkv_proj_in_onestep:\n self.CreateChild('qkv', ProjectInputOneStep(key_input_dim))\n else:\n self.CreateChild('key', ProjectInput(key_input_dim))\n 
self.CreateChild('query', ProjectInput(query_input_dim))\n if p.enable_value_proj:\n assert value_input_dim, f'value_input_dim is {value_input_dim}'\n self.CreateChild('value', ProjectInput(value_input_dim))\n if p.enable_query_scale and p.enable_per_dim_scale:\n self.CreateChild(\n 'per_dim_scale',\n PerDimScaleLayer.Params().Set(dim=p.proj_tpl.dim_per_head))\n self.CreateChild('atten_dropout',\n p.dropout_tpl.Set(keep_prob=1.0 - p.atten_dropout_prob))\n # Setting is_output_projection=True to set the projection direction\n # from hidden dim to input dim. Output projection follows query_input_dim.\n self.CreateChild(\n 'post',\n p.proj_tpl.Copy().Set(\n input_dim=p.output_dim or query_input_dim,\n num_heads=p.num_heads,\n is_output_projection=True,\n use_bias=p.use_bias,\n device_mesh=p.device_mesh,\n weight_split_dims_mapping=post_weight_split_dims_mapping))\n\n if p.rope_tpl:\n assert issubclass(p.rope_tpl.cls, layers.RotaryPositionalEmbeddingLayer)\n rope_p = p.rope_tpl.Copy()\n if rope_p.embedding_dim == 0:\n rope_p.embedding_dim = self.dim_per_head\n self.CreateChild('rope', rope_p)\n\n if p.attn_add_memory:\n assert p.memory_tpl is not None\n self.CreateChild(\n 'lsh_mem',\n p.memory_tpl.Copy().Set(\n input_dim=self.dim_per_head,\n output_dim=self.dim_per_head,\n name='attn_lsh_mem'))\n if p.use_scale_invariant_atten:\n assert not (p.enable_scaling_code_motion or p.atten_extra_logit)", "def __init__(self, parameter_dictionary):\n super().__init__(parameter_dictionary)\n\n self.model_string = \"gauss\"\n model_dictionary = self._get_model_dict(__class__.default_parameters)\n\n # wake expansion parameters\n self.ka = model_dictionary[\"ka\"]\n self.kb = model_dictionary[\"kb\"]\n\n # near wake / far wake boundary parameters\n self.alpha = model_dictionary[\"alpha\"]\n self.beta = model_dictionary[\"beta\"]\n\n # GCH Parameters\n self.calculate_VW_velocities = model_dictionary[\"calculate_VW_velocities\"]\n self.use_yaw_added_recovery = model_dictionary[\"use_yaw_added_recovery\"]\n self.eps_gain = model_dictionary[\"eps_gain\"]", "def get_config(self):\n config = {'epsilon':self.eps}\n base_config = super(LogTransform, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))", "def __init__(self, hidden_dims, input_dim=3*32*32, num_classes=10,\n reg=0.0, weight_scale=1e-2, dtype=np.float32):\n self.reg = reg\n self.num_layers = 1 + len(hidden_dims)\n self.dtype = dtype\n self.params = {}\n\n ############################################################################\n # Initialize the network parameters with different weights and biases for #\n # network layers #\n ############################################################################\n \n key = ['W' + str(1), 'b' + str(1)]\n self.params[key[0]] = weight_scale * np.random.randn(input_dim, hidden_dims[0])\n self.params[key[1]] = np.zeros(hidden_dims[0])\n \n for i in range(1, len(hidden_dims)):\n key = ['W' + str(i+1), 'b' + str(i+1)]\n \n self.params[key[0]] = weight_scale * np.random.randn(hidden_dims[i-1], hidden_dims[i])\n self.params[key[1]] = np.zeros(hidden_dims[i])\n\n key = ['W' + str(self.num_layers), 'b' + str(self.num_layers)]\n self.params[key[0]] = weight_scale * np.random.randn(hidden_dims[len(hidden_dims)-1], num_classes)\n self.params[key[1]] = np.zeros(num_classes)\n\n\n # Cast all parameters to the correct datatype\n for k, v in self.params.items():\n self.params[k] = v.astype(dtype)", "def config(self) -> dict:\n if self.__class__.__name__ == 'MyFunctionTransformer':\n pass\n else:\n 
check_is_fitted(self)\n\n _config = {}\n for attr in self.config_paras:\n _config[attr] = getattr(self, attr)\n\n return {\"params\": self.get_params(),\n \"config\": _config}", "def build_by_params(task, params, input_size):\n if params is None or task is None or input_size is None:\n print(\"You need to pass a valid parameter grid, task or input size\")\n return -1\n \n #params type check\n for param in params:\n if type(params[param]) is list and param != \"layers\" and param != \"dropout\":\n if type(params[param][0]) is list:\n print(\"no list of list (problem in \", param, \")\")\n else:\n print(\"only layers or dropout as lists (problem in \", param, \")\")\n return -1\n elif type(params[param]) is tuple and param != 'lr_sched' and param != 'layers':\n print(\"only lr_sched or layer as tuple (problem in \", param, \")\")\n return -1\n\n nn = NeuralNetwork()\n for i in range(len(params['layers'])):\n if i == 0:\n nn.add_layer('dense', params['layers'][i], params['activation'], input_size)\n else:\n if i == len(params['layers']) - 1 and task == 'Regression':\n nn.add_layer('dense', params['layers'][i], 'linear')\n else:\n nn.add_layer('dense', params['layers'][i], params['activation'])\n\n nn.compile(task=task,\n dropout=params['dropout'],\n l2_lambda=params['l2_lambda'],\n optimizer=SGD(lr_init=params['lr'],\n momentum=params['momentum'],\n nesterov=params['nesterov'],\n lr_sched=StepDecayScheduler(drop=params['lr_sched'][0],epochs_drop=params['lr_sched'][1])))\n return nn", "def __init__(self, api_config):\n AbstractOptimizer.__init__(self, api_config)\n \n api_space = BoEI.api_manipulator(api_config) # used for GPyOpt initialization\n\n self.space_x = JointSpace(api_config) # used for warping & unwarping of new suggestions & observations\n\n self.hasCat, self.cat_vec = BoEI.is_cat(api_config)\n \n self.dim = len(self.space_x.get_bounds())\n\n self.objective = GPyOpt.core.task.SingleObjective(None)\n\n self.space = GPyOpt.Design_space(api_space)\n \n self.model = GPyOpt.models.GPModel(optimize_restarts=5,verbose=False)\n \n self.aquisition_optimizer = GPyOpt.optimization.AcquisitionOptimizer(self.space)\n \n \n self.aquisition = AcquisitionEI(self.model, self.space, optimizer=self.aquisition_optimizer, cost_withGradients=None)\n \n self.batch_size = None", "def __init__(self):\n # super(MultiEmbedding,self).__init__()\n HyperParameters.__init__(self)", "def build(self): \n self.computation_graph = tf.Graph()\n with self.computation_graph.as_default():\n\n self.factorization_layer = Factorization(self.args, self.vocab_size)\n self.regularizer_layer = Regularization(self.args)\n\n self.gamma = tf.placeholder(\"float\")\n self.loss = self.factorization_layer()\n\n self.batch = tf.Variable(0)\n self.step = tf.placeholder(\"float\")\n \n self.learning_rate_new = tf.train.polynomial_decay(self.args.initial_learning_rate,\n self.batch,\n self.true_step_size,\n self.args.minimal_learning_rate,\n self.args.annealing_factor)\n \n self.train_op = tf.train.AdamOptimizer(self.learning_rate_new).minimize(self.loss, global_step = self.batch)\n \n self.init = tf.global_variables_initializer()\n\n self.weights = overlap_generator(self.args, self.graph)", "def build(self): \n self.computation_graph = tf.Graph()\n with self.computation_graph.as_default():\n\n self.factorization_layer = Factorization(self.args, self.vocab_size)\n self.cluster_layer = Clustering(self.args)\n\n self.gamma = tf.placeholder(\"float\")\n self.loss = 
self.factorization_layer()+self.gamma*self.cluster_layer(self.factorization_layer)\n\n self.batch = tf.Variable(0)\n self.step = tf.placeholder(\"float\")\n \n self.learning_rate_new = tf.train.polynomial_decay(self.args.initial_learning_rate,\n self.batch,\n self.true_step_size,\n self.args.minimal_learning_rate,\n self.args.annealing_factor)\n \n self.train_op = tf.train.AdamOptimizer(self.learning_rate_new).minimize(self.loss, global_step = self.batch)\n \n self.init = tf.global_variables_initializer()\n\n self.weights = overlap_generator(self.args, self.graph)", "def initializeParameters(self):\r\n\t\tself.input_raster.enabled = True\r\n\t\tself.approach.enabled = True\r\n\t\tself.predefined_pattern.enabled = False\r\n\t\tself.predefined_pattern.value = 'Mexican Hat wavelet'\r\n\t\tself.pattern_workspace.enabled = False\r\n\t\tself.point_matrix_size.enabled = False\r\n\t\tself.point_matrix_size.value = 3\r\n\t\tself.point_vectors.enabled = False\r\n\t\tself.mapping_field.enabled = False\r\n\t\tself.move_to_max.enabled = False\r\n\t\tself.move_to_max_distance.enabled = False\r\n\t\tself.move_to_max_distance.value = 3\r\n\t\tself.mh_iteration.enabled = False\r\n\t\tself.mh_dil_val.enabled = False\r\n\t\tself.mh_dil_val.value = 1\r\n\t\tself.mh_dil_start.value = 0.01\r\n\t\tself.mh_dil_stop.value = 1\r\n\t\tself.mh_dil_step.value = 0.1\r\n\t\tself.mh_dil_start.enabled = False\r\n\t\tself.mh_dil_stop.enabled = False\r\n\t\tself.mh_dil_step.enabled = False\r\n\t\tself.transform.enabled = False\r\n\t\tself.size_of_the_cell.enabled = False\r\n\t\tself.size_of_the_cell.value = 1\r\n\t\tself.output_sim_matrix.enabled = False\r\n\t\tself.output_table.enabled = False\r\n\t\tself.output_raster_workspace.enabled = False", "def build(self): \n self.computation_graph = tf.Graph()\n with self.computation_graph.as_default():\n\n self.factorization_layer = Factorization(self.args, self.vocab_size)\n self.cluster_layer = Clustering(self.args)\n self.regularizer_layer = Regularization(self.args)\n\n self.gamma = tf.placeholder(\"float\")\n self.loss = self.factorization_layer()+self.gamma*self.cluster_layer(self.factorization_layer)+self.regularizer_layer(self.factorization_layer)\n\n self.batch = tf.Variable(0)\n self.step = tf.placeholder(\"float\")\n \n self.learning_rate_new = tf.train.polynomial_decay(self.args.initial_learning_rate,\n self.batch,\n self.true_step_size,\n self.args.minimal_learning_rate,\n self.args.annealing_factor)\n \n self.train_op = tf.train.AdamOptimizer(self.learning_rate_new).minimize(self.loss, global_step = self.batch)\n \n self.init = tf.global_variables_initializer()\n\n self.weights = overlap_generator(self.args, self.graph)", "def __init__(self, config_data, in_dims, layer_num):\n self.layer_num = layer_num\n self.in_dims = in_dims\n self.out_dims = in_dims[\"width\"] * in_dims[\"height\"] * in_dims[\"channels\"]\n self.layer_type = config_data[\"type\"]\n self.name = config_data[\"name\"]\n self.params = []", "def _initialize_parameters(self, layer_dimensions, layer_activations, cost_function):\n self.layer_dims = layer_dimensions\n self.layer_num = len(self.layer_dims)\n self.layer_activations = layer_activations\n self.parameters = {}\n self.cost_function = cost_function\n\n assert(len(self.layer_activations) == len(self.layer_dims),\n 'Number of layers in layer_dimensions: {} and layer_activations: {} are not matching'.format(self.layer_num, len(self.layer_activations)))\n\n for l in range(1, self.layer_num):\n self.parameters['W' + str(l)] = 
np.random.randn(self.layer_dims[l], self.layer_dims[l-1])\n self.parameters['b' + str(l)] = np.zeros(self.layer_dims[l], 1)", "def build(self): \n self.computation_graph = tf.Graph()\n with self.computation_graph.as_default():\n\n self.factorization_layer = Factorization(self.args, self.vocab_size)\n self.regularizer_layer = Regularization(self.args)\n\n self.gamma = tf.placeholder(\"float\")\n self.loss = self.factorization_layer()+self.regularizer_layer(self.factorization_layer)\n\n self.batch = tf.Variable(0)\n self.step = tf.placeholder(\"float\")\n \n self.learning_rate_new = tf.train.polynomial_decay(self.args.initial_learning_rate,\n self.batch,\n self.true_step_size,\n self.args.minimal_learning_rate,\n self.args.annealing_factor)\n \n self.train_op = tf.train.AdamOptimizer(self.learning_rate_new).minimize(self.loss, global_step = self.batch)\n \n self.init = tf.global_variables_initializer()\n\n self.weights = overlap_generator(self.args, self.graph)", "def get_config(self):\n layer_config = {\n \"anchors\": self._anchors, \n \"classes\": self._classes,\n \"ignore_thresh\": self._ignore_thresh, \n \"truth_thresh\": self._truth_thresh, \n \"iou_thresh\": self._iou_thresh, \n \"loss_type\": self._loss_type, \n \"iou_normalizer\": self._iou_normalizer,\n \"cls_normalizer\": self._cls_normalizer, \n \"scale_x_y\": self._scale_x_y, \n }\n layer_config.update(super().get_config())\n return layer_config", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_nav', 'g_kvhh', 'g_kva', 'g_kvsi', \n 'g_cav', 'g_kca', 'g_nap', 'g_kir']\n gX_name: List[str] = list(itertools.compress(gX_name, list(self.channel_bool.values())[:9]))\n gX_log: np.ndarray = 4 * np.random.rand(len(gX_name)) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(len(gX_name))) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n gR_name: List[str] = ['g_ampar', 'g_nmdar', 'g_gabar']\n gR_name: List[str] = list(itertools.compress(gR_name, list(self.channel_bool.values())[9:12]))\n gR_log: np.ndarray = 4 * np.random.rand(len(gR_name)) - 3 # from -3 to 1\n gR: np.ndarray = (10 * np.ones(len(gR_name))) ** gR_log # 0.001 ~ 10\n gR_itr: Iterator = zip(gR_name, gR)\n\n param_dict.update(gX_itr)\n param_dict.update(gR_itr)\n\n if self.channel_bool['ca']:\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n param_dict.update(tCa_dict)\n\n return param_dict", "def initialize(self):\n params = {}\n for i in range(1, len(self.layer_dimensions)):\n params['b_' + str(i)] = np.ones((self.layer_dimensions[i], 1))\n if self.he_initialization:\n params['W_' + str(i)] = np.random.randn(self.layer_dimensions[i],\n self.layer_dimensions[i - 1]) * np.sqrt(\n 2 / self.layer_dimensions[i - 1])\n else:\n params['W_' + str(i)] = np.random.rand(self.layer_dimensions[i], self.layer_dimensions[i - 1]) - 0.5\n return params", "def __init__(self, prod_obj, log_level=logutil.logging.NOTSET, use_defaults=True, input_custom_pars_file=None,\n output_custom_pars_file=None):\n log.setLevel(log_level)\n if input_custom_pars_file and input_custom_pars_file and input_custom_pars_file == output_custom_pars_file:\n sys.exit(\"ERROR: Input and output parameter files must have unique names!\")\n self.label = \"hap_config\"\n self.description = \"A set of routines to generate appropriate set of configuration parameters\"\n self.instrument = prod_obj.instrument\n self.detector = prod_obj.detector\n self.inst_det = 
\"{}_{}\".format(prod_obj.instrument, prod_obj.detector).lower()\n self.use_defaults = use_defaults\n self.input_custom_pars_file = input_custom_pars_file\n self.output_custom_pars_file = output_custom_pars_file\n\n # The filters attribute is populated by _determine_conditions()\n self.filters = None\n\n self._determine_conditions(prod_obj)\n self.full_cfg_index, self.pars_dir = read_index(self.instrument,\n self.detector)\n\n # Instantiate the parameter set\n self.pars = {}\n\n # open input parameter file if specified by user\n if self.input_custom_pars_file:\n with open(self.input_custom_pars_file) as f_cfg:\n self.input_cfg_json_data = json.load(f_cfg)[prod_obj.product_basename]\n else:\n self.input_cfg_json_data = None\n\n # generate parameter sets for each pipeline step\n # step_name_list = [AstrodrizzlePars, CatalogGenerationPars, QualityControlPars]\n # step_title_list = ['astrodrizzle', 'catalog generation', 'quality control']\n for step_title, step_name in zip(step_title_list, step_name_list):\n cfg_index = self.full_cfg_index[step_title]\n self.pars[step_title] = step_name(cfg_index,\n self.conditions,\n self.pars_dir,\n step_title,\n self.use_defaults,\n self.input_cfg_json_data)\n\n # write out all parameters to file if specified by user\n if output_custom_pars_file:\n self.write_pars(prod_obj)", "def config1() :\n data_name = \"titanic\" ### in data/input/\n model_class = 'AutoML' ### ACTUAL Class name for model_sklearn.py\n n_sample = 1000\n\n def post_process_fun(y): ### After prediction is done\n return int(y)\n\n def pre_process_fun(y): ### Before the prediction is done\n return int(y)\n\n\n model_dict = {'model_pars': {\n ### LightGBM API model #######################################\n 'model_class': model_class\n ,'model_pars' : {\n 'total_time_limit' : 20,\n 'algorithms' : 'auto',\n 'results_path' : root_repo + f'/data/output/{data_name}/{os_get_function_name()}/automl_1',\n 'eval_metric' : 'auto'\n\n # mode='Explain',\n # ml_task='auto', model_time_limit=None, algorithms='auto', train_ensemble=True,\n # stack_models='auto', eval_metric='auto', validation_strategy='auto', explain_level='auto',\n # golden_features='auto', features_selection='auto', start_random_models='auto',\n # hill_climbing_steps='auto', top_models_to_improve='auto', verbose=1, random_state=1234)\n }\n\n , 'post_process_fun' : post_process_fun ### After prediction ##########################################\n , 'pre_process_pars' : {'y_norm_fun' : pre_process_fun , ### Before training ##########################\n\n\n ### Pipeline for data processing ##############################\n 'pipe_list': [\n #### coly target prorcessing\n {'uri': 'source/prepro.py::pd_coly', 'pars': {}, 'cols_family': 'coly', 'cols_out': 'coly', 'type': 'coly' },\n\n\n {'uri': 'source/prepro.py::pd_colnum_bin', 'pars': {}, 'cols_family': 'colnum', 'cols_out': 'colnum_bin', 'type': '' },\n {'uri': 'source/prepro.py::pd_colnum_binto_onehot', 'pars': {}, 'cols_family': 'colnum_bin', 'cols_out': 'colnum_onehot', 'type': '' },\n\n #### catcol INTO integer, colcat into OneHot\n {'uri': 'source/prepro.py::pd_colcat_bin', 'pars': {}, 'cols_family': 'colcat', 'cols_out': 'colcat_bin', 'type': '' },\n # {'uri': 'source/prepro.py::pd_colcat_to_onehot', 'pars': {}, 'cols_family': 'colcat_bin', 'cols_out': 'colcat_onehot', 'type': '' },\n\n\n ### Cross_feat = feat1 X feat2\n # {'uri': 'source/prepro.py::pd_colcross', 'pars': {}, 'cols_family': 'colcross', 'cols_out': 'colcross_pair', 'type': 'cross'},\n\n\n #### Example of Custom processor\n 
#{'uri': THIS_FILEPATH + '::pd_col_myfun', 'pars': {}, 'cols_family': 'colnum', 'cols_out': 'col_myfun', 'type': '' }, \n\n\n ],\n }\n },\n\n 'compute_pars': { 'metric_list': ['accuracy_score','average_precision_score']\n\n ,'mlflow_pars' : None # {} ### Not empty --> use mlflow\n },\n\n 'data_pars': { 'n_sample' : n_sample,\n\n 'download_pars' : None,\n\n\n 'cols_input_type' : cols_input_type_1,\n ### family of columns for MODEL #########################################################\n # \"colnum\", \"colnum_bin\", \"colnum_onehot\", \"colnum_binmap\", #### Colnum columns\n # \"colcat\", \"colcat_bin\", \"colcat_onehot\", \"colcat_bin_map\", #### colcat columns\n # 'colcross_single_onehot_select', \"colcross_pair_onehot\", 'colcross_pair', #### colcross columns 'coldate', 'coltext',\n 'cols_model_group': [ 'colnum_bin',\n 'colcat_bin',\n # 'coltext',\n # 'coldate',\n #'colcross_pair',\n \n ### example of custom\n # 'col_myfun'\n ]\n\n ### Filter data rows ##################################################################\n ,'filter_pars': { 'ymax' : 2 ,'ymin' : -1 }\n\n }\n }\n\n ##### Filling Global parameters ############################################################\n model_dict = global_pars_update(model_dict, data_name, config_name=os_get_function_name() )\n return model_dict", "def init(self):\n self.reparam_layers = []\n if self.model_type == \"GCN\":\n for i in range(self.num_layers):\n if self.reparam_all_layers is True:\n is_reparam = True\n elif isinstance(self.reparam_all_layers, tuple):\n reparam_all_layers = tuple([kk + self.num_layers if kk < 0 else kk for kk in self.reparam_all_layers])\n is_reparam = i in reparam_all_layers\n else:\n raise\n if is_reparam:\n self.reparam_layers.append(i)\n setattr(self, \"conv{}\".format(i + 1),\n GCNConv(self.num_features if i == 0 else self.latent_size,\n self.latent_size if i != self.num_layers - 1 else self.num_classes,\n cached=True,\n reparam_mode=self.reparam_mode if is_reparam else None,\n prior_mode=self.prior_mode if is_reparam else None,\n sample_size=self.sample_size,\n bias=True if self.with_relu else False,\n val_use_mean=self.val_use_mean,\n normalize=self.normalize,\n ))\n # self.conv1 = ChebConv(self.num_features, 16, K=2)\n # self.conv2 = ChebConv(16, self.num_features, K=2)\n\n elif self.model_type == \"GAT\":\n latent_size = int(self.latent_size / 2) # Under the default setting, latent_size = 8\n for i in range(self.num_layers):\n if i == 0:\n input_size = self.num_features\n else:\n if self.struct_dropout_mode[0] == 'DNsampling' or (self.struct_dropout_mode[0] == 'standard' and len(self.struct_dropout_mode) == 3):\n input_size = latent_size * 8 * 2\n else:\n input_size = latent_size * 8\n if self.reparam_all_layers is True:\n is_reparam = True\n elif isinstance(self.reparam_all_layers, tuple):\n reparam_all_layers = tuple([kk + self.num_layers if kk < 0 else kk for kk in self.reparam_all_layers])\n is_reparam = i in reparam_all_layers\n else:\n raise\n if is_reparam:\n self.reparam_layers.append(i)\n setattr(self, \"conv{}\".format(i + 1), GATConv(\n input_size,\n latent_size if i != self.num_layers - 1 else self.num_classes,\n heads=8 if i != self.num_layers - 1 else 1, concat=True,\n reparam_mode=self.reparam_mode if is_reparam else None,\n prior_mode=self.prior_mode if is_reparam else None,\n val_use_mean=self.val_use_mean,\n struct_dropout_mode=self.struct_dropout_mode,\n sample_size=self.sample_size,\n ))\n if self.struct_dropout_mode[0] == 'DNsampling' or (self.struct_dropout_mode[0] == 'standard' and 
len(self.struct_dropout_mode) == 3):\n setattr(self, \"conv{}_1\".format(i + 1), GATConv(\n input_size,\n latent_size if i != self.num_layers - 1 else self.num_classes,\n heads=8 if i != self.num_layers - 1 else 1, concat=True,\n reparam_mode=self.reparam_mode if is_reparam else None,\n prior_mode=self.prior_mode if is_reparam else None,\n val_use_mean=self.val_use_mean,\n struct_dropout_mode=self.struct_dropout_mode,\n sample_size=self.sample_size,\n ))\n # On the Pubmed dataset, use heads=8 in conv2.\n \n else:\n raise Exception(\"Model_type {} is not valid!\".format(self.model_type))\n\n self.reparam_layers = sorted(self.reparam_layers)\n \n if self.model_type == \"GCN\":\n if self.with_relu:\n reg_params = [getattr(self, \"conv{}\".format(i+1)).parameters() for i in range(self.num_layers - 1)]\n self.reg_params = itertools.chain(*reg_params)\n self.non_reg_params = getattr(self, \"conv{}\".format(self.num_layers)).parameters()\n else:\n self.reg_params = OrderedDict()\n self.non_reg_params = self.parameters()\n else:\n self.reg_params = self.parameters()\n self.non_reg_params = OrderedDict()\n self.to(self.device)", "def __init__(self, encut, magmom, ldaul, Uparam, Jparam, name=\"DFTCL_settings\"):\n\n cl_settings = {\"ISPIN\": 2, \"MAGMOM\": magmom, \"SAXIS\": None, \"LSORBIT\": None, \"LNONCOLLINEAR\": None}\n dftu_settings = {\"LDAU\": \".TRUE.\", \"LDAUU\": Uparam, \"LDATYPE\": 2, \"LDAUL\": ldaul, \"LDAUJ\": Jparam , \"LMAXMIMX\": 4}\n InputParameters.__init__(self, name=name, magnetic_settings=cl_settings, hubbard_settings=dftu_settings)\n self.update_electronic_settings(\"encut\", encut)", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_kvhh', 'g_cav', 'g_kca', 'g_nap']\n gX_log: np.ndarray = 4 * np.random.rand(5) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(5)) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n\n param_dict.update(gX_itr)\n param_dict.update(tCa_dict)\n return param_dict", "def __init__(self, in_features, out_features):\n \n ########################\n # PUT YOUR CODE HERE #\n #######################\n self.params = {'weight': np.random.normal(loc = 0, scale=0.0001, size=(out_features,in_features)),\\\n 'bias': np.zeros((1, out_features))}\n \n self.grads = {'weight': np.zeros((out_features,in_features)),\\\n 'bias': np.zeros((1, out_features))}\n ########################\n # END OF YOUR CODE #\n #######################", "def set_parameters(self):\n params = {}\n if self.modelname == 'SI':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after splot\n # Ts: Time from split to present, in 2*Na generation units\n names = ['N1', 'N2', 'Ts']\n values = [1, 1, 1]\n upper_bounds = [20, 20, 10]\n lower_bounds = [0.01, 0.01, 0]\n elif self.modelname == 'IM':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after split\n # m21: Migration from 1 to 2 (2*Na*mm21)\n # m12: Migration from 2 to 1 (2*Na*m12)\n # Ts: Time from split to present, in 2*Na generations\n names = ['N1', 'N2', 'm21', 'm12', 'Ts']\n values = [1, 1, 1, 1, 1]\n upper_bounds = [20, 20, 20, 20, 10]\n lower_bounds = [0.01, 0.01, 0, 0, 0]\n elif self.modelname == 'AM':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after split\n # m21: Migration from 1 to 2 (2*Na*mm21)\n # m12: Migration from 2 to 1 (2*Na*m12)\n # Tam: Time from end of anc migration to split, in 2*Na gens\n # Ts: Time from split to 
present, in 2*Na generations\n names = ['N1', 'N2', 'm21', 'm12', 'Tam', 'Ts']\n values = [1, 1, 1, 1, 0.1, 1]\n upper_bounds = [20, 20, 20, 20, 2, 10]\n lower_bounds = [0.01, 0.01, 0, 0, 0, 0]\n elif self.modelname == 'SC':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after split\n # m21: Migration from 1 to 2 (2*Na*mm21)\n # m12: Migration from 2 to 1 (2*Na*m12)\n # Ts: Time from split to secondary contact, in 2*Na generations\n # Tsc: Time from secondary contact to presesnt, in 2*Na gens\n names = ['N1', 'N2', 'm21', 'm12', 'Ts', 'Tsc']\n values = [1, 1, 1, 1, 1, 0.1]\n upper_bounds = [20, 20, 20, 20, 10, 2]\n lower_bounds = [0.01, 0.01, 0, 0, 0, 0]\n elif self.modelname == 'IM2M':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after split\n # m21: Migration from 1 to 2 (2*Na*mm21)\n # m12: Migration from 2 to 1 (2*Na*m12)\n # mi21: Migration from 1 to 2 in \"islands\" (2*Na*mi21)\n # mi12: Migration from 1 to 2 in \"islands\" (2*Na*mi12)\n # Ts: Time from split to present, in 2*Na generations\n # p: Porpotion of genome evoloving in \"islands\"\n names = ['N1', 'N2', 'm21', 'm12', 'mi21', 'mi12', 'Ts', 'p']\n values = [1, 1, 5, 5, 0.5, 0.5, 1, 0.5]\n upper_bounds = [20, 20, 30, 30, 5, 5, 10, 0.95]\n lower_bounds = [0.01, 0.01, 0, 0, 0, 0, 0, 0.05]\n elif self.modelname == 'AM2M':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after split\n # m21: Migration from 1 to 2 (2*Na*mm21)\n # m12: Migration from 2 to 1 (2*Na*m12)\n # mi21: Migration from 1 to 2 in \"islands\" (2*Na*mi21)\n # mi12: Migration from 1 to 2 in \"islands\" (2*Na*mi12)\n # Tam: Time from end of anc migration to split, in 2*Na gens\n # Ts: Time from split to present, in 2*Na generations\n # p: Porpotion of genome evoloving in \"islands\"\n names = ['N1', 'N2', 'm21', 'm12', 'mi21', 'mi12', 'Tam', 'Ts', 'p']\n values = [1, 1, 5, 5, 0.5, 0.5, 0.1, 1, 0.5]\n upper_bounds = [20, 20, 30, 30, 5, 5, 2, 10, 0.95]\n lower_bounds = [0.01, 0.01, 0, 0, 0, 0, 0, 0, 0.05]\n elif self.modelname == 'SC2M':\n # N1: Pop 1 size after split\n # N2: Pop 2 size after split\n # m21: Migration from 1 to 2 (2*Na*mm21)\n # m12: Migration from 2 to 1 (2*Na*m12)\n # mi21: Migration from 1 to 2 in \"islands\" (2*Na*mi21)\n # mi12: Migration from 1 to 2 in \"islands\" (2*Na*mi12)\n # Ts: Time from split to secondary contact, in 2*Na generations\n # Tsc: Time from secondary contact to presesnt, in 2*Na gens\n # p: Porpotion of genome evoloving in \"islands\"\n names = ['N1', 'N2', 'm21', 'm12', 'mi21', 'mi12', 'Ts', 'Tsc', 'p']\n values = [1, 1, 5, 5, 0.5, 0.5, 1, 0.1, 0.5]\n upper_bounds = [20, 20, 30, 30, 5, 5, 10, 2, 0.95]\n lower_bounds = [0.01, 0.01, 0, 0, 0, 0, 0, 0, 0.05]\n params['Names'] = names\n params['Values'] = values\n params['Upper'] = upper_bounds\n params['Lower'] = lower_bounds\n return params", "def Params(cls):\n p = hyperparams.InstantiableParams(cls)\n p.Define('task', None, 'Underlying task')\n p.Define('logdir', None, 'Log directory')\n p.Define('num_splits_per_client', None, '')\n p.Define('steps_per_loop', None, 'Number of steps to run.')\n p.Define('dataset_name', None,\n 'Dataset the program is operating on, eg: \"Test\"')\n p.Define('name', 'base_program', 'Program name.')\n p.Define('task_name', None,\n 'If multi-task, what the high-level task name is')\n p.Define('num_threads', 1, 'Number of threads in multiprocessing pool.')\n p.Define('spmd', False, 'Whether program is running under SPMD mode.')\n p.Define('write_train_input_stats', False,\n 'Whether to write input data stats during training.')\n 
p.Define('max_metrics', 256, 'Overrides TpuEvalMetrics.max_metrics')\n p.Define('ml_perf', None, 'MLPerf config')\n return p", "def create_hparams(hparams_string=None, verbose=False):\n\n hparams = tf.contrib.training.HParams(\n ################################\n # Experiment Parameters #\n ################################\n epochs=1000,\n iters_per_checkpoint=1000,\n iters_per_validation=1000,\n seed=1234,\n dynamic_loss_scaling=True,\n fp16_run=False,\n distributed_run=False,\n dist_backend=\"nccl\",\n dist_url=\"tcp://127.0.0.1:54321\",\n cudnn_enabled=True,\n cudnn_benchmark=False,\n #ignore_layers=[\"decoder.attention_layer.F.2.weight\", \"decoder.attention_layer.F.2.bias\",\"decoder.attention_layer.F.0.linear_layer.weight\",\"decoder.attention_layer.F.0.linear_layer.bias\"],\n ignore_layers=[\"encoder.lstm.weight_ih_l0\",\"encoder.lstm.weight_hh_l0\",\"encoder.lstm.bias_ih_l0\",\"encoder.lstm.bias_hh_l0\",\"encoder.lstm.weight_ih_l0_reverse\",\"encoder.lstm.weight_hh_l0_reverse\",\"encoder.lstm.bias_ih_l0_reverse\",\"encoder.lstm.bias_hh_l0_reverse\",\"decoder.attention_rnn.weight_ih\",\"decoder.attention_rnn.weight_hh\",\"decoder.attention_rnn.bias_ih\",\"decoder.attention_rnn.bias_hh\",\"decoder.attention_layer.query_layer.linear_layer.weight\",\"decoder.attention_layer.memory_layer.linear_layer.weight\",\"decoder.decoder_rnn.weight_ih\",\"decoder.linear_projection.linear_layer.weight\",\"decoder.gate_layer.linear_layer.weight\"],\n \n ################################\n # Data Parameters #\n ################################\n load_mel_from_disk=True,\n training_files='/media/cookie/Samsung 860 QVO/ClipperDatasetV2/filelists/mel_train_taca2_merged.txt',\n validation_files='/media/cookie/Samsung 860 QVO/ClipperDatasetV2/filelists/mel_validation_taca2_merged.txt',\n text_cleaners=['english_cleaners'],\n \n ################################\n # Audio Parameters #\n ################################\n max_wav_value=32768.0,\n sampling_rate=48000,\n filter_length=2400,\n hop_length=600,\n win_length=2400,\n n_mel_channels=160,\n mel_fmin=0.0,\n mel_fmax=16000.0,\n \n ################################\n # Model Parameters #\n ################################\n n_symbols=len(symbols),\n symbols_embedding_dim=512,\n \n # Gate\n gate_threshold=0.5,\n mask_gate_loss=False, # False = Vanilla Nvidia Tacotron2\n # masking the gate after the end of the clip will make the model never see the gate loss after the end of the clip. # TODO, explain this better # TODO, figure out why this is useful. # TODO, figure out why I added this\n # false would punish the model for trying to end the clip before it's ready, but barely punish the model for just forgetting to end the clip.\n # True will also help with badly trimmed audio.\n gate_positive_weight=10, # how much more valuable 1 positive frame is to 1 zero frame. 
80 Frames per seconds, therefore values around 20 are fine.\n \n # Synthesis/Inference Related\n max_decoder_steps=3000,\n low_vram_inference=False, # doesn't save alignment and gate information, frees up some vram, especially for large input sequences.\n \n # Teacher-forcing Config\n p_teacher_forcing=1.00, # 1.00 baseline\n teacher_force_till=20, # int, number of starting frames with teacher_forcing at 100%, helps with clips that have challenging starting conditions i.e breathing before the text begins.\n val_p_teacher_forcing=0.80,\n val_teacher_force_till=20,\n \n # (Encoder) Encoder parameters\n encoder_speaker_embed_dim=256, # speaker_embedding before encoder\n encoder_concat_speaker_embed='inside', # concat before encoder convs, or just before the LSTM inside decode. Options 'before','inside'\n encoder_kernel_size=5,\n encoder_n_convolutions=3,\n encoder_embedding_dim=768, # = symbols_embedding_dim + encoder_speaker_embed_dim\n \n # (Decoder) Decoder parameters\n start_token = \"\",#\"☺\"\n stop_token = \"\",#\"␤\"\n hide_startstop_tokens=False, # remove first/last encoder output, *should* remove start and stop tokens from the decocer assuming the tokens are used.\n n_frames_per_step=1, # currently only 1 is supported\n context_frames=1, # TODO TODO TODO TODO TODO\n \n # (Decoder) Prenet\n prenet_dim=256, # 256 baseline\n prenet_layers=2, # 2 baseline\n prenet_batchnorm=False, # False baseline\n p_prenet_dropout=0.5, # 0.5 baseline\n \n # (Decoder) AttentionRNN\n attention_rnn_dim=1280, # 1024 baseline\n AttRNN_extra_decoder_input=True,# False baselinee\n AttRNN_hidden_dropout_type='zoneout',# options ('dropout','zoneout')\n p_AttRNN_hidden_dropout=0.10, # 0.1 baseline\n p_AttRNN_cell_dropout=0.00, # 0.0 baseline\n \n # (Decoder) AttentionRNN Speaker embedding\n n_speakers=512,\n speaker_embedding_dim=256, # speaker embedding size # 128 baseline\n \n # (Decoder) DecoderRNN\n decoder_rnn_dim=1024, # 1024 baseline\n extra_projection=False, # another linear between decoder_rnn and the linear projection layer (hopefully helps with high sampling rates and hopefully doesn't help decoder_rnn overfit)\n DecRNN_hidden_dropout_type='zoneout',# options ('dropout','zoneout')\n p_DecRNN_hidden_dropout=0.1, # 0.1 baseline\n p_DecRNN_cell_dropout=0.00, # 0.0 baseline\n \n # (Decoder) Attention parameters\n attention_type=0,\n # 0 -> Location-Based Attention (Vanilla Tacotron2)\n # 1 -> GMMAttention (Multiheaded Long-form Synthesis)\n attention_dim=128, # 128 Layer baseline\n \n # (Decoder) Attention Type 0 Parameters\n attention_location_n_filters=32, # 32 baseline\n attention_location_kernel_size=31, # 31 baseline\n \n # (Decoder) Attention Type 1 Parameters\n num_att_mixtures=1,# 5 baseline\n attention_layers=1,# 1 baseline\n delta_offset=0, # 0 baseline, values around 0.005 will push the model forwards. 
Since we're using the sigmoid function caution is suggested.\n delta_min_limit=0, # 0 baseline, values around 0.010 will force the model to move forward, in this example, the model cannot spend more than 100 steps on the same encoder output.\n lin_bias=False, # I need to figure out what that layer is called.\n initial_gain='relu', # initial weight distribution 'tanh','relu','sigmoid','linear'\n normalize_attention_input=True, # False baseline\n normalize_AttRNN_output=False, # True baseline\n \n # (Postnet) Mel-post processing network parameters\n postnet_embedding_dim=512,\n postnet_kernel_size=5,\n postnet_n_convolutions=5,\n \n # (GST) Reference encoder\n with_gst=True,\n ref_enc_filters=[32, 32, 64, 64, 128, 128],\n ref_enc_size=[3, 3],\n ref_enc_strides=[2, 2],\n ref_enc_pad=[1, 1],\n ref_enc_gru_size=128,\n \n # (GST) Multi-headed Attention Layer\n gstAtt_dim=128,\n num_heads=8,\n \n # (GST) Style Token Layer\n token_num=5, # acts as the information bottleneck.\n token_activation_func='tanh', # default 'softmax', options 'softmax','sigmoid','tanh','absolute'\n token_embedding_size=256, # token embedding size\n \n # (GST) TorchMoji\n torchMoji_attDim=2304,# pretrained model uses 2304\n torchMoji_linear=False,# load/save text infer linear layer.\n torchMoji_training=False,# switch GST to torchMoji mode\n \n # (GST) Drop Style Tokens\n p_drop_tokens=0.4, # Nudge the decoder to infer style without GST's input\n drop_tokens_mode='speaker_embedding',#Options: ('zeros','halfs','embedding','speaker_embedding') # Replaces style_tokens with either a scaler or an embedding, or a speaker_dependant embedding\n \n ################################\n # Optimization Hyperparameters #\n ################################\n use_saved_learning_rate=False,\n learning_rate=0.1e-5,\n weight_decay=1e-6,\n grad_clip_thresh=1.0,\n batch_size=56, # 32*3 = 0.377 val loss, # 2 = 0.71 val loss\n val_batch_size=56, # for more precise comparisons between models, constant batch_size is useful\n mask_padding=True, # set model's padded outputs to padded values\n \n # DFR (Drop Frame Rate)\n global_mean_npy='global_mean.npy',\n drop_frame_rate=0.25,\n \n ##################################\n # MMI options #\n ##################################\n use_mmi=False,#depreciated\n use_gaf=True,#depreciated\n max_gaf=0.01,#depreciated\n )\n\n if hparams_string:\n tf.compat.v1.logging.info('Parsing command line hparams: %s', hparams_string)\n hparams.parse(hparams_string)\n\n if verbose:\n tf.compat.v1.logging.info('Final parsed hparams: %s', hparams.values())\n\n return hparams", "def __init__(\n self,\n hparams: argparse.Namespace,\n num_labels=None,\n mode=\"base\",\n config=None,\n tokenizer=None,\n model=None,\n **config_kwargs\n ):\n super().__init__()\n # TODO: move to self.save_hyperparameters()\n # self.save_hyperparameters()\n # can also expand arguments into trainer signature for easier reading\n\n self.save_hyperparameters(hparams)\n self.step_count = 0\n self.output_dir = Path(self.hparams.output_dir)\n cache_dir = self.hparams.cache_dir if self.hparams.cache_dir else None\n if config is None:\n self.config = AutoConfig.from_pretrained(\n self.hparams.config_name if self.hparams.config_name else self.hparams.model_name_or_path,\n **({\"num_labels\": num_labels} if num_labels is not None else {}),\n cache_dir=cache_dir,\n **config_kwargs,\n )\n else:\n self.config: BartConfig = config\n\n extra_model_params = (\"encoder_layerdrop\", \"decoder_layerdrop\", \"dropout\", \"attention_dropout\")\n for p in 
extra_model_params:\n if getattr(self.hparams, p, None):\n assert hasattr(self.config, p), f\"model config doesn't have a `{p}` attribute\"\n setattr(self.config, p, getattr(self.hparams, p))\n\n if tokenizer is None:\n self.tokenizer = AutoTokenizer.from_pretrained(\n self.hparams.tokenizer_name if self.hparams.tokenizer_name else self.hparams.model_name_or_path,\n cache_dir=cache_dir,\n )\n else:\n self.tokenizer: BartTokenizer = tokenizer\n # self.model_type = MODEL_MODES[mode]\n if model is None:\n self.model = self.model_type.from_pretrained(\n self.hparams.model_name_or_path,\n from_tf=bool(\".ckpt\" in self.hparams.model_name_or_path),\n config=self.config,\n cache_dir=cache_dir,\n )\n else:\n self.model = model", "def initialize(self):\n\n\t\tparameters = {}\n\t\tL = len(self.layer_dims) # number of layers in the network\n\n\t\tfor l in range(1, L):\n\t\t\tparameters['W' + str(l)] = np.random.randn(self.layer_dims[l], self.layer_dims[l-1]) * 0.01\n\t\t\tparameters['b' + str(l)] = np.zeros((self.layer_dims[l], 1))\n\n\t\t\tassert(parameters['W' + str(l)].shape == (self.layer_dims[l], self.layer_dims[l-1]))\n\t\t\tassert(parameters['b' + str(l)].shape == (self.layer_dims[l], 1))\n\n\t\treturn parameters", "def build(self):\n self.build_inputs()\n self.build_word_embeddings()\n self.build_encoder()\n self.build_fc()\n self.build_loss()\n self.build_global_step()", "def _get_current_hyperparameters(self):", "def init_input_pipeline(self, config):\n\n ######################\n # Calibrate parameters\n ######################\n\n print('Initiating input pipelines')\n\n # Update num classes in config\n config.num_classes = self.num_classes - len(self.ignored_labels)\n config.ignored_label_inds = [self.label_to_idx[ign_label] for ign_label in self.ignored_labels]\n\n print('ignored_label_inds:')\n print(config.ignored_label_inds)\n\n # Update network model in config\n config.network_model = self.network_model\n\n print('network_model:')\n print(config.network_model)\n\n # Calibrate generators to batch_num\n print('Calibrate generators to batch_num')\n self.batch_limit = self.calibrate_batches(config)\n\n # From config parameter, compute higher bound of neighbors number in a neighborhood\n hist_n = int(np.ceil(4 / 3 * np.pi * (config.density_parameter + 1) ** 3))\n\n # Initiate neighbors limit with higher bound\n print('Initiate neighbors limit with higher bound')\n self.neighborhood_limits = np.full(config.num_layers, hist_n, dtype=np.int32)\n\n # Calibrate max neighbors number\n print('Calibrate max neighbors number')\n self.calibrate_neighbors(config)\n\n ################################\n # Initiate tensorflow parameters\n ################################\n\n # Reset graph\n print('Reset graph')\n tf.reset_default_graph()\n\n # Set random seed (You also have to set it in network_architectures.weight_variable)\n #np.random.seed(42)\n #tf.set_random_seed(42)\n\n # Get generator and mapping function\n print('Get generator')\n gen_function, gen_types, gen_shapes = self.get_batch_gen('training', config)\n gen_function_val, _, _ = self.get_batch_gen('validation', config)\n print('Get mapping function')\n map_func = self.get_tf_mapping(config)\n\n ##################\n # Training dataset\n ##################\n\n # Create batched dataset from generator\n self.train_data = tf.data.Dataset.from_generator(gen_function,\n gen_types,\n gen_shapes)\n\n self.train_data = self.train_data.map(map_func=map_func, num_parallel_calls=self.num_threads)\n\n # Prefetch data\n self.train_data = 
self.train_data.prefetch(10)\n\n ##############\n # Test dataset\n ##############\n\n # Create batched dataset from generator\n self.val_data = tf.data.Dataset.from_generator(gen_function_val,\n gen_types,\n gen_shapes)\n\n # Transform inputs\n self.val_data = self.val_data.map(map_func=map_func, num_parallel_calls=self.num_threads)\n\n # Prefetch data\n self.val_data = self.val_data.prefetch(10)\n\n #################\n # Common iterator\n #################\n\n # create a iterator of the correct shape and type\n iter = tf.data.Iterator.from_structure(self.train_data.output_types, self.train_data.output_shapes)\n self.flat_inputs = iter.get_next()\n\n # create the initialisation operations\n self.train_init_op = iter.make_initializer(self.train_data)\n self.val_init_op = iter.make_initializer(self.val_data)", "def log_hyperparameters(\n cfg: DictConfig,\n model: pl.LightningModule,\n trainer: pl.Trainer,\n) -> None:\n hparams = OmegaConf.to_container(cfg, resolve=True)\n\n # save number of model parameters\n hparams[f\"{STATS_KEY}/params_total\"] = sum(p.numel() for p in model.parameters())\n hparams[f\"{STATS_KEY}/params_trainable\"] = sum(\n p.numel() for p in model.parameters() if p.requires_grad\n )\n hparams[f\"{STATS_KEY}/params_not_trainable\"] = sum(\n p.numel() for p in model.parameters() if not p.requires_grad\n )\n\n # send hparams to all loggers\n trainer.logger.log_hyperparams(hparams)\n\n # disable logging any more hyperparameters for all loggers\n # (this is just a trick to prevent trainer from logging hparams of model, since we already did that above)\n trainer.logger.log_hyperparams = lambda params: None", "def main():\n target = 'Coding:Level1'\n output_root = f'problem_5_output/{target.replace(\":\", \"_\")}'\n if not os.path.exists(output_root):\n os.makedirs(output_root, exist_ok=True)\n\n # dictionary of parameter grids, one for each process\n param_grids = {\n 'early_stopping': ParameterGrid([\n {\n 'patience': [15], # , 20, 40]\n },\n ]),\n 'fit': ParameterGrid([\n {\n 'batch_size': [128], # , 64, 128, 256],\n 'epochs': [16], # 20, 50],\n },\n ]),\n 'model_preprocessor': ParameterGrid([\n {\n 'num_unique_words': [5000], # 4000, 1000, 6000, 10000],\n 'max_sequence_length': [150], # 50, 75, 100, 125, 150, 200],\n },\n ]),\n 'model': ParameterGrid([\n # {\n # Dense single hidden layer model hyperparameters:\n # 'name': ['dense_h1'],\n # 'embedded_dims': [8], # , 16, 32, 64, 128, 256],\n # 'num_units_h1': [8], # , 16, 32, 64, 128, 256],\n # 'drop_h1': [None], # , 0.1, 0.2, 0.25, 0.5, 0.75],\n # 'optimizer': ['nadam', 'adam'],\n # 'learning_rate': [None], # , 0.01, 0.001],\n # 'activation': ['relu', 'tanh'],\n # },\n # {\n # Dense double hidden layer model hyperparameters:\n # 'name': ['dense_h2'],\n # 'embedded_dims': [64],\n # 'num_units_h1': [128],\n # 'num_units_h2': [128],\n # 'drop_h1': [None],\n # 'drop_h2': [0.5],\n # 'optimizer': ['nadam'],\n # 'activation': ['relu'],\n # 'learning_rate': [0.01],\n # },\n # {\n # CNN single hidden layer model hyperparameters\n # 'name': ['conv_h1'],\n # 'embedded_dims': [64],\n # 'num_units_h1': [32], # , 64, 256],\n # 'k_conv_h1': [2], # , 3, 4],\n # 'drop_embed': [0.2], # , 0.5],\n # 'activation': ['relu', 'tanh'],\n # 'optimizer': ['adam', 'nadam']\n # },\n # {\n # CNN double hidden layer model hyperparameters\n # 'name': ['conv_h2'],\n # 'embedded_dims': [128], # , 64, 32, 16, 8],\n # 'num_units_h1': [32], # , 64, 128],\n # 'drop_h2': [0.5], # , 0.75, 0.25, 0.1],\n # 'k_conv_h1': [2], # , 3, 4],\n # 'num_units_h2': [128], # , 
64, 32, 16, 8],\n # 'drop_embed': [0.2], # , 0.50],\n # 'activation': ['relu'],\n # 'optimizer': ['adam'], # , 'nadam'],\n # },\n # {\n # CNN double hidden layer model hyperparameters\n # 'name': ['conv_h2.1'],\n # 'embedded_dims': [64],\n # 'num_units_h1': [32], # , 64, 128],\n # 'k_conv_h1': [2], # , 3, 4],\n # 'drop_embed': [0.2], # , 0.5],\n # 'activation': ['relu'],\n # 'optimizer': ['adam'], # , 'nadam']\n # },\n # {\n # RNN single hidden layer model hyperparameters\n # 'name': ['rnn_h1'],\n # 'embedded_dims': [64],\n # 'drop_embed': [0.2],\n # 'num_units_h1': [128],\n # 'optimizer': ['nadam'],\n # 'learning_rate': [0.01]\n # },\n # {\n # # LSTM double hidden layer (second layer dense FC) model hyperparameters\n # 'name': ['lstm_h1'],\n # 'embedded_dims': [64],\n # 'drop_embed': [0.2],\n # 'drop_h1': [0.5],\n # 'num_units_h1': [128],\n # 'optimizer': ['nadam'],\n # },\n # {\n # LSTM double hidden layer (second layer dense FC) model hyperparameters\n # 'name': ['lstm_h2'],\n # 'embedded_dims': [64],\n # 'drop_embed': [0.2],\n # 'num_units_h1': [128],\n # 'drop_h1': [0.5],\n # 'num_units_h2': [128],\n # 'optimizer': ['nadam'],\n # 'activation': ['relu']\n # },\n # {\n # # Bi-directional LSTM single hidden layer model hyperparameters\n # 'name': ['bi_lstm_h1'],\n # 'embedded_dims': [32], # , 64, 128],\n # 'drop_embed': [0.2], # , 0.25, 0.5],\n # 'num_units_h1': [32], # , 64, 128],\n # 'drop_h1': [0.2], # , 0.25, 0.5],\n # 'optimizer': ['nadam', 'adam']\n # },\n # {\n # Bi-directional LSTM double hidden layer (second layer Bi-LSTM) model hyperparameters\n # 'name': ['bi_lstm_h2'],\n # 'embedded_dims': [32], # , 64, 128],\n # 'num_units_h1': [32], # , 64, 128],\n # 'num_units_h2': [32], # , 64, 128],\n # 'drop_h1': [0.25, 0.5],\n # 'drop_h2': [0.25, 0.5],\n # 'optimizer': ['nadam', 'adam']\n # },\n {\n # Multi Convolutional model hyperparameters\n 'name': ['multi_conv_h3_s2'],\n 'drop_embed': [0.5], # , 0.3],\n 'embedded_dims': [128], # , 64, 128, 256],\n 'num_units_h1': [128], # , 64, 128, 256],\n 'num_units_h2': [128], # , 64, 128, 256],\n 'num_units_h3': [128], # , 64, 128, 256],\n 'num_units_h4': [128], # , 64, 128, 256],\n 'k_conv_h1': [3],\n 'k_conv_h2': [2],\n 'activation': ['relu'], # , 'tanh'],\n 'drop_h3': [0.2], # , 0.2, 0.25, 0.5, 0.75],\n 'optimizer': ['adam'], # 'nadam']\n },\n # {\n # # Multi Convolutional model hyperparameters\n # 'name': ['multi_conv_h3_s3'],\n # 'drop_embed': [0.5], # , 0.3],\n # 'embedded_dims': [32], # , 64, 128, 256],\n # 'num_units_h1': [32], # , 64, 128, 256],\n # 'num_units_h2': [32], # , 64, 128, 256],\n # 'num_units_h3': [32], # , 64, 128, 256],\n # 'num_units_h4': [32], # , 64, 128, 256],\n # 'k_conv_h1': [3],\n # 'k_conv_h2': [2],\n # 'k_conv_h3': [4],\n # 'k_conv_h4': [4],\n # 'activation': ['relu', 'tanh'],\n # 'drop_h4': [0.1], # , 0.2, 0.25, 0.5, 0.75],\n # 'optimizer': ['adam', 'nadam']\n # },\n ]),\n 'preprocessor': ParameterGrid([\n # {\n # 'do_clean': [False],\n # 'pad_type': ['pre', 'post'],\n # 'trunc_type': ['pre', 'post'],\n # },\n {\n 'do_clean': [True],\n 'pad_type': ['post'], # , 'post'],\n 'trunc_type': ['post'], # 'post'],\n 'omit_stopwords': [False],\n 'ignore_urls': [False],\n 'fix_contractions': [True],\n 'stem': [True],\n 'remove_foreign_characters': [False], # , False],\n 'lower': [True], # , False],\n 'remove_punctuation': [True], # , False],\n 'bigrams': [True], # , False]\n },\n ])\n }\n\n def prod(a):\n if len(a) == 0:\n return 1\n return a[0] * prod(a[1:])\n\n num_models = prod([len(pg) for pg in 
param_grids.values()])\n\n param_grid_names = sorted(list(param_grids.keys()))\n param_grid_list = [param_grids[k] for k in param_grid_names]\n\n all_params, best_params = assemble_results(output_root)\n\n if CHECK_ONLY:\n for i, params in enumerate(itertools.product(*param_grid_list[3:5])):\n params = {k: v for k, v in zip(param_grid_names[3:5], params)}\n print(i, params)\n Preprocessor(**params['preprocessor'], **params['model_preprocessor'])\n\n for i, params in enumerate(itertools.product(*param_grid_list[2:4])):\n params = {k: v for k, v in zip(param_grid_names[2:4], params)}\n print(i, params)\n build_fn(num_classes=3, **params['model'], **params['model_preprocessor'])\n return\n\n for i, params in enumerate(itertools.product(*param_grid_list)):\n mem = psutil.virtual_memory()\n percent_used = 1 - mem.available / mem.total\n print(f'{percent_used:.2%} memory used')\n if percent_used > 0.80:\n # exit failure\n print('Exiting (-1)')\n exit(-1)\n\n params = {k: v for k, v in zip(param_grid_names, params)}\n print(f'\\n{i + 1}/{num_models}: {params}\\n')\n\n if params in all_params:\n # skip this one because we already ran it.\n continue\n\n if best_params is not None:\n # print best performance so far\n print(f'best params: {best_params}')\n print(f'best val loss: {best_params[\"results\"][\"valid\"][\"loss\"]:.6f}')\n print(f'best val acc: {best_params[\"results\"][\"valid\"][\"accuracy\"]:.4%}')\n\n # create a new output directory with path to model file.\n date = datetime.datetime.utcnow().strftime(\"%Y-%m-%d-%H.%M.%S.%f\")\n output_dir = os.path.join(output_root, date)\n if not os.path.exists(output_dir):\n os.makedirs(output_dir)\n model_file = os.path.join(output_dir, 'model.h5')\n\n # get the preprocessed training and validation data\n preprocess_time = time.time()\n classes, data_sets, set_names = get_xy(Preprocessor(**params['preprocessor'], **params['model_preprocessor']),\n target=target)\n ((x_train, y_train), (x_valid, y_valid)) = data_sets\n preprocess_time -= time.time()\n\n # build and compile model\n model = build_fn(num_classes=len(classes), **params['model'], **params['model_preprocessor'])\n\n # setup callbacks\n early_stopping = EarlyStopping(monitor='val_loss', verbose=1, **params['early_stopping'])\n model_checkpoint = ModelCheckpoint(\n filepath=model_file,\n save_weights_only=False, save_freq='epoch',\n save_best_only=True, monitor='val_loss', verbose=1)\n callbacks = [early_stopping, model_checkpoint]\n\n # Use sample weights to treat classes equally in loss and accuracy.\n sample_weight = get_sample_weight(y_train)\n sample_weight_valid = get_sample_weight(y_valid)\n\n # fit the model\n train_time = time.time()\n model.fit(x=x_train, y=y_train, sample_weight=sample_weight, verbose=1,\n validation_data=(x_valid, y_valid, sample_weight_valid), callbacks=callbacks, **params['fit'])\n train_time -= time.time()\n\n # load the best model (last one saved)\n model = load_model(model_file, compile=True)\n\n # compute results\n results = get_performance(model, data_sets, set_names)\n results['time'] = {'train': train_time, 'preprocess': preprocess_time}\n\n print(pd.DataFrame(data=results).T)\n params['results'] = results\n\n # save params and results\n with open(os.path.join(output_dir, 'params.json'), 'w') as fp:\n json.dump(params, fp)\n\n # save a copy of *this* Python file.\n shutil.copyfile(__file__, os.path.join(output_dir, 'roatan.py'))\n\n # for convenience, show the validation loss and accuracy in a file name in the same directory.\n result_file_name = 
f'{params[\"results\"][\"valid\"][\"loss\"]:.6f}_{params[\"results\"][\"valid\"][\"accuracy\"]:.4f}.out'\n with open(os.path.join(output_dir, result_file_name), 'w'):\n pass\n\n # check_model(output_dir)\n\n if best_params is None or (params['results']['valid']['loss'] < best_params['results']['valid']['loss']):\n best_params = params\n\n # assemble results from all runs into one CSV file in output root.\n assemble_results(output_root)", "def get_hyperparameter_search_space(seed) -> ConfigSpaceWrapper:\n cs = ConfigSpace.ConfigurationSpace('sklearn.naive_bayes.BernoulliNB', seed)\n\n # the smoothing parameter is a non-negative float\n # I will limit it to 1000 and put it on a logarithmic scale. (SF)\n # Please adjust that, if you know a proper range, this is just a guess.\n alpha = ConfigSpace.UniformFloatHyperparameter(\n name='alpha', lower=1e-2, upper=100, default_value=1, log=True)\n fit_prior = ConfigSpace.CategoricalHyperparameter(\n name='fit_prior', choices=[True, False], default_value=True)\n\n hyperparameters = [alpha, fit_prior]\n\n return ConfigSpaceWrapper(cs, hyperparameters, None)", "def __init__(self,\n batch_size,\n max_num_context,\n x_size=1,\n y_size=1,\n l1_scale=0.6,\n sigma_scale=1.0,\n random_kernel_parameters=True,\n kernel = 'SE', #valid options {SE,PER}\n testing=False):\n self._batch_size = batch_size\n self._max_num_context = max_num_context\n self._x_size = x_size\n self._y_size = y_size\n self._l1_scale = l1_scale\n self._sigma_scale = sigma_scale\n self._random_kernel_parameters = random_kernel_parameters\n self._testing = testing\n self._kernel = kernel", "def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n \n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. #\n # Store weights and biases for the convolutional layer using the keys 'W1' #\n # and 'b1'; use keys 'W2' and 'b2' for the weights and biases of the #\n # hidden affine layer, and keys 'W3' and 'b3' for the weights and biases #\n # of the output affine layer. 
#\n ############################################################################\n C, H, W = input_dim\n self.params['W1'] = np.random.randn(num_filters, C, filter_size, filter_size) * weight_scale\n self.params['b1'] = np.zeros(num_filters)\n self.params['W2'] = np.random.randn( num_filters * (0.5 * H) * (0.5 * W), hidden_dim) * weight_scale # * sqrt(2.0/n)\n self.params['b2'] = np.zeros(hidden_dim)\n self.params['W3'] = np.random.randn(hidden_dim, num_classes) * weight_scale # * sqrt(2.0/n)\n self.params['b3'] = np.zeros(num_classes)\n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def buildP(self):\r\n\r\n print 'Building P ...'\r\n\r\n #---Building p(y|x)---#\r\n pygx_params_mlp = MLP(activations=self.hyper['pygx_activs'],\r\n dims=self.hyper['pygx_dims'],\r\n weights_init=self.hyper['pygx_W_init'],\r\n biases_init=Constant(0))\r\n\r\n pygx_params = pygx_params_mlp.apply(self.x.reshape((self.x.shape[0]*self.x.shape[1],self.x.shape[2])))\r\n pygx_params = pygx_params.reshape((self.x.shape[0],self.x.shape[1],2*self.hyper['y_dim']))\r\n pygx_params_mlp.initialize()\r\n\r\n # self.pygx_mu.shape == (minibatch size, L_x , num of dimension of y)\r\n self.pygx_mu = pygx_params[:,:,:self.hyper['y_dim']]\r\n\r\n # self.pygx_var.shape == (minibatch size, L_x, num of dimension of y)\r\n self.pygx_var = T.exp( pygx_params[:,:,self.hyper['y_dim']:] )\r\n\r\n\r\n #---Building graph for the density of p(y|x)---#\r\n little_num = 10**(-32)\r\n inside_exp = -T.sum((self.y.dimshuffle(0,'x',1) - self.pygx_mu)**2/(2*self.pygx_var), axis=2)\r\n norm_cst = (2*np.pi)**(-self.hyper['y_dim']/2.)*T.exp(T.sum(T.log(self.pygx_var), axis=2))**(-1/2.)\r\n\r\n # shape == (minibatch size, # of x samples)\r\n pygx = norm_cst*T.exp(inside_exp)\r\n\r\n # shape == (minibatch size, # of x samples)\r\n self.log_pygx = T.log(pygx + little_num)\r\n\r\n #---Building NN for p(x|z=j,w) for all j---#\r\n pxgzw_mus = [None]*self.hyper['num_clust']\r\n pxgzw_vars = [None]*self.hyper['num_clust']\r\n pxgzw = [None]*self.hyper['num_clust']\r\n\r\n for j in range(self.hyper['num_clust']):\r\n\r\n pxgzw_params_mlp = MLP(activations=self.hyper['pxgzw_activs'][j],\r\n dims=self.hyper['pxgzw_dims'][j],\r\n weights_init=self.hyper['pxgzw_W_init'],\r\n biases_init=Constant(0))\r\n\r\n pxgzw_params = pxgzw_params_mlp.apply(self.w.reshape((self.w.shape[0]*self.w.shape[1],self.w.shape[2])))\r\n pxgzw_params = pxgzw_params.reshape((self.w.shape[0],self.w.shape[1], 2*self.hyper['x_dim']))\r\n pxgzw_params_mlp.initialize()\r\n\r\n # pxgzw_mus[j].shape == (minibatch size, L_w , num of dimension of x)\r\n pxgzw_mus[j] = pxgzw_params[:,:,:self.hyper['x_dim']]\r\n\r\n # pxgzw_vars[j].shape == (minibatch size, L_w, num of dimension of x)\r\n pxgzw_vars[j] = T.exp( pxgzw_params[:,:,self.hyper['x_dim']:] )\r\n\r\n #---Building graph for the density of p(x|z=j,w)---#\r\n little_num = 10**(-32)\r\n inside_exp = -T.sum((self.x.dimshuffle(0,'x',1,2) - pxgzw_mus[j].dimshuffle(0,1,'x',2))**2/(2*pxgzw_vars[j].dimshuffle(0,1,'x',2)), axis=3)\r\n norm_cst = (2*np.pi)**(-self.hyper['x_dim']/2.)*T.exp(T.sum(T.log(pxgzw_vars[j]), axis=2))**(-1/2.)\r\n\r\n # shape == (minibatch size, # of w samples (L_w), # of x samples (L_x))\r\n pxgzw[j] = norm_cst.dimshuffle(0,1,'x')*T.exp(inside_exp)\r\n\r\n\r\n # shape is (minibatch size, L_w , # of clusters , num of 
dimension of x)\r\n self.pxgzw_mus = T.concatenate([mu.dimshuffle(0,1,'x',2) for mu in pxgzw_mus], axis=2)\r\n # shape is (minibatch size, L_w , # of clusters , num of dimension of x)\r\n self.pxgzw_vars = T.concatenate([var.dimshuffle(0,1,'x',2) for var in pxgzw_vars], axis=2)\r\n\r\n # self.pxgzw.shape == (minibatch size, L_w, L_x, num_clust)\r\n self.pxgzw = T.concatenate([density.dimshuffle(0,1,2,'x') for density in pxgzw], axis=3)\r\n self.log_pxgzw = T.log(self.pxgzw + little_num)\r\n\r\n #---Building the p(z=j|x,w) posterior for all j---#\r\n # self.log_pzgxw.shape == (minibatch size, L_w, L_x, num_clust)\r\n self.log_pzgxw = T.log(self.pxgzw + little_num) -T.log(T.sum(self.pxgzw + little_num, axis=3).dimshuffle(0,1,2,'x'))", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_nav', 'g_kvhh', 'g_kva', 'g_kvsi', \n 'g_cav', 'g_kca', 'g_nap', 'g_kir']\n gX_log: np.ndarray = 4 * np.random.rand(9) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(9)) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n gR_name: List[str] = ['g_ampar', 'g_nmdar', 'g_gabar']\n gR_log: np.ndarray = 4 * np.random.rand(3) - 3 # from -3 to 1\n gR: np.ndarray = (10 * np.ones(3)) ** gR_log # 0.001 ~ 10\n gR_itr: Iterator = zip(gR_name, gR)\n\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n\n param_dict.update(gX_itr)\n param_dict.update(gR_itr)\n param_dict.update(tCa_dict)\n return param_dict", "def __init__(self, conf):\n self.model_conf = conf[\"model\"]\n self.epochs = self.model_conf.getint(\"n_epochs\")\n self.epoch = self.model_conf.getint(\"epoch_start\")\n self.batch_size = self.model_conf.getint(\"batch_size\")\n self.criterion = nn.CrossEntropyLoss()\n self.device = torch.device(self.model_conf.get('device'))\n #self.model = (\n # eval(self.model_conf.get('name'))(self.model_conf).to(self.device)\n #)\n self.model = nn.DataParallel(\n eval(self.model_conf.get('name'))(self.model_conf).to(self.device)\n )\n total_params = sum(p.numel() for p in self.model.parameters())\n print(\"Created model {}: {} parameters\"\n .format(self.model_conf.get('name'), total_params))\n if self.model_conf.get(\"optim\") == 'SGD':\n self.optimizer = optim.SGD(\n self.model.parameters(),\n lr=self.model_conf.getfloat(\"learning_rate\"),\n momentum=self.model_conf.getfloat(\"momentum\"),\n weight_decay=self.model_conf.getfloat(\"weight_decay\"))\n elif self.model_conf.get(\"optim\") == 'Adam':\n self.optimizer = optim.Adam(\n self.model.parameters(),\n lr=self.model_conf.getfloat(\"learning_rate\"),\n betas=json.loads(self.model_conf.get(\"betas\")))\n else:\n raise ValueError('Only SGD is supported')\n\n if self.model_conf.get(\"checkpoint\") is not None:\n self.load_checkpoint(self.model_conf.get(\"checkpoint\"))\n\n self.checkpoints_path = conf.get(\"paths\", \"checkpoints\")\n self.results_path = conf.get(\"paths\", \"results\")\n self.best_accuracy = 0\n self.train_size = None\n self.valid_size = None\n self.iteration_print_freq = conf.getint(\"log\", \"iteration_print_freq\")", "def __init__(\n self, config: SimpleGCNConfig = SimpleGCNConfig(name=\"simplegcn\")\n ):\n super().__init__()\n self.edge_lengthscale = config.edge_lengthscale\n self.weight_edges = config.weight_edges\n\n self.atom_embedding = nn.Linear(\n config.atom_input_features, config.width\n )\n\n self.layer1 = GraphConv(config.width, config.width)\n self.layer2 = GraphConv(config.width, config.output_features)\n 
self.readout = AvgPooling()", "def __init__(self, **kwargs):\n # Register the hyperparameters and their type in _hparam_types.\n # _hparam_types maps the parameter name to a tuple (type, bool).\n # The type value is the type of the parameter for scalar hyperparameters,\n # or the type of the list elements for multidimensional hyperparameters.\n # The bool value is True if the value is a list, False otherwise.\n self._hparam_types = {}\n for name, value in six.iteritems(kwargs):\n self.add_hparam(name, value)" ]
[ "0.7177107", "0.68647355", "0.66281545", "0.6522021", "0.6518543", "0.651809", "0.651809", "0.6490908", "0.64341015", "0.6414481", "0.6403292", "0.63531125", "0.6241054", "0.62265354", "0.62035", "0.6195846", "0.61952543", "0.61585575", "0.6135327", "0.6123834", "0.61211634", "0.611494", "0.6112285", "0.6109049", "0.6108987", "0.6056004", "0.60485643", "0.60371566", "0.6032182", "0.6009671", "0.5993526", "0.5943346", "0.59158784", "0.59125656", "0.5911319", "0.59008425", "0.58876", "0.58819616", "0.5869287", "0.58648807", "0.5852054", "0.584856", "0.58402455", "0.5833191", "0.58224267", "0.57947093", "0.5786626", "0.57689476", "0.57507145", "0.5742349", "0.5741396", "0.5741202", "0.5730648", "0.5719133", "0.57121855", "0.5701552", "0.5688516", "0.5687907", "0.5684276", "0.5683779", "0.5673926", "0.5662289", "0.5661806", "0.5659898", "0.5656724", "0.5640651", "0.5636127", "0.56360227", "0.56357235", "0.56271374", "0.56093985", "0.5607772", "0.5603848", "0.5583166", "0.5583025", "0.5580684", "0.55790913", "0.55732894", "0.55700356", "0.55696124", "0.5568007", "0.55677515", "0.5566354", "0.5564152", "0.5563627", "0.55633914", "0.55595684", "0.5557359", "0.5552873", "0.55467206", "0.5543217", "0.5542013", "0.55403274", "0.5537206", "0.5533731", "0.5533613", "0.55309224", "0.5528574", "0.5523358", "0.5521939" ]
0.713302
1
Private method controlling the primary algorithm behind the implementation of conditional abundance matching. This method will be renamed according to ``self.galprop_key`` in the instance of `ConditionalAbunMatch`. For example, if the property being modeled is ``gr_color``, then the `_mc_galprop` function would instead be named ``mc_gr_color``, a bound method to the `ConditionalAbunMatch` class instance.
def _mc_galprop(self, seed=None, **kwargs): model_helpers.update_param_dict(self, **kwargs) self._set_correlation_strength() if ('galaxy_table' in kwargs.keys()) & ('halos' in kwargs.keys()): msg = ("The mc_"+self.galprop_key+" method accepts either " + "a halos keyword argument, or a galaxy_table keyword argument" + " but never both.") raise KeyError(msg) elif 'galaxy_table' in kwargs.keys(): galaxy_table = kwargs['galaxy_table'] operative_sec_haloprop_key = ( model_defaults.host_haloprop_prefix + self.sec_haloprop_key) elif 'halos' in kwargs.keys(): galaxy_table = kwargs['halos'] operative_sec_haloprop_key = self.sec_haloprop_key else: msg = ("The mc_"+self.galprop_key+" requires either " + "a halos keyword argument, or a galaxy_table keyword argument") raise KeyError(msg) self.add_new_haloprops(galaxy_table) # All at once, draw all the randoms we will need np.random.seed(seed=seed) all_randoms = np.random.random(len(galaxy_table)*2) galprop_cumprob = all_randoms[0:len(galaxy_table)] galprop_scatter = all_randoms[len(galaxy_table):] # Initialize the output array output_galprop = np.zeros(len(galaxy_table)) # Determine binning and loop range if 'galaxy_table_slice_array' not in kwargs.keys(): binned_prim_galprop = np.digitize( galaxy_table[self.prim_galprop_key], self.prim_galprop_bins) prim_galprop_loop_range = set(binned_prim_galprop) else: prim_galprop_loop_range = range(len(self.one_point_lookup_table)) for i in prim_galprop_loop_range: # Determine the slice corresponding to the i^th prim_galprop bin if 'galaxy_table_slice_array' not in kwargs.keys(): idx_bini = np.where(binned_prim_galprop==i)[0] num_bini = len(idx_bini) else: idx_bini = kwargs['galaxy_table_slice_array'][i] num_bini = len(galaxy_table[idx_bini]) if len(idx_bini) > 0: # Fetch the appropriate number of randoms # for the i^th prim_galprop bin galprop_cumprob_bini = galprop_cumprob[idx_bini] galprop_scatter_bini = galprop_scatter[idx_bini] # Fetch the halos in the i^th prim_galprop bin, # and determine how they are sorted haloprop_bini = galaxy_table[idx_bini][operative_sec_haloprop_key] idx_sorted_haloprop_bini = np.argsort(haloprop_bini) galprop_bini = self._condition_matched_galprop( haloprop_bini[idx_sorted_haloprop_bini], galprop_cumprob_bini, i, galprop_scatter_bini, self.tol) # Assign the final values to the # appropriately sorted subarray of output_galprop output_galprop[idx_bini[idx_sorted_haloprop_bini]] = galprop_bini return output_galprop
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_comparison_func(self, adjective):\n return self.SONG_ADJECTIVES.get(adjective, {}).get(\"comparison\")", "def _set_primary_behaviors(self):\n\n for component_model in self.model_dictionary.values():\n gal_type = component_model.gal_type\n feature_name = component_model.feature_name\n\n try:\n component_model_galprop_dtype = component_model._galprop_dtypes_to_allocate\n except AttributeError:\n component_model_galprop_dtype = np.dtype([])\n\n methods_to_inherit = list(set(\n component_model._methods_to_inherit))\n\n for methodname in methods_to_inherit:\n new_method_name = methodname + '_' + gal_type\n new_method_behavior = self._update_param_dict_decorator(\n component_model, methodname)\n setattr(self, new_method_name, new_method_behavior)\n setattr(getattr(self, new_method_name), \n '_galprop_dtypes_to_allocate', component_model_galprop_dtype)\n setattr(getattr(self, new_method_name), 'gal_type', gal_type)\n setattr(getattr(self, new_method_name), 'feature_name', feature_name)\n\n attrs_to_inherit = list(set(\n component_model._attrs_to_inherit))\n for attrname in attrs_to_inherit:\n new_attr_name = attrname + '_' + gal_type\n attr = getattr(component_model, attrname)\n setattr(self, new_attr_name, attr)\n\n # Repeatedly overwrite self.threshold \n # This is harmless provided that all gal_types are ensured to have the same threshold, \n # which is guaranteed by the _test_dictionary_consistency method\n if hasattr(component_model, 'threshold'):\n setattr(self, 'threshold_' + gal_type, component_model.threshold)\n self.threshold = getattr(self, 'threshold_' + gal_type)", "def matching_function(self):\n return self.matching", "def compute_conditions(self, level, model, properties):\n mro = [el.__name__ for el in model.__class__.mro()]\n filtered = [(target, fn) for (cls, lvl, target, fn) in self.matchconditions\n if (cls in mro) and lvl == level]\n\n return dict((target, fn(model, properties)) for (target, fn) in filtered)", "def on_matching_rules(self, matching_rules):\n pass", "def _transfer_rule(self, ratio, r, ref_r, prod_name, m, t):\n prod = getattr(m, prod_name)\n return prod[r, t] == prod[ref_r, t] * ratio # TODO tolerance??", "def run(self):\n # If the name doesn't match, do nothing.\n if self.nameregex and \\\n not self.nameregex.match(self.propname): return 0\n\n # If the value doesn't match, do nothing.\n if self.valueregex and \\\n not self.valueregex.search(self.propvalue): return 0\n\n # Execute the child actions.\n return super(FilterRevProp, self).run()", "def lookup_filter(self, pbc, name=None, flags={}):\n d = []\n uplookup = None\n updesc = None\n for desc in pbc.descriptions:\n # pick methods but ignore already-bound methods, which can come\n # from an instance attribute\n if (isinstance(desc, description.MethodDesc)\n and desc.selfclassdef is None):\n methclassdef = desc.originclassdef\n if methclassdef is not self and methclassdef.issubclass(self):\n pass # subclasses methods are always candidates\n elif self.issubclass(methclassdef):\n # upward consider only the best match\n if uplookup is None or methclassdef.issubclass(uplookup):\n uplookup = methclassdef\n updesc = desc\n continue\n # for clsdef1 >= clsdef2, we guarantee that\n # clsdef1.lookup_filter(pbc) includes\n # clsdef2.lookup_filter(pbc) (see formal proof...)\n else:\n continue # not matching\n # bind the method by giving it a selfclassdef. 
Use the\n # more precise subclass that it's coming from.\n desc = desc.bind_self(methclassdef, flags)\n d.append(desc)\n if uplookup is not None:\n d.append(updesc.bind_self(self, flags))\n\n if d:\n return SomePBC(d, can_be_None=pbc.can_be_None)\n elif pbc.can_be_None:\n return s_None\n else:\n return s_ImpossibleValue", "def _match(df: DataFrame,\r\n prob_mod: mlc.Model,\r\n method: str,\r\n metric_col: str,\r\n match_kwargs: Optional[dict] = None):\r\n\r\n functions_dict = {\r\n 'assignment': _assignment_match,\r\n 'quantile': _quantile_match\r\n }\r\n # _assignment_match doesnt currently have any kwargs, so match_kwargs should be empty\r\n df, match_info = functions_dict[method](df, prob_mod, metric_col, **match_kwargs)\r\n\r\n return df, match_info", "def __matches__(component, dispatch_key):\n # Override in subclasses.\n return False", "def _criteria(self, record):\n\n process = True\n switch = False\n if record.aaf[0] > 0.5:\n switch = True\n return process, switch", "def check_call_similarity(self):\r\n \r\n if self.old and not self.new:\r\n self.similarity = \"LOSS\"\r\n elif not self.old and self.new:\r\n self.similarity = \"GAIN\"\r\n else:\r\n if not self.old.is_variant and self.new.is_variant:\r\n self.similarity = \"GAIN\" \r\n elif self.old.is_variant and not self.new.is_variant:\r\n self.similarity = \"LOSS\" \r\n\r\n else:\r\n self.similarity = \"SAME\"", "def __init__(\n self,\n propensity_transform=None,\n caliper=None,\n with_replacement=True,\n n_neighbors=1,\n matching_mode=\"both\",\n metric=\"mahalanobis\",\n knn_backend=\"sklearn\",\n ):\n self.matching = Matching(\n propensity_transform=propensity_transform,\n caliper=caliper,\n with_replacement=with_replacement,\n n_neighbors=n_neighbors,\n matching_mode=matching_mode,\n metric=metric,\n knn_backend=knn_backend,\n )", "def define_rules(threshold, similar_ci, similar_rel, similar_attr_ci, similar_attr_rel):\n rules[\"ci_types\"] = {db_ci: list(similar_ci.get(db_ci).keys())[\n 0] for db_ci in similar_ci if float(similar_ci.get(db_ci).get(list(similar_ci.get(db_ci).keys())[\n 0])) > threshold}\n\n rules[\"rel_types\"] = {db_rel: list(similar_rel.get(db_rel).keys())[\n 0] for db_rel in similar_rel if float(similar_rel.get(db_rel).get(list(similar_rel.get(db_rel).keys())[\n 0])) > threshold}\n\n inverse_cis = {x: y for y, x in rules[\"ci_types\"].items()}\n inverse_rels = {x: y for y, x in rules[\"rel_types\"].items()}\n\n for cmdb_ci, atrs in similar_attr_ci.items():\n db_ci = inverse_cis.get(cmdb_ci)\n if db_ci in rules[\"ci_types\"]:\n attr = {}\n for cmdb_at in atrs:\n db_at = list(atrs.get(cmdb_at).keys())[0]\n if float(atrs.get(cmdb_at).get(db_at)) > threshold:\n attr[db_at] = cmdb_at\n rules[\"ci_attributes\"][db_ci] = attr\n\n for cmdb_rel, atrs in similar_attr_rel.items():\n db_rel = inverse_rels.get(cmdb_rel)\n if db_rel in rules[\"rel_types\"]:\n attr = {}\n for cmdb_at in atrs:\n db_at = list(atrs.get(cmdb_at).keys())[0]\n if float(atrs.get(cmdb_at).get(db_at)) > threshold:\n attr[db_at] = cmdb_at\n rules[\"rel_attributes\"][db_rel] = attr", "def __call__(self, k):\n self.virtual_method(k)\n return self.gk, self.g_mink, self.g_maxk", "def _cmp_dispatcher(other_method_name):\n\n def dispatched_cmp(self, other):\n try:\n other_method = getattr(other, other_method_name)\n except AttributeError:\n return False\n return other_method(self)\n return dispatched_cmp", "def _extract_lookup(self, key):\n parts = key.rsplit(\"__\", 1)\n\n if len(parts) > 1 and parts[1] in operators:\n op = parts[1]\n attribute = 
parts[0]\n else:\n # 'exact' is the default lookup if there was no explicit comparison op in `key`\n op = \"exact\"\n attribute = key\n\n # Construct and assign the lookup class as a filter criteria\n return attribute, self.get_lookup(op)", "def _is_relation_applicable(self, key, version, rule):\n if key == '<':\n return ComparableVersion(version) < rule\n elif key == '>':\n return ComparableVersion(version) > rule\n elif key == '=':\n return ComparableVersion(version) == rule\n elif key == '<=':\n return ComparableVersion(version) <= rule\n elif key == '>=':\n return ComparableVersion(version) >= rule\n elif key == '*':\n return True\n return False", "def _is_relation_applicable(self, key, version, rule):\n if key == '<':\n return ComparableVersion(version) < rule\n elif key == '>':\n return ComparableVersion(version) > rule\n elif key == '=':\n return ComparableVersion(version) == rule\n elif key == '<=':\n return ComparableVersion(version) <= rule\n elif key == '>=':\n return ComparableVersion(version) >= rule\n elif key == '*':\n return True\n return False", "def isBACKUP(ra=None, dec=None, gaiagmag=None, primary=None):\n if primary is None:\n primary = np.ones_like(gaiagmag, dtype='?')\n\n # ADM restrict all classes to dec >= -30.\n primary &= dec >= -30.\n\n isbackupbright = primary.copy()\n isbackupfaint = primary.copy()\n isbackupveryfaint = primary.copy()\n\n # ADM determine which sources are close to the Galaxy.\n in_gal = is_in_Galaxy([ra, dec], radec=True)\n\n # ADM bright targets are 10 < G < 16.\n isbackupbright &= gaiagmag >= 10\n isbackupbright &= gaiagmag < 16\n\n # ADM faint targets are 16 < G < 18.\n isbackupfaint &= gaiagmag >= 16\n isbackupfaint &= gaiagmag < 18.\n # ADM and are \"far from\" the Galaxy.\n isbackupfaint &= ~in_gal\n\n # ADM very faint targets are 18. 
< G < 19.\n isbackupveryfaint &= gaiagmag >= 18.\n isbackupveryfaint &= gaiagmag < 19\n # ADM and are \"far from\" the Galaxy.\n isbackupveryfaint &= ~in_gal\n\n return isbackupbright, isbackupfaint, isbackupveryfaint", "def _apply_gating_logic(self):\n if self._mode != QcQuantizeOpMode.LEARN_ENCODINGS:\n return\n\n applied_quantizers = set()\n def apply_logic(name, quantizer):\n if quantizer in self._grouped_quantizers.values():\n if quantizer in applied_quantizers:\n return\n\n name, *_ = [n for n, q in self._grouped_quantizers.items() if q == quantizer]\n\n if quantizer.enabled:\n if quantizer.bitwidth == 32 or quantizer.data_type == QuantizationDataType.float:\n return\n set_encoding_min_max_gating_threshold(\n getattr(self, name + '_encoding_min'),\n getattr(self, name + '_encoding_max'))\n applied_quantizers.add(quantizer)\n\n for name, quantizer in self.input_quantizers.items():\n apply_logic(name, quantizer)\n for name, quantizer in self.output_quantizers.items():\n apply_logic(name, quantizer)\n for name, quantizer in self._param_quantizers.items():\n apply_logic(name, quantizer)", "def global_threshold(img, threshold_method):\n pass", "def rule_backward_if_elimination(self, p, g):\r\n\r\n if p.type == self.PT.implies:\r\n # p = IF A(x) THEN B(x)\r\n if self.matching(p.v2, g):\r\n return [self.atom_prop_replace_properties(p.v1, new_arg_id=g.v1.arg_id,\r\n new_is_name=g.v1.is_name,\r\n new_hat=g.v1.hat)]\r\n return None", "def matchedKey(self, value):\n if self._fullname.lower() == value.lower():\n #print(f'FullName attribute matched: {self._fullname} == {value}')\n return True\n if self._shortname.lower() == value.lower():\n #print(f'Shortname attribute matched: {self._shortname} == {value}')\n return True\n return False", "def _compare(self, value, target):\n result = getattr(self.reg, target) - value\n self.reg.N = result >> 7\n self.reg.C = getattr(self.reg, target) >= value\n self.reg.Z = result == 0", "def update(self):\n #self.consider_deactivation() if self.active_flag else self.consider_activation()\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n if self.active_flag:\n self.sense_and_act()\n self.weight = self.match_degree*self.priority", "def update(self):\n #self.consider_deactivation() if self.active_flag else self.consider_activation()\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n if self.active_flag:\n self.sense_and_act()\n self.weight = self.match_degree*self.priority", "def update(self):\n #self.consider_deactivation() if self.active_flag else self.consider_activation()\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n if self.active_flag:\n self.sense_and_act()\n self.weight = self.match_degree*self.priority", "def update(self):\n #self.consider_deactivation() if self.active_flag else self.consider_activation()\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n if self.active_flag:\n self.sense_and_act()\n self.weight = self.match_degree*self.priority", "def match(self, product):\n\n raise NotImplementedError, 'need impletent match method'", "def match(self, grp, healpixIDs, pixRA, pixDec):\n\n # print('hello', grp.columns)\n pixRA_rad = np.deg2rad(pixRA)\n pixDec_rad = np.deg2rad(pixDec)\n\n # convert data position in rad\n pRA = np.median(grp[self.RACol])\n pDec = np.median(grp[self.DecCol])\n pRA_rad = np.deg2rad(pRA)\n pDec_rad = np.deg2rad(pDec)\n\n # gnomonic projection of pixels on 
the focal plane\n x, y = proj_gnomonic_plane(pRA_rad, pDec_rad, pixRA_rad, pixDec_rad)\n # x, y = proj_gnomonic_plane(np.deg2rad(self.LSST_RA-pRA),np.deg2rad(self.LSST_Dec-pDec), pixRA_rad, pixDec_rad)\n\n # get LSST FP with the good scale\n # pnew = LSSTPointing(0., 0., area=np.pi*self.fpscale**2)\n fpnew = LSSTPointing_circular(0., 0., maxbound=self.fpscale)\n # fpnew = LSSTPointing(np.deg2rad(self.LSST_RA-pRA),np.deg2rad(self.LSST_Dec-pDec),area=np.pi*self.fpscale**2)\n # maxbound=self.fpscale)\n\n \"\"\"\n import matplotlib.pyplot as plt\n fig, ax = plt.subplots()\n ax.plot(x, y, 'ko')\n pf = PolygonPatch(fpnew, facecolor=(0, 0, 0, 0), edgecolor='red')\n ax.add_patch(pf)\n plt.show()\n \"\"\"\n\n # print(shapely.vectorized.contains(\n # fpnew, x, y), self.fpscale, fpnew.area)\n\n idf = shapely.vectorized.contains(fpnew, x, y)\n\n pixID_matched = list(healpixIDs[idf])\n pixRA_matched = list(pixRA[idf])\n pixDec_matched = list(pixDec[idf])\n\n # names = [grp.name]*len(pixID_matched)\n df_pix = pd.DataFrame({'healpixID': pixID_matched,\n 'pixRA': pixRA_matched,\n 'pixDec': pixDec_matched, })\n # 'groupName': names})\n\n return df_pix\n \"\"\"\n n_index = len(grp.index.values)\n\n arr_index = grp.index.values\n\n n_pix = len(df_pix)\n if n_pix > 1:\n arr_index = arr_index.repeat(n_pix)\n if n_index > 1:\n df_pix = df_pix.append([df_pix]*(n_index-1), ignore_index=True)\n\n df_pix.loc[:, 'index'] = arr_index\n \n return df_pix\n \"\"\"", "def __le__(self, other):\n return isinstance(other, GPred) and (self.name == '?' or self.name == other.name)", "def handleMatch(self, m):\r\n pass", "def _cmp_(self, other):\n if(not isinstance(other, VVHarmonicWeakMaassForms)):\n return False\n eq = (self.multiplier() == other.WR) and (self._weight_rat == other._weight_rat)\n eq = eq and (self.prec == other.prec) and (self._sym_type == other._sym_type)\n eq = eq and (self._is_dual_rep == other._is_dual_rep)\n return eq", "def flag_bump_overcap_passengers(iteration, simulation_iteration, bump_iter, pathset_paths_df, pathset_links_df, veh_loaded_df):\n\n # 1) Look at which vehicle links are over capacity\n # overcap = how many people are problematic\n # overcap_frac = what percentage of boards are problematic\n veh_loaded_df[Trip.SIM_COL_VEH_OVERCAP ] = veh_loaded_df[Trip.SIM_COL_VEH_ONBOARD] - veh_loaded_df[Trip.VEHICLES_COLUMN_TOTAL_CAPACITY]\n veh_loaded_df[Assignment.SIM_COL_PAX_OVERCAP_FRAC] = 0.0\n\n # Keep negatives - that means we have space\n # veh_loaded_df.loc[veh_loaded_df[Trip.SIM_COL_VEH_OVERCAP]<0, Trip.SIM_COL_VEH_OVERCAP ] = 0 # negatives - don't care, set to zero\n veh_loaded_df.loc[veh_loaded_df[Trip.SIM_COL_VEH_BOARDS ]>0, Assignment.SIM_COL_PAX_OVERCAP_FRAC] = veh_loaded_df[Trip.SIM_COL_VEH_OVERCAP]/veh_loaded_df[Trip.SIM_COL_VEH_BOARDS]\n\n # only need to do this once\n # TODO: figure out MSA with iteration/simulation_iteration/bump_iter\n if iteration==1 and simulation_iteration==0 and bump_iter==0:\n veh_loaded_df[Trip.SIM_COL_VEH_MSA_OVERCAP] = veh_loaded_df[Trip.SIM_COL_VEH_MSA_ONBOARD] - veh_loaded_df[Trip.VEHICLES_COLUMN_TOTAL_CAPACITY]\n veh_loaded_df.loc[veh_loaded_df[Trip.SIM_COL_VEH_MSA_OVERCAP]<0, Trip.SIM_COL_VEH_MSA_OVERCAP] = 0 # negatives - don't care, set to zero\n\n # These are the trips/stops AT capacity -- first, make sure it's clear they successfully boarded\n atcap_df = veh_loaded_df.loc[veh_loaded_df[Trip.SIM_COL_VEH_OVERCAP] == 0]\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() %d vehicle trip/stops at capacity: (showing head)\\n%s\" % \\\n 
(len(atcap_df), atcap_df.head().to_string()))\n\n # Join pathset links to atcap_df; now passenger links alighting at a bump stop will have Trip.STOPTIMES_COLUMN_STOP_SEQUENCE set\n pathset_links_df = pandas.merge(left =pathset_links_df,\n left_on =[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM, \"A_seq\"],\n right =atcap_df[[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM, Trip.STOPTIMES_COLUMN_STOP_SEQUENCE]],\n right_on=[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM, Trip.STOPTIMES_COLUMN_STOP_SEQUENCE],\n how =\"left\")\n\n # these folks boarded\n pathset_links_df.loc[(pandas.notnull(pathset_links_df[Trip.STOPTIMES_COLUMN_STOP_SEQUENCE])) & \\\n (pathset_links_df[Assignment.SIM_COL_PAX_CHOSEN]>=0), Assignment.SIM_COL_PAX_BUMPSTOP_BOARDED] = 1\n\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() pathset_links_df chosen, at capacity\\n%s\" % # pathset_links_df.head().to_string())\n pathset_links_df.loc[ (pandas.notnull(pathset_links_df[Trip.STOPTIMES_COLUMN_STOP_SEQUENCE])) & \\\n (pathset_links_df[Assignment.SIM_COL_PAX_CHOSEN]>=0) ].to_string())\n pathset_links_df.drop(Trip.STOPTIMES_COLUMN_STOP_SEQUENCE, axis=1, inplace=True)\n\n # These are trips/stops over capacity\n overcap_df = veh_loaded_df.loc[veh_loaded_df[Trip.SIM_COL_VEH_OVERCAP] > 0]\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() %d vehicle trip/stops over capacity: (showing head)\\n%s\" % \\\n (len(overcap_df), overcap_df.head().to_string()))\n\n # If none, we're done\n if len(overcap_df) == 0:\n FastTripsLogger.info(\" No over-capacity vehicles\")\n return (0, pathset_paths_df, pathset_links_df, veh_loaded_df)\n\n # 2) Look at the trip-stops where the *first people* board after we're at capacity (impossible boards) if any\n bump_stops_df = overcap_df.groupby([Trip.STOPTIMES_COLUMN_TRIP_ID]).aggregate('first').reset_index()\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() bump_stops_df iter=%d sim_iter=%d bump_iter=%d (%d rows, showing head):\\n%s\" %\n (iteration, simulation_iteration, bump_iter, len(bump_stops_df), bump_stops_df.head().to_string()))\n\n\n if Assignment.CAPACITY_CONSTRAINT:\n # One stop at a time -- slower but more accurate\n # 3) If Assignment.BUMP_ONE_AT_A_TIME, select the first such stop by arrival time\n # Otherwise, select the first such stop for each vehicle trip\n if Assignment.BUMP_ONE_AT_A_TIME:\n bump_stops_df.sort_values(by=[Trip.STOPTIMES_COLUMN_ARRIVAL_TIME], inplace=True)\n bump_stops_df = bump_stops_df.iloc[:1]\n\n FastTripsLogger.info(\" Need to bump %d passengers from %d trip-stops\" % (bump_stops_df.overcap.sum(), len(bump_stops_df)))\n\n # debug -- see the whole trip\n if True:\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() Trips with bump stops:\\n%s\\n\" % \\\n pandas.merge(\n left=veh_loaded_df[[Trip.STOPTIMES_COLUMN_TRIP_ID,\n Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,\n Trip.STOPTIMES_COLUMN_STOP_SEQUENCE,\n Trip.STOPTIMES_COLUMN_STOP_ID,\n Trip.STOPTIMES_COLUMN_STOP_ID_NUM,\n Trip.VEHICLES_COLUMN_TOTAL_CAPACITY,\n Trip.SIM_COL_VEH_BOARDS,\n Trip.SIM_COL_VEH_ALIGHTS,\n Trip.SIM_COL_VEH_ONBOARD,\n Trip.SIM_COL_VEH_OVERCAP,\n Assignment.SIM_COL_PAX_OVERCAP_FRAC]],\n right=bump_stops_df[[Trip.STOPTIMES_COLUMN_TRIP_ID]],\n how='inner').to_string())\n\n # 4) Join these stops to pathset_links_df, so pathset_links_df now has column Assignment.SIM_COL_PAX_OVERCAP_FRAC\n pathset_links_df = Assignment.find_passenger_vehicle_times(pathset_links_df, veh_loaded_df)\n\n # 5) If not Assignment.CAPACITY_CONSTRAINT, return (and drop the column named Trip.SIM_COL_VEH_OVERCAP from veh_loaded_df)\n # (If we're not 
actually bumping passengers, we're done; the pathset_links_df have overcap and overcap_frac information)\n if not Assignment.CAPACITY_CONSTRAINT:\n\n veh_loaded_df.drop(Assignment.SIM_COL_PAX_OVERCAP_FRAC, axis=1, inplace=True)\n return (0, pathset_paths_df, pathset_links_df, veh_loaded_df)\n\n # join pathset links to bump_stops_df; now passenger links alighting at a bump stop will have Trip.STOPTIMES_COLUMN_STOP_SEQUENCE set\n pathset_links_df = pandas.merge(left =pathset_links_df,\n left_on =[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM, \"A_seq\"],\n right =bump_stops_df[[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM, Trip.STOPTIMES_COLUMN_STOP_SEQUENCE]],\n right_on=[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM, Trip.STOPTIMES_COLUMN_STOP_SEQUENCE],\n how =\"left\")\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() pathset_links_df (%d rows, showing head):\\n%s\" % (len(pathset_links_df), pathset_links_df.head().to_string()))\n\n # bump candidates: boarding at bump stops, chosen paths, unbumped and overcap\n bumpstop_boards = pathset_links_df.loc[pandas.notnull(pathset_links_df[Trip.STOPTIMES_COLUMN_STOP_SEQUENCE])& # board at bump_stops_df stop\n (pathset_links_df[Assignment.SIM_COL_PAX_CHOSEN] >=0)& # chosen\n (pathset_links_df[Assignment.SIM_COL_PAX_BUMP_ITER] < 0)].copy() # unbumped\n # unchosen bump candidates (to hedge against future choosing of paths with at-capacity links)\n unchosen_atcap_boards = pathset_links_df.loc[(pandas.notnull(pathset_links_df[Trip.STOPTIMES_COLUMN_STOP_SEQUENCE]))& # board at bump_stops_df stop\n (pathset_links_df[Assignment.SIM_COL_PAX_CHOSEN] <0)& # path not chosen (yet)\n (pathset_links_df[Assignment.SIM_COL_PAX_BUMP_ITER] <0)] # unbumped\n\n # bump off later arrivals, later trip_list_num\n bumpstop_boards.sort_values(by=[ \\\n Assignment.SIM_COL_PAX_A_TIME, # I think this is correct\n Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,\n \"A_seq\",\n Passenger.PF_COL_PAX_A_TIME,\n Passenger.TRIP_LIST_COLUMN_TRIP_LIST_ID_NUM],\n ascending=[True, True, True, False, False], inplace=True)\n bumpstop_boards.reset_index(drop=True, inplace=True)\n\n # For each trip_id, stop_seq, stop_id, we want the first *overcap* rows\n # group to trip_id, stop_seq, stop_id and count off\n bpb_count = bumpstop_boards.groupby([Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,\n \"A_seq\",\n \"A_id_num\"]).cumcount()\n bpb_count.name = 'bump_index'\n\n # Add the bump index to our passenger-paths/stops\n bumpstop_boards = pandas.concat([bumpstop_boards, bpb_count], axis=1)\n\n # 1 mean boarded, 0 means we got bumped\n bumpstop_boards[\"new_bumpstop_boarded\"] = 1\n bumpstop_boards.loc[ bumpstop_boards[\"bump_index\"] < bumpstop_boards[Trip.SIM_COL_VEH_OVERCAP], \"new_bumpstop_boarded\"] = 0 # these folks got bumped\n\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() bumpstop_boards (%d rows, showing head):\\n%s\" % \n (len(bumpstop_boards), bumpstop_boards.head(50).to_string()))\n\n # filter to unique passengers/paths who got bumped\n bump_paths = bumpstop_boards.loc[ bumpstop_boards[\"new_bumpstop_boarded\"] == 0,\n [Passenger.TRIP_LIST_COLUMN_PERSON_ID, Passenger.TRIP_LIST_COLUMN_TRIP_LIST_ID_NUM, Passenger.PF_COL_PATH_NUM]].drop_duplicates()\n chosen_paths_bumped = len(bump_paths)\n\n # if we have unchosen paths that board here, add those too\n if len(unchosen_atcap_boards) > 0:\n\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() unchosen_atcap_boards (%d rows, showing head):\\n%s\" % \\\n (len(unchosen_atcap_boards), unchosen_atcap_boards.head().to_string()))\n unchosen_atcap_boards = 
unchosen_atcap_boards[[Passenger.TRIP_LIST_COLUMN_PERSON_ID,\n Passenger.TRIP_LIST_COLUMN_TRIP_LIST_ID_NUM,\n Passenger.PF_COL_PATH_NUM]].drop_duplicates()\n bump_paths = pandas.concat([bump_paths, unchosen_atcap_boards], axis=0)\n\n bump_paths['bump_iter_new'] = bump_iter\n\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() bump_paths (%d rows, showing head):\\n%s\" % \\\n (len(bump_paths), bump_paths.head().to_string()))\n\n # Kick out the bumped passengers -- update bump_iter on all pathset_links_df\n pathset_links_df = pandas.merge(left =pathset_links_df, right=bump_paths, how =\"left\")\n pathset_links_df.loc[ pandas.notnull(pathset_links_df['bump_iter_new']), Assignment.SIM_COL_PAX_BUMP_ITER] = pathset_links_df['bump_iter_new']\n\n # Keep record of if they boarded at a bumpstop\n pathset_links_df = pandas.merge(left=pathset_links_df,\n right=bumpstop_boards[[Passenger.TRIP_LIST_COLUMN_PERSON_ID,\n Passenger.TRIP_LIST_COLUMN_TRIP_LIST_ID_NUM,\n Passenger.PF_COL_PATH_NUM,\n Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,\n \"A_seq\",\"new_bumpstop_boarded\"]],\n how=\"left\")\n pathset_links_df.loc[ pandas.notnull(pathset_links_df['new_bumpstop_boarded']), Assignment.SIM_COL_PAX_BUMPSTOP_BOARDED] = pathset_links_df[\"new_bumpstop_boarded\"]\n\n new_bump_wait = bumpstop_boards[[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,\n Trip.STOPTIMES_COLUMN_STOP_SEQUENCE,\n \"A_id_num\",\n Passenger.PF_COL_PAX_A_TIME]].groupby( \\\n [Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,Trip.STOPTIMES_COLUMN_STOP_SEQUENCE,\"A_id_num\"]).first()\n new_bump_wait.reset_index(drop=False, inplace=True)\n new_bump_wait.rename(columns={\"A_id_num\":Trip.STOPTIMES_COLUMN_STOP_ID_NUM}, inplace=True)\n\n FastTripsLogger.debug(\"new_bump_wait (%d rows, showing head):\\n%s\" %\n (len(new_bump_wait), new_bump_wait.head().to_string(formatters=\\\n {Passenger.PF_COL_PAX_A_TIME:Util.datetime64_formatter})))\n\n # incorporate it into the bump wait df\n if type(Assignment.bump_wait_df) == type(None):\n Assignment.bump_wait_df = new_bump_wait\n else:\n Assignment.bump_wait_df = pandas.concat([Assignment.bump_wait_df, new_bump_wait], axis=0)\n\n FastTripsLogger.debug(\"flag_bump_overcap_passengers() bump_wait_df (%d rows, showing head):\\n%s\" %\n (len(Assignment.bump_wait_df), Assignment.bump_wait_df.head().to_string()))\n\n Assignment.bump_wait_df.drop_duplicates(subset=[Trip.STOPTIMES_COLUMN_TRIP_ID_NUM,\n Trip.STOPTIMES_COLUMN_STOP_SEQUENCE], inplace=True)\n\n # drop unnecessary columns before returning\n pathset_links_df.drop([ \\\n Trip.STOPTIMES_COLUMN_STOP_SEQUENCE, # adding this\n 'bump_iter_new',\n 'new_bumpstop_boarded'\n ], axis=1, inplace=True)\n\n veh_loaded_df.drop(Assignment.SIM_COL_PAX_OVERCAP_FRAC, axis=1, inplace=True)\n\n FastTripsLogger.debug(\"flag_bump_overcap_passengers(): return pathset_links_df.head():\\n%s\\n\" % pathset_links_df.head().to_string())\n FastTripsLogger.debug(\"flag_bump_overcap_passengers(): return veh_loaded_df.head():\\n%s\\n\" % veh_loaded_df.head().to_string())\n\n return (chosen_paths_bumped, pathset_paths_df, pathset_links_df, veh_loaded_df)", "def bv_weight(self, output_prop):\n raise NotImplementedError(\"subclasses need to override this method\")", "def generalize_unchanging_condition_attribute(\n self, randomfunc: Callable = random.choice) -> bool:\n if len(self.specified_unchanging_attributes) > 0:\n ridx = randomfunc(self.specified_unchanging_attributes)\n self.condition.generalize(ridx)\n return True\n\n return False", "def altloc_match(self, other: AtomKey) -> bool:\n ...", "def 
membershipFunction(self):\n \"\"\" Location MF \"\"\"\n self.very_less_demand = fuzz.trapmf(self.location,[0,0,500,600])\n self.less_demand = fuzz.trapmf(self.location,[500,600,1000,1100])\n self.average_demand = fuzz.trapmf(self.location,[1000,1100,1500,1600])\n self.high_demand = fuzz.trapmf(self.location,[1500,1600,2000,2100])\n self.very_high_demand = fuzz.trapmf(self.location,[2000,2100,4000,4000])\n\n \"\"\" Bedroom MF \"\"\"\n self.less_bed = fuzz.trapmf(self.bedroom,[0,0,2,3])\n self.average_bed = fuzz.trimf(self.bedroom,[2,3,4])\n self.more_bed = fuzz.trapmf(self.bedroom,[3,4,11,11])\n\n \"\"\" Bathroom MF \"\"\"\n self.less_bath = fuzz.trapmf(self.bathroom,[0,0,1,2])\n self.average_bath = fuzz.trimf(self.bathroom,[1,2,3])\n self.more_bath = fuzz.trapmf(self.bathroom,[2,3,11,11])\n\n \"\"\" Facilities MF \"\"\"\n self.less_fac = fuzz.trapmf(self.facilities,[0,0,3,4])\n self.average_fac = fuzz.trapmf(self.facilities,[3,4,6,7])\n self.high_fac = fuzz.trapmf(self.facilities,[6,7,11,11])\n\n \"\"\" Funishing MF \"\"\"\n self.unfun = fuzz.trimf(self.funishing,[0,0,1])\n self.partially_fun = fuzz.trimf(self.funishing,[0,1,2])\n self.full_fun = fuzz.trimf(self.funishing,[1,2,3])\n\n \"\"\" Area size MF \"\"\"\n self.very_small_area = fuzz.trapmf(self.areaSize,[0,0,500, 600])\n self.small_area = fuzz.trapmf(self.areaSize,[500, 600, 900, 1000])\n self.average_area = fuzz.trapmf(self.areaSize,[900, 1000, 1200, 1300])\n self.large_area = fuzz.trapmf(self.areaSize,[1200, 1300, 3100, 3100]) \n\n \"\"\" Accessibility MF \"\"\"\n self.bad_access = fuzz.trapmf(self.accessibility,[0,0,4,6])\n self.average_access = fuzz.trimf(self.accessibility, [4,5,6])\n self.good_access = fuzz.trapmf(self.accessibility,[4,6,11,11])\n \n \"\"\" PSF MF\"\"\"\n self.below_price = fuzz.trapmf(self.psf, [0,100,200,300])\n self.standard_price = fuzz.trapmf(self.psf, [200,300,400,500])\n self.above_price = fuzz.trapmf(self.psf, [400, 500, 700, 800])\n self.high_price = fuzz.trapmf(self.psf, [700, 800, 900, 1000])", "def _npclass_score(self,\n obj,\n target,\n method='mix',\n obj_classes=None,\n target_classes=None):\n # todo: make subroutines\n # assess what is obj and target\n spec_like = obj\n bgc_like = target\n spec_like_classes_tup = obj_classes\n bgc_like_classes_dict = target_classes\n bgc_to_spec = False\n if isinstance(obj, BGC) or isinstance(obj, GCF):\n bgc_like = obj\n spec_like = target\n bgc_like_classes_dict = obj_classes\n spec_like_classes_tup = target_classes\n bgc_to_spec = True\n\n # assess method - move to get_links?\n assert method in self.method_options, \\\n (f\"NPClass method should be one of method options: {self.method_options}, if your method is not \" +\n \"in the options check if the class predictions (canopus, etc.) 
are loaded correctly\")\n\n # gather correct classes if not provided, dict for bgcs, tup for spec\n if not bgc_like_classes_dict:\n bgc_like_classes_dict = self._get_gen_classes(bgc_like)\n if not spec_like_classes_tup:\n spec_like_classes_tup = self._get_met_classes(spec_like, method)\n # unpack spec_like classes - both are lists\n spec_like_classes, spec_like_classes_names_inds = spec_like_classes_tup\n\n scores = [\n ] # this will be returned if one of the class sides is absent\n std_score = 0 # if link not recorded in scores (mibig) return this score\n # loop through classes that are possible to link (names in class_match object)\n for bgc_class_name in self.npl.class_matches.bgc_class_names:\n if bgc_class_name == \"mibig_classes\":\n # treat specially as bigscape class needs to be translated to mibig class\n bigscape_class = bgc_like_classes_dict[\"bigscape_class\"]\n # convert bigscape class to mibig class\n bgc_like_classes = [\n self.npl.class_matches.bigscape_mibig_conversion.get(\n bigscape_class)\n ]\n else:\n bgc_like_classes = bgc_like_classes_dict.get(bgc_class_name)\n if bgc_like_classes and spec_like_classes: # check for classes from both sides\n for bgc_class in bgc_like_classes:\n for chem_class_name in self.npl.class_matches.chem_class_names:\n # does info exist for this spectrum class level, return index for class level\n spec_class_level_i = spec_like_classes_names_inds.get(\n chem_class_name)\n if spec_class_level_i:\n spec_class_tup = spec_like_classes[\n spec_class_level_i]\n if spec_class_tup: # if there is a class at this lvl\n # is a tuple of (name, score) so take [0]\n spec_class = spec_class_tup[0]\n # determine direction of scoring: BGC -> spectrum\n if bgc_to_spec:\n score = self.npl.class_matches.\\\n class_matches[bgc_class_name]\\\n [chem_class_name].get(bgc_class, {})\\\n .get(spec_class, std_score)\n result_tuple = (score, bgc_class_name,\n chem_class_name, bgc_class,\n spec_class)\n else: # spectrum -> BGC\n score = self.npl.class_matches.\\\n class_matches[chem_class_name]\\\n [bgc_class_name].get(spec_class, {})\\\n .get(bgc_class, std_score)\n result_tuple = (score, chem_class_name,\n bgc_class_name, spec_class,\n bgc_class)\n scores.append(result_tuple)\n return sorted(scores, reverse=True)", "def testMatchSwarpLanczos2Exposure(self):\n self.compareToSwarp(\"lanczos2\", useWarpExposure=True)", "def _match_rule(self, option_rule):\n\n # These keys are used to preserve backwards compatibility with\n # bde_build.pl and should be ignored.\n ignore_keys = ('XLC_INTERNAL_PREFIX1',\n 'XLC_INTERNAL_PREFIX2',\n 'AIX_GCC_PREFIX',\n 'SUN_CC_INTERNAL_PREFIX',\n 'SUN_GCC_INTERNAL_PREFIX',\n 'LINUX_GCC_PREFIX',\n 'WINDOWS_CC_PREFIX',\n 'RETRY_ON_SIGNAL')\n\n if option_rule.key in ignore_keys:\n return False\n\n global DEFAULT_COMPILER\n if not optionsutil.match_uplid(self._uplid, option_rule.uplid,\n DEFAULT_COMPILER):\n return False\n if not optionsutil.match_ufid(self._ufid, option_rule.ufid):\n return False\n\n return True", "def _is_better_than(self, metric_name, new, ref):\n if metric_name == f'{self.name}_mean':\n return ref is None or new > ref\n return None", "def test_210_boosted_goal_difference_for_home_models_with_various_lower_away_win_threshold(self):\n\n def create_model_fn(fn_team: str):\n team_stat = Stats.n_sample_stats_for_team(cursor=db_in_cursor,\n team=fn_team,\n last_sample_date=self.model_date,\n n_samples=self.num_samples,\n normalize_by_matches=True)\n\n\n return FeatureModel(\n input_data=[self.home_boost + team_stat.goal_diff, 
team_stat.goal_diff],\n id=team_stat.team_name\n )\n\n default_threshold_lower = 0.3\n default_threshold_upper = 0.9\n\n explore_range = (-2.0, default_threshold_upper)\n num_steps_wanted = 60\n step_size = (explore_range[1] - explore_range[0])/num_steps_wanted\n\n threshold_upper = default_threshold_upper\n for threshold_lower in StatsPredictionPremierLeague.crange(first=explore_range[0], test=lambda x: x <= explore_range[1],\n update=lambda x: x + step_size):\n for match_date in played_home_OR_away_before_dates:\n ####\n #  Build model up to the day before the match\n ####\n self.home_boost = 0.72\n self.model_date = match_date - timedelta(days=1)\n self.num_samples = num_matches_in_season\n\n models: {str: FeatureModel} = FeatureModel.create_models_for_all_teams(\n model_making_fn=create_model_fn, entities=teams)\n\n self.persist_models(model_gen_date=self.model_date, model_description=self.shortDescription(), models=models)\n\n # variant_string = 'threshold_lower=%f, threshold_upper=%f' % (threshold_lower, threshold_upper)\n self.make_and_store_predictions_for_date(match_date=match_date, models=models, draw_range=(threshold_lower, threshold_upper),\n variants=threshold_lower)", "def _match(self, key, attributes=None, context=None):\n matching_data = Sanitizer.ensure_int(self._get_matcher_input(key, attributes))\n if matching_data is None:\n return False\n return self._lower <= self.input_parsers[self._data_type](matching_data) <= self._upper", "def _disk_match_func(self, disk_type, instance):\n raise NotImplementedError()", "def interpretingMF(self):\n self.location_is_veryLessDemand = fuzz.interp_membership(self.location, self.very_less_demand, self.demandLocation)\n self.location_is_lessDemand = fuzz.interp_membership(self.location, self.less_demand, self.demandLocation)\n self.location_is_averageDemand = fuzz.interp_membership(self.location, self.average_demand, self.demandLocation)\n self.location_is_highDemand = fuzz.interp_membership(self.location, self.high_demand, self.demandLocation)\n self.location_is_veryHighDemand = fuzz.interp_membership(self.location, self.very_high_demand, self.demandLocation)\n\n self.bed_is_less = fuzz.interp_membership(self.bedroom, self.less_bed, self.numberOfBedroom)\n self.bed_is_average = fuzz.interp_membership(self.bedroom, self.average_bed, self.numberOfBedroom)\n self.bed_is_more = fuzz.interp_membership(self.bedroom, self.more_bed, self.numberOfBedroom)\n\n self.bath_is_less = fuzz.interp_membership(self.bathroom, self.less_bath, self.numberOfBathroom)\n self.bath_is_average = fuzz.interp_membership(self.bathroom, self.average_bath, self.numberOfBathroom)\n self.bath_is_more = fuzz.interp_membership(self.bathroom, self.more_bath, self.numberOfBathroom)\n\n self.fac_is_low = fuzz.interp_membership(self.facilities, self.less_fac, self.providedFacilities)\n self.fac_is_average = fuzz.interp_membership(self.facilities, self.average_fac, self.providedFacilities)\n self.fac_is_high = fuzz.interp_membership(self.facilities, self.high_fac, self.providedFacilities)\n\n self.unfunishing = fuzz.interp_membership(self.funishing, self.unfun, self.houseFunishing)\n self.partially_funishing = fuzz.interp_membership(self.funishing, self.partially_fun, self.houseFunishing)\n self.fully_funishing = fuzz.interp_membership(self.funishing, self.full_fun, self.houseFunishing)\n\n self.area_is_verysmall = fuzz.interp_membership(self.areaSize, self.very_small_area, self.houseAreaSize)\n self.area_is_small = fuzz.interp_membership(self.areaSize, self.small_area, 
self.houseAreaSize)\n self.area_is_average = fuzz.interp_membership(self.areaSize, self.average_area, self.houseAreaSize)\n self.area_is_large = fuzz.interp_membership(self.areaSize, self.large_area, self.houseAreaSize)\n\n self.access_is_bad = fuzz.interp_membership(self.accessibility, self.bad_access, self.accessArea)\n self.access_is_average = fuzz.interp_membership(self.accessibility, self.average_access, self.accessArea)\n self.access_is_good = fuzz.interp_membership(self.accessibility, self.good_access, self.accessArea)", "def matches(self, feature):\n pass", "def applyProbabilityCutoffs(imageCollection, params): \n \n #Define function to map across imageCollection\n def probabilityFilter(image):\n \n #Get the classifications from the class with the highest probability\n classifications = npv.probabilityToClassification(image)\n \n #Loop through parameters\n for param in params:\n #Load parameter values\n class_name = param.get('class_name')\n class_value = param.get('class_value')\n filter_name = param.get('filter')\n threshold = param.get('threshold')\n \n if filter_name=='gt':\n #Find where the class_name is greater than threshold\n prob_mask = image.select(class_name).gt(ee.Image.constant(threshold))\n #Replace those pixels with the class value\n classifications = classifications.where(prob_mask,class_value)\n \n elif filter_name=='gte':\n #Find where the class_name is greater than or equal to threshold\n prob_mask = image.select(class_name).gte(ee.Image.constant(threshold))\n #Replace those pixels with the class value\n classifications = classifications.where(prob_mask,class_value)\n \n elif filter_name == 'lte':\n #Find where the class_name is less than or equal to threshold\n prob_mask = image.select(class_name).lte(ee.Image.constant(threshold))\n #Find where classifications are equal to class value\n class_mask = classifications.eq(class_value)\n #We only want to replace pixels where the class probability<=threshold AND classification==class_value\n reclass_mask = prob_mask.bitwiseAnd(class_mask)\n \n #Define square kernel of surrounding pixels\n kernel = ee.Kernel.square(1)\n #Convert to a multiband image, one band for each neighbor\n neighs = classifications.neighborhoodToBands(kernel)\n #Reduce to find the majority class in neighborhood\n majority = neighs.reduce(ee.Reducer.mode())\n \n #Replace pixels where the class probability<=threshold AND classification==class_value with the neighborhood majority class\n classifications = classifications.where(reclass_mask,majority)\n \n else:\n #Find where the class_name is less than or equal to threshold\n prob_mask = image.select(class_name).lt(ee.Image.constant(threshold))\n #Find where classifications are equal to class value\n class_mask = classifications.eq(class_value)\n #We only want to replace pixels where the class probability<=threshold AND classification==class_value\n reclass_mask = prob_mask.bitwiseAnd(class_mask)\n \n #Define square kernel of surrounding pixels\n kernel = ee.Kernel.square(1)\n #Convert to a multiband image, one band for each neighbor\n neighs = classifications.neighborhoodToBands(kernel)\n #Reduce to find the majority class in neighborhood\n majority = neighs.reduce(ee.Reducer.mode())\n \n #Replace pixels where the class probability<=threshold AND classification==class_value with the neighborhood majority class\n classifications = classifications.where(reclass_mask,majority)\n \n return ee.Image(classifications)\n return ee.ImageCollection(imageCollection.map(probabilityFilter))", "def 
compare_thresholded_data_with_models(self):\n pass", "def test_220_boosted_goal_difference_for_home_models_with_various_upper_home_win_threshold(self):\n\n def create_model_fn(fn_team: str):\n team_stat = Stats.n_sample_stats_for_team(cursor=db_in_cursor,\n team=fn_team,\n last_sample_date=self.model_date,\n n_samples=self.num_samples,\n normalize_by_matches=True)\n\n\n return FeatureModel(\n input_data=[self.home_boost + team_stat.goal_diff, team_stat.goal_diff],\n id=team_stat.team_name\n )\n\n default_threshold_lower = 0.3\n default_threshold_upper = 0.9\n\n explore_range = (default_threshold_lower, 5.0)\n num_steps_wanted = 60\n step_size = (explore_range[1] - explore_range[0])/num_steps_wanted\n\n threshold_lower = default_threshold_lower\n for threshold_upper in StatsPredictionPremierLeague.crange(first=explore_range[0], test=lambda x: x <= explore_range[1],\n update=lambda x: x + step_size):\n for match_date in played_home_OR_away_before_dates:\n ####\n #  Build model up to the day before the match\n ####\n self.home_boost = 0.72\n self.model_date = match_date - timedelta(days=1)\n self.num_samples = num_matches_in_season\n\n models: {str: FeatureModel} = FeatureModel.create_models_for_all_teams(\n model_making_fn=create_model_fn, entities=teams)\n\n self.persist_models(model_gen_date=self.model_date, model_description=self.shortDescription(), models=models)\n\n # variant_string = 'threshold_lower=%f, threshold_upper=%f' % (threshold_lower, threshold_upper)\n self.make_and_store_predictions_for_date(match_date=match_date, models=models, draw_range=(threshold_lower, threshold_upper),\n variants=threshold_upper)", "def match(self, dc):\n raise NotImplemented", "def ImprovedScore(tgtName, yourName, teamModels, featureImprove, teamTgtPipe, teamYourPipe, Imp = 0.1):\n\n ## Put featureImprove into target model - need to reverse _op and non _op\n coef = teamModels[tgtName]\n # Get stats of Team A's win and lose matches - only 20 features are saved\n features = list(coef['features'])\n # Get stats of Team B: Revert the features of A to retrieve features for B\n featureYour = []\n featureTgt = []\n for ii in features:\n if '_op' in ii:\n featureYour.append(ii[:-3])\n else:\n featureTgt.append(ii)\n\n dfTgt = teamTgtPipe[teamTgtPipe['season'] == 1415].ix[:, featureTgt]\n dfYour = teamYourPipe[teamYourPipe['season'] == 1415].ix[:, featureYour]\n dfYour.columns = dfYour.columns + '_op'\n\n # Get mean and reorder into the original feature order\n bb = pd.concat([dfTgt.mean(), dfYour.mean()])\n bb = bb.reindex(features)\n model = coef['model']\n\n for ii in bb.iteritems():\n if ((ii[0] + '_op') in featureImprove) or ((ii[0][:-3]) in featureImprove):\n if model.coef_[0][features.index(ii[0])] < 0:\n bb[ii[0]] *= 1 + Imp\n else:\n bb[ii[0]] *= 1 - Imp\n\n probTgt = model.predict_proba(bb)[0][1]\n\n ## Put featureImprove into your model\n coef = teamModels[yourName]\n # Get stats of Team A's win and lose matches - only 20 features are saved\n features = list(coef['features'])\n # Get stats of Team B: Revert the features of A to retrieve features for B\n featureYour = []\n featureTgt = []\n for ii in features:\n if '_op' in ii:\n featureTgt.append(ii[:-3])\n else:\n featureYour.append(ii)\n\n dfTgt = teamTgtPipe[teamTgtPipe['season'] == 1415].ix[:, featureTgt]\n dfYour = teamYourPipe[teamYourPipe['season'] == 1415].ix[:, featureYour]\n dfTgt.columns = dfTgt.columns + '_op'\n\n # Get mean and reorder into the original feature order\n bb = pd.concat([dfTgt.mean(), dfYour.mean()])\n bb = 
bb.reindex(features)\n\n model = coef['model']\n for ii in bb.iteritems():\n if ii[0] in featureImprove:\n if model.coef_[0][features.index(ii[0])] > 0:\n bb[ii[0]] *= 1 + Imp\n else:\n bb[ii[0]] *= 1 - Imp\n\n probYour = model.predict_proba(bb)[0][1]\n\n return round(probYour / (probYour + probTgt), 2)", "def matches_property_name(fun):\n return callable(fun) and getattr(fun, annotation, None) == value", "def provides_defaults_for(self, rule):\n return not self.is_build_only and \\\n self.endpoint == rule.endpoint and self != rule and \\\n self.arguments == rule.arguments", "def __getattr__(self, item):\n # Early-exit for properties other than check_-methods.\n if not item.startswith(\"check_\"):\n return super().__getattribute__(self, item)\n\n # Lookup the appropriate method. If not found, complain.\n name = item.replace(\"check_\", \"\")\n try:\n func, on_true, on_false = self.checks[name]\n except KeyError:\n raise NotImplementedError(\"No check for '{}'.\".format(name))\n else:\n return partial(self._base_check, func, on_true, on_false)", "def hgmwithamplifiedfilter_evaluation(input_generator,branches,iden_method,Plot,reference=None):\n input_signal = input_generator.GetOutput()\n filter_spec_tofind = nlsp.log_bpfilter(branches=branches,input=input_signal,amplify=True)\n ref_nlsystem = nlsp.HammersteinGroupModel_up(input_signal=input_signal,\n nonlinear_functions=nlsp.nl_branches(nlsp.function_factory.power_series,branches),\n filter_irs=filter_spec_tofind,\n max_harmonics=range(1,branches+1))\n\n found_filter_spec, nl_functions = iden_method(input_generator,ref_nlsystem.GetOutput(),branches)\n iden_nlsystem = nlsp.HammersteinGroupModel_up(input_signal=input_signal,\n nonlinear_functions=nl_functions,\n filter_irs=found_filter_spec,\n max_harmonics=range(1,branches+1))\n if reference is not None:\n reference = nlsp.change_length_signal(reference,length=len(input_signal))\n ref_nlsystem.SetInput(reference)\n iden_nlsystem.SetInput(reference)\n if Plot is True:\n plot.relabelandplot(sumpf.modules.FourierTransform(ref_nlsystem.GetOutput()).GetSpectrum(),\"Reference System\",show=False)\n plot.relabelandplot(sumpf.modules.FourierTransform(iden_nlsystem.GetOutput()).GetSpectrum(),\"Identified System\",show=True)\n print \"SNR between Reference and Identified output with differently amplified filters: %r\" %nlsp.snr(ref_nlsystem.GetOutput(),\n iden_nlsystem.GetOutput())", "def qc_prop_matching(self, rel_cols, label):\n\n cols = rel_cols[::]\n\n # create reduced copies of the dataframes for propensity score quality control\n qc_dfs = []\n for df in self:\n qc_dfs.append(df[cols])\n\n # exclude label if included into columns\n if label in cols:\n cols.remove(label)\n\n # construct formula\n formula = construct_formula(label, cols)\n\n # create Matcher\n m = Matcher(*qc_dfs, yvar=label, formula=formula)\n # train classifier to asses predictability\n m.fit_scores(balance=True, nmodels=10)\n # calculate and visualize propensity scores\n m.predict_scores()\n m.plot_scores()", "def calculate3(pred_ccm, pred_ad, truth_ccm, truth_ad, method=\"sym_pseudoV\", weights=None, verbose=False, pseudo_counts=True, full_matrix=True, in_mat=2):\n larger_is_worse_methods = ['sym_pseudoV_nc', 'sym_pseudoV', 'pseudoV_nc', 'pseudoV', \"simpleKL_nc\", 'simpleKL'] # methods where a larger score is worse\n\n \n pc_pred_ccm, pc_pred_ad, pc_truth_ccm, pc_truth_ad = pred_ccm, pred_ad, truth_ccm, truth_ad\n y = np.array(pc_pred_ad.shape)[1]\n nssms = int(np.ceil(0.5 * (2*y + 1) - 0.5 * np.sqrt(4*y + 1)))\n\n if 
isinstance(method, list):\n res = [calculate3_onemetric(pc_pred_ccm, pc_pred_ad, pc_truth_ccm, pc_truth_ad,\n method=m, verbose=verbose, in_mat=in_mat) for m in method] # calculate the score for each method\n\n # normalize the scores to be between (worst of NCluster score and OneCluster score) and (Truth score)\n ncluster_ccm, ncluster_ad = add_pseudo_counts(mb.get_ccm('NClusterOneLineage', nssms=nssms), mb.get_ad('NClusterOneLineage', nssms=nssms))\n ncluster_score = [calculate3_onemetric(ncluster_ccm, ncluster_ad, pc_truth_ccm, pc_truth_ad,\n method=m, verbose=verbose, full_matrix=full_matrix, in_mat=in_mat) for m in method]\n del ncluster_ccm, ncluster_ad\n onecluster_ccm, onecluster_ad = add_pseudo_counts(mb.get_ccm('OneCluster', nssms=nssms), mb.get_ad('OneCluster', nssms=nssms))\n onecluster_score = [calculate3_onemetric(onecluster_ccm, onecluster_ad, pc_truth_ccm, pc_truth_ad,\n method=m, verbose=verbose, full_matrix=full_matrix, in_mat=in_mat) for m in method]\n del onecluster_ccm, onecluster_ad\n for i in range(len(method)):\n if method[i] in larger_is_worse_methods: # normalization for methods where a larger score is worse\n worst_score = max(ncluster_score[i], onecluster_score[i]) # worst of NCluster and OneCluster scores\n res[i] = 1 - (res[i] / worst_score) # normalize the score\n else: # normalization for methods where a smaller score is worse\n worst_score = min(ncluster_score[i], onecluster_score[i])\n res[i] = (res[i] - worst_score) / (1 - worst_score)\n\n\n if weights is None: # if weights are not specified or if they cannot be normalized then default to equal weights\n weights = [1] * len(method)\n elif sum(weights) == 0:\n Warning('Weights sum to zero so they are invalid, defaulting to equal weights')\n weights = [1] * len(method)\n\n weights = np.array(weights) / float(sum(weights)) # normalize the weights\n score = sum(np.multiply(res, weights))\n else:\n \n score = calculate3_onemetric(pc_pred_ccm, pc_pred_ad, pc_truth_ccm, pc_truth_ad,\n method=method, verbose=verbose, full_matrix=full_matrix, in_mat=in_mat)\n del pc_pred_ccm\n del pc_pred_ad\n # normalize the score to be between (worst of NCluster score and OneCluster score) and (Truth score) - similar to above\n ncluster_ccm, ncluster_ad = add_pseudo_counts(mb.get_ccm('NClusterOneLineage', nssms=nssms), mb.get_ad('NClusterOneLineage', nssms=nssms))\n ncluster_score = calculate3_onemetric(ncluster_ccm, ncluster_ad, pc_truth_ccm, pc_truth_ad,\n method=method, verbose=verbose, full_matrix=full_matrix, in_mat=in_mat)\n del ncluster_ccm, ncluster_ad\n onecluster_ccm, onecluster_ad = add_pseudo_counts(mb.get_ccm('OneCluster', nssms=nssms), mb.get_ad('OneCluster', nssms=nssms))\n \n onecluster_score = calculate3_onemetric(onecluster_ccm, onecluster_ad, pc_truth_ccm, pc_truth_ad,\n method=method, verbose=verbose, full_matrix=full_matrix, in_mat=in_mat)\n del onecluster_ccm, onecluster_ad\n\n #print score, ncluster_score, onecluster_score\n if method in larger_is_worse_methods:\n worst_score = max(ncluster_score, onecluster_score)\n score = 1 - (score / worst_score)\n else:\n worst_score = min(ncluster_score, onecluster_score)\n score = (score - worst_score) / (1 - worst_score)\n return score", "def match_flav_(self, light, charm, bottom, flav):\n ret = deepcopy(light)\n is_c = (flav == 4)\n is_b = (flav == 5)\n ret[is_c] = charm[is_c]\n ret[is_b] = bottom[is_b]\n return ret", "def testMatchingMetrics(self):\n self.assertDictEqual(self.test_class._matching_metrics, {\n 'response': 1.0,\n 'spend': 0.0,\n })\n default_class 
= TrimmedMatchGeoXDesign(\n GeoXType.HEAVY_UP,\n pretest_data=self.test_data,\n time_window_for_design=self.design_window,\n time_window_for_eval=self.evaluation_window)\n self.assertDictEqual(default_class._matching_metrics, {\n 'response': 1.0,\n 'spend': 0.01,\n })", "def _default_eval_func(a, b):\n emphasis = \"accuracy\"\n a_value = getattr(a, emphasis)\n b_value = getattr(b, emphasis)\n return a_value > b_value", "def match(self) -> bool:", "def detectByRefAdv(this, **kargs):\n\t\t\n\t\t# Arguments\n\t\tseuil = kargs.get('seuil', 100)\n\t\tref = kargs.get('ref', this._REF)\n\t\tframe = kargs.get('frame', this._FRAME)\n\t\tcoef = kargs.get('coef', 1)\n\t\t\n\t\t# On fait la différence et on extrait les composantes RGB\n\t\tdiff = cv2.absdiff(frame, ref)\n\t\t\n\t\t# Zblah\n\t\tsat = diff.copy()\n\t\tweight = 1 + (cv2.cvtColor(ref, cv2.COLOR_BGR2GRAY) / 255.0) * coef\n\t\tsat[:,:,0] *= weight\n\t\tsat[:,:,1] *= weight\n\t\tsat[:,:,2] *= weight\n\t\t\n\t\t# Petit seuillage des familles\n\t\tthis._BINARY = delta = EmptyFrom(sat, 1)\n\t\tdelta[:,:] = ((sat[:,:,2] + sat[:,:,1] + sat[:,:,0]) > seuil) * 255\n\t\t\n\t\treturn pyon(\n\t\t\tAbsDiff = diff,\n\t\t\tWeight = weight % 1,\n\t\t\tWeighted = sat,\n\t\t\tThreshold = delta\n\t\t)", "def applies_to(self, instance):\n\n # traversing all feature-values of the instance\n for fv in instance:\n # checking if a feature value is in the rule\n if fv.feat in self.by_name:\n for rval in self.by_name[fv.feat]:\n # checking if the value in the rule is the same but\n # the literal in the rule is opposite,\n # or if the value is different in the rule\n if (rval[0] == fv.val and rval[1] == False) or (rval[0] != fv.val and rval[1] == True):\n return False\n\n # no failure indicates that the rule applies to this instance\n return True", "def __checkPropBonus(self, track):\n result = False\n if self.battle.getInteractivePropTrackBonus() == track:\n result = True\n return result", "def __init__(self, G, population, condition_axelrod, condition_centola):\n super(ExpandableAlgorithm, self).__init__(G, population)\n self._overlap_function = overlap_similarity\n self._post_args = None\n self.condition_axelrod = condition_axelrod\n self.condition_centola = condition_centola", "def iterate(self):\n ret = super(ExpandableAlgorithm, self).pre_iteration()\n if ret is None:\n return None\n active, passive, neighbors, features_active, features_passive = ret\n params = [features_active, features_passive]\n if self._post_args:\n params += self._post_args\n s = self._overlap_function(*params)\n if self.condition_axelrod:\n if self.__condition_axelrod(s, features_active, features_passive):\n return True\n if self.condition_centola:\n if self.__condition_centola(s, active, passive, neighbors):\n return True", "def goal_test(c):\n return c == GOAL_CUBE", "def test_205_boosted_goal_difference_for_home_models_with_thresholds(self):\n\n def create_model_fn(fn_team: str):\n team_stat = Stats.n_sample_stats_for_team(cursor=db_in_cursor,\n team=fn_team,\n last_sample_date=self.model_date,\n n_samples=self.num_samples,\n normalize_by_matches=True)\n\n\n return FeatureModel(\n input_data=[self.home_boost + team_stat.goal_diff, team_stat.goal_diff],\n id=team_stat.team_name\n )\n\n\n for match_date in played_home_OR_away_before_dates:\n ####\n #  Build model up to the day before the match\n ####\n self.home_boost = 0.72\n self.model_date = match_date - timedelta(days=1)\n self.num_samples = num_matches_in_season\n\n models: {str: FeatureModel} = 
FeatureModel.create_models_for_all_teams(\n model_making_fn=create_model_fn, entities=teams)\n\n self.persist_models(model_gen_date=self.model_date, model_description=self.shortDescription(), models=models)\n\n self.make_and_store_predictions_for_date(match_date=match_date, models=models, draw_range=(-0.792, 1.945))", "def _transform(df: DataFrame,\r\n prob_mod: mlc.Model,\r\n method: Optional[str],\r\n metric: Optional[str],\r\n match_kwargs: Optional[dict] = None) -> Tuple[DataFrame, dict]:\r\n\r\n # interpret input args:\r\n # only support quantile or assignment matching right now\r\n if match_kwargs is None:\r\n match_kwargs = {}\r\n\r\n logging.getLogger(__name__).info(\"method is {method}\".format(method=str(method)))\r\n\r\n if method is None:\r\n method = 'auto'\r\n logging.getLogger(__name__).info(\"assigning default arg 'auto'\")\r\n elif method not in ['assignment', 'quantile', 'auto']:\r\n logging.getLogger(__name__).critical(\"invalid method argument\")\r\n raise NotImplementedError(\"method {method} not implemented\".format(method=method))\r\n if method == 'auto':\r\n label_col = prob_mod.getOrDefault('labelCol')\r\n\r\n _persist_if_unpersisted(df)\r\n pos_count = df.where(F.col(label_col) == 1).count()\r\n neg_count = df.where(F.col(label_col) == 0).count()\r\n if ((pos_count**2)*neg_count) <= SMALL_MATCH_THRESHOLD:\r\n method = 'assignment'\r\n logging.getLogger(__name__).info(\"auto method is assignment\")\r\n else:\r\n method = 'quantile'\r\n logging.getLogger(__name__).info(\"auto method is quantile\")\r\n\r\n logging.getLogger(__name__).info(\"metric is {metric}\".format(metric=str(metric)))\r\n if metric is None:\r\n metric = 'probability'\r\n logging.getLogger(__name__).info(\"assigning default metric 'probability'\")\r\n elif metric not in ['probability']:\r\n logging.getLogger(__name__).critical(\"invalid metric argument\")\r\n raise NotImplementedError(\"metric {metric} not implemented\".format(metric=metric))\r\n\r\n # step 1 calculate match metric\r\n df, metric_col = _get_metric(df, prob_mod, metric)\r\n # step 2 match\r\n df, match_info = _match(df, prob_mod, method, metric_col, match_kwargs)\r\n\r\n return df, match_info", "def condensate_abovedew(Bg, Bgi, Gp, Gpi):\n Eg = Bg - Bgi\n F = Bg * (Gp - Gpi)\n return(F, Eg)", "def _build_filter(self, **kwargs):\n\n def object_filter(obj):\n for key, value in kwargs.items():\n # we replace dango-like lookup by dots, so attrgetter can do his job\n\n getter = utils.attrgetter(key)\n if hasattr(value, '__call__'):\n # User passed a callable for a custom comparison\n if not value(getter(obj)):\n return False\n else:\n if not getter(obj) == value:\n return False\n return True\n\n return object_filter", "def version_check(self):\n # anchor_matcher --> matcher\n if hasattr(self, \"anchor_matcher\"):\n self.matcher = self.anchor_matcher\n if hasattr(self, \"head_in_features\"):\n self.in_features = self.head_in_features\n if hasattr(self, \"test_topk_candidates\"):\n self.topk_candidates = self.test_topk_candidates\n if hasattr(self, \"test_score_thresh\"):\n self.score_threshold = self.test_score_thresh", "def compute_l_ratios(\n sorting,\n recording,\n num_channels_to_compare=LRatio.params['num_channels_to_compare'],\n max_spikes_per_cluster=LRatio.params['max_spikes_per_cluster'],\n pca_scores_params=get_pca_scores_params(),\n recording_params=get_recording_params(),\n epoch_params=get_epoch_params(),\n feature_params=get_feature_params(),\n save_as_property=True,\n seed=LRatio.params['seed'],\n 
verbose=LRatio.params['verbose'],\n unit_ids=None\n):\n rp_dict, ps_dict, ep_dict, fp_dict = update_param_dicts(recording_params=recording_params, \n pca_scores_params=pca_scores_params, \n epoch_params=epoch_params,\n feature_params=feature_params)\n if unit_ids is None:\n unit_ids = sorting.get_unit_ids()\n\n md = MetricData(\n sorting=sorting,\n sampling_frequency=recording.get_sampling_frequency(),\n recording=recording,\n apply_filter=rp_dict[\"apply_filter\"],\n freq_min=rp_dict[\"freq_min\"],\n freq_max=rp_dict[\"freq_max\"],\n unit_ids=unit_ids,\n epoch_tuples=ep_dict[\"epoch_tuples\"],\n epoch_names=ep_dict[\"epoch_names\"],\n verbose=verbose\n )\n\n md.compute_pca_scores(\n n_comp=ps_dict[\"n_comp\"],\n ms_before=ps_dict[\"ms_before\"],\n ms_after=ps_dict[\"ms_after\"],\n dtype=ps_dict[\"dtype\"],\n max_spikes_per_unit=fp_dict[\"max_spikes_per_unit\"],\n max_spikes_for_pca=ps_dict[\"max_spikes_for_pca\"],\n recompute_info=fp_dict['recompute_info'],\n save_features_props=fp_dict['save_features_props'],\n seed=seed,\n )\n\n l_ratio = LRatio(metric_data=md)\n l_ratio_epochs = l_ratio.compute_metric(num_channels_to_compare, max_spikes_per_cluster, seed, save_as_property)\n return l_ratio_epochs", "def match_tracks(\n self,\n others,\n subset=None,\n method=\"simple\",\n interpolate_to=\"other\",\n thresh_dist=250.0,\n time_frac=0.5,\n return_dist_matrix=False,\n beta=100.0,\n r_planet=EARTH_RADIUS,\n ):\n # Recursive call for each of the available categies\n if subset is None:\n if self.is_categorised:\n result = {}\n for subset_key in self.cat_labels:\n result[subset_key] = self.match_tracks(\n others,\n subset=subset_key,\n method=method,\n interpolate_to=interpolate_to,\n thresh_dist=thresh_dist,\n time_frac=time_frac,\n return_dist_matrix=return_dist_matrix,\n beta=beta,\n r_planet=r_planet,\n )\n return result\n else:\n subset = \"all\"\n\n # Select subset\n sub_gb = self[subset].gb\n if len(sub_gb) == 0 or len(others) == 0:\n return []\n if isinstance(others, list):\n # match against a list of DataFrames of tracks\n other_gb = pd.concat(\n [OctantTrack.from_df(df) for df in others],\n keys=range(len(others)),\n names=self._mux_names,\n ).gb\n elif isinstance(others, self.__class__):\n # match against another TrackRun\n other_gb = others[subset].gb\n else:\n raise ArgumentError('Argument \"others\" ' f\"has a wrong type: {type(others)}\")\n match_pairs = []\n if method == \"intersection\":\n for idx, ot in self._pbar(sub_gb): # , desc=\"self tracks\"):\n for other_idx, other_ot in self._pbar(other_gb, leave=False):\n times = other_ot.time.values\n time_match_thresh = time_frac * (times[-1] - times[0]) / HOUR\n\n intersect = pd.merge(other_ot, ot, how=\"inner\", left_on=\"time\", right_on=\"time\")\n n_match_times = intersect.shape[0]\n if n_match_times > 0:\n _tstep_h = intersect.time.diff().values[-1] / HOUR\n dist = intersect[[\"lon_x\", \"lon_y\", \"lat_x\", \"lat_y\"]].apply(\n lambda x: great_circle(*x.values, r_planet=r_planet), axis=1\n )\n prox_time = (dist < (thresh_dist * KM2M)).sum() * _tstep_h\n if (\n n_match_times * _tstep_h > time_match_thresh\n ) and prox_time > time_match_thresh:\n match_pairs.append((idx, other_idx))\n break\n\n elif method == \"simple\":\n # TODO: explain\n ll = [\"lon\", \"lat\"]\n match_pairs = []\n for other_idx, other_ct in self._pbar(other_gb): # , desc=\"other tracks\"):\n candidates = []\n for idx, ct in self._pbar(sub_gb, leave=False): # , desc=\"self tracks\"):\n if interpolate_to == \"other\":\n df1, df2 = ct.copy(), other_ct\n elif 
interpolate_to == \"self\":\n df1, df2 = other_ct, ct.copy()\n l_start = max(df1.time.values[0], df2.time.values[0])\n e_end = min(df1.time.values[-1], df2.time.values[-1])\n if (e_end - l_start) / HOUR > 0:\n # df1 = df1.set_index('time')[ll]\n # ts = pd.Series(index=df2.time)\n # new_df1 = (pd.concat([df1, ts]).sort_index()\n # .interpolate(method='values')\n # .loc[ts.index])[ll]\n tmp_df2 = pd.DataFrame(\n data={\"lon\": np.nan, \"lat\": np.nan, \"time\": df2.time}, index=df2.index\n )\n new_df1 = (\n pd.concat([df1[[*ll, \"time\"]], tmp_df2], ignore_index=True, keys=\"time\")\n .set_index(\"time\")\n .sort_index()\n .interpolate(method=\"values\")\n .loc[tmp_df2.time]\n )[ll]\n new_df1 = new_df1[~new_df1.lon.isnull()]\n\n # thr = (time_frac * 0.5\n # * (df2.time.values[-1] - df2.time.values[0]\n # + df1.time.values[-1] - df2.time.values[0]))\n thr = time_frac * df2.shape[0]\n dist_diff = np.full(new_df1.shape[0], FILLVAL)\n for i, ((x1, y1), (x2, y2)) in enumerate(\n zip(new_df1[ll].values, df2[ll].values)\n ):\n dist_diff[i] = great_circle(x1, x2, y1, y2, r_planet=r_planet)\n within_r_idx = dist_diff < (thresh_dist * KM2M)\n # if within_r_idx.any():\n # if (new_df1[within_r_idx].index[-1]\n # - new_df1[within_r_idx].index[0]) > thr:\n # candidates.append((idx, within_r_idx.sum()))\n if within_r_idx.sum() > thr:\n candidates.append((idx, within_r_idx.sum()))\n if len(candidates) > 0:\n candidates = sorted(candidates, key=lambda x: x[1])\n final_idx = candidates[-1][0]\n match_pairs.append((final_idx, other_idx))\n\n elif method == \"bs2000\":\n # sub_list = [i[0] for i in list(sub_gb)]\n sub_indices = list(sub_gb.indices.keys())\n other_indices = list(other_gb.indices.keys())\n dist_matrix = np.full((len(sub_gb), len(other_gb)), FILLVAL)\n for i, (_, ct) in enumerate(self._pbar(sub_gb, leave=False)): # , desc=\"self tracks\"):\n x1, y1, t1 = ct.coord_view\n for j, (_, other_ct) in enumerate(self._pbar(other_gb, leave=False)):\n x2, y2, t2 = other_ct.coord_view\n dist_matrix[i, j] = distance_metric(\n x1, y1, t1, x2, y2, t2, beta=float(beta), r_planet=r_planet\n )\n for i, idx1 in enumerate(np.nanargmin(dist_matrix, axis=0)):\n for j, idx2 in enumerate(np.nanargmin(dist_matrix, axis=1)):\n if i == idx2 and j == idx1:\n match_pairs.append((sub_indices[idx1], other_indices[idx2]))\n if return_dist_matrix:\n return match_pairs, dist_matrix\n else:\n raise ArgumentError(f\"Unknown method: {method}\")\n\n return match_pairs", "def hook(images, augmenter, parents, default):\n return augmenter.__class__.__name__ in MASK_AUGMENTERS", "def mutate(self, radiation): # currently works for FloatProperties only\n assert(radiation >= 0)\n\n props = properties(MushroomProps)\n nprops = len(props)\n\n nmutations = clip(round(random.gauss(nprops*radiation/200, math.sqrt(nprops))), 1, nprops)\n mutatingProps = random.sample(props.items(), nmutations)\n radiation /= math.sqrt(nmutations) # the more aspects change, the less each of them changes\n\n descendant = Mushroom.load(self)\n for name, params in mutatingProps:\n current = descendant.__getattribute__(name)\n if params[\"type\"] is bpy.props.BoolProperty:\n newVal = (not current) if radiation/100 < random.random() else current\n else:\n span = optionalKey(params, \"soft_max\", optionalKey(params, \"max\")) - optionalKey(params, \"soft_min\", optionalKey(params, \"min\"))\n span *= radiation/100 # percent to factor\n\n def fuzzyClamp(val, curr): # clamp that allows flowing over soft min/max with some probability\n val = propClamp(val, params)\n # if 
val exceeds the soft bounds, chances decrease to go further away\n if optionalKey(params, \"soft_min\") is not None and val < params[\"soft_min\"] and val < curr:\n if random.random() < span/(span + params[\"soft_min\"] - val):\n return val\n return curr if curr < params[\"soft_min\"] else params[\"soft_min\"]\n if optionalKey(params, \"soft_max\") is not None and val > params[\"soft_max\"] and val > curr:\n if random.random() < span/(span + val - params[\"soft_max\"]):\n return val\n return curr if curr > params[\"soft_max\"] else params[\"soft_max\"]\n return val\n\n if params[\"type\"] is bpy.props.FloatProperty:\n newVal = fuzzyClamp(random.gauss(current, span), current)\n if params[\"type\"] is bpy.props.FloatVectorProperty:\n i = random.randrange(params[\"size\"]) # evolve only one entry of the vector\n newVal = current.copy()\n newVal[i] = fuzzyClamp(random.gauss(current[i], span), current[i])\n #TODO handle other property types\n descendant.__setattr__(name, newVal)\n\n return descendant", "def rules(self):\n self.rule1 = min(self.location_is_lessDemand, self.area_is_small, self.unfunishing)\n self.rule2 = min(self.location_is_lessDemand, max(self.area_is_small, self.area_is_average), self.access_is_good)\n self.rule3 = min(self.location_is_veryHighDemand, self.area_is_average, self.fac_is_low, self.access_is_average)\n self.rule4 = min(self.location_is_veryLessDemand, self.area_is_verysmall, self.fully_funishing)\n self.rule5 = min(self.location_is_lessDemand, self.fac_is_average, max(self.area_is_small, self.area_is_average))\n self.rule6 = min(max(self.location_is_lessDemand, self.location_is_averageDemand), self.access_is_good)\n self.rule7 = min(self.location_is_lessDemand, self.access_is_good, self.area_is_large, self.partially_funishing)\n self.rule8 = min(self.location_is_highDemand, self.access_is_good, max(self.bed_is_less, self.bath_is_average))\n self.rule9 = min(self.location_is_veryHighDemand, self.area_is_large, self.unfunishing)\n self.rule10 = min(self.access_is_good, self.area_is_average, (1 - self.unfunishing))\n self.rule11 = min(self.access_is_good, self.area_is_large, self.partially_funishing, self.bed_is_more, self.bath_is_more)", "def _init_is_better(self, mode: str, threshold: float, threshold_mode: str) -> None:\n if mode not in {\"min\", \"max\"}:\n raise ValueError(\"mode \" + mode + \" is unknown!\")\n if threshold_mode not in {\"rel\", \"abs\"}:\n raise ValueError(\"threshold mode \" + threshold_mode + \" is unknown!\")\n\n if mode == \"min\":\n self.mode_worse = math.inf\n else: # mode == 'max':\n self.mode_worse = -math.inf\n\n self.mode = mode\n self.threshold = threshold\n self.threshold_mode = threshold_mode", "def fuzzy_match(self, other):\n magic, fuzzy = False, False\n try:\n magic = self.alias == other.magic\n except AttributeError:\n pass\n\n if '.' 
in self.alias:\n major = self.alias.split('.')[0]\n fuzzy = major == other.alias \n return magic or fuzzy", "def _resolve_objective_function(self) -> Scorer:\n\n objective = self.cfg_.objective\n if objective == 'accuracy':\n return make_scorer(ex.accuracy_score_round_inputs)\n if objective.startswith('precision'):\n if objective.endswith('macro'):\n return make_scorer(ex.precision_score_round_inputs,\n average='macro')\n elif objective.endswith('weighted'):\n return make_scorer(ex.precision_score_round_inputs,\n average='weighted')\n if objective.startswith('f1'):\n if objective.endswith('macro'):\n return make_scorer(ex.f1_score_round_inputs,\n average='macro')\n elif objective.endswith('weighted'):\n return make_scorer(ex.f1_score_round_inputs,\n average='weighted')\n elif objective.endswith('least_frequent'):\n return make_scorer(ex.f1_score_least_frequent_round_inputs)\n if objective == 'pearson_r':\n return make_scorer(pearson)\n if objective == 'spearman':\n return make_scorer(spearman)\n if objective == 'kendall_tau':\n return make_scorer(kendall_tau)\n if objective.startswith('uwk'):\n if objective == 'uwk':\n return make_scorer(ex.kappa_round_inputs)\n return make_scorer(ex.kappa_round_inputs,\n allow_off_by_one=True)\n if objective.startswith('lwk'):\n if objective == 'lwk':\n return make_scorer(ex.kappa_round_inputs,\n weights='linear')\n return make_scorer(ex.kappa_round_inputs,\n weights='linear',\n allow_off_by_one=True)\n if objective.startswith('qwk'):\n if objective == 'qwk':\n return make_scorer(ex.kappa_round_inputs,\n weights='quadratic')\n return make_scorer(ex.kappa_round_inputs,\n weights='quadratic',\n allow_off_by_one=True)\n return objective", "def get_bprop_less_equal(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def patternMatchingTest(matchingAlgorithm,\n observations= defaultCOMPREFdump , \n modelsFolder= defaultWRFdumpsFolder,\n #key1='0828',\n key1='',\n key2='',\n #key3='',\n verbose=True,\n makeSmallImages=True,\n smallDpi=40,\n frameDpi = 600 , # added 2013-09-23\n loadFormerAnalysisResult=False, #added 2013-09-23\n #summary_folder=summary_folder, # needed if loadFormerAnalysisResult is True\n summary_folder=summary_folder+timeString + '/', # 2013-09-27\n matching_algorithm_name=\"\", # needed if loadFormerAnalysisResult is True\n panel_cols = 5,\n panel_rows = 5,\n useCV2 = False, # switch off opencv since it doesn't work yet\n useCV = False,\n averageScoreOrdering=True, # take an average of the ordering to get a total score for the entire model over the time series\n #kwargs = {}, # keyword arguments for the matchinging algorithm, just in case\n **kwargs\n ):\n if key1 != '' or key2 != '':\n summary_folder += '_'.join([key1, key2])\n if not averageScoreOrdering:\n summary_folder += '_nonaver'\n summary_folder += '%dx%d/' % (panel_rows, panel_cols)\n\n t0 = time.time()\n if loadFormerAnalysisResult:\n mv_folder = summary_folder + matching_algorithm_name + '/'\n result = pickle.load(open(mv_folder + 'result.pydump', 'r'))\n\n \"\"\"\n result= {'best matches' : bestMatchAtEachTime, \n 'best overall match' : bestMatchOverall, \n 'all scores' : scores,\n 'matching algorithm' : matching_algorithm_name,\n 'dataset1[obs]' : observations,\n 'dataset2[models]' : modelsFolder,\n 'time spent' : time.time() - t0,\n 'mv folder' : mv_folder, #added 2013-09-23\n 'ordering' : ordering, #added 2013-09-23\n }\n \"\"\"\n bestMatchAtEachTime = result['best matches'] \n bestMatchOverall = result['best overall match']\n scores 
= result['all scores']\n matching_algorithm_name = result['matching algorithm']\n observations = result['dataset1[obs]']\n modelsFolder = result['dataset2[models]']\n #mv_folder = result['mv folder']\n ordering = result['ordering']\n\n # THE FOLLOWING 30 LINES ARE COPIED FROM \"ELSE...\" BELOW\n ########### ******\n ##\n #\n ds1 = pickle.load(open(observations,'r'))\n ds1.setFloor(0) # added 2013-09-27\n ds1.list = [v for v in ds1.list if (key1 in v.dataTime or key1 in v.name) \\\n and (key2 in v.dataTime or key2 in v.name)]\n ds1.list.sort(key=lambda v:v.dataTime)\n ds1.setVmin(-40) #line added 2013-09-23\n # debug\n #print ds1.list\n print 'loading list with keywords [ %s %s ]' %(key1, key2) \n ds1.load(verbose=verbose)\n # end debug\n modelsList = [v for v in os.listdir(modelsFolder) if '.pydump' in v]\n modelsList.sort(key = lambda v: int(re.findall(r'\\d+',v)[0]))\n print \"modelsList\", modelsList\n scores = {} # scores = {model number: [('dataTime', score), ('dataTime', score),...], ...}\n print 'loading ds2 (model data)'\n wrfs = [] # line added 2013-09-23\n for path in modelsList:\n modelIndex = int(re.findall(r'\\d+', path)[0])\n ds2 = pickle.load(open(modelsFolder+path, 'r'))\n ds2.setFloor(0) # added 2013-09-27\n # debug\n #ds2.list = [v for v in ds2.list if key1 in v.dataTime] # line redundant, 2013-09-23\n ds1.list, ds2.list = ds1.intersect(ds2) #line added 2013-09-23 \n ds2.setVmin(-40) #line added 2013-09-23\n wrfs.append(ds2) # line added 2013-09-23\n # end debug\n\n ####################\n\n\n else:\n # THE FOLLOWING 30 LINES ARE COPIED TO \"IF...\" ABOVE\n ########### ******\n ##\n #\n ds1 = pickle.load(open(observations,'r'))\n ds1.list = [v for v in ds1.list if (key1 in v.dataTime or key1 in v.name) \\\n and (key2 in v.dataTime or key2 in v.name)]\n ds1.list.sort(key=lambda v:v.dataTime)\n ds1.setVmin(-40) #line added 2013-09-23\n # debug\n #print ds1.list\n print 'loading list with keywords [ %s %s ]' %(key1, key2) \n ds1.load(verbose=verbose)\n ds1.setFloor(0) # added 2013-09-27\n # end debug\n modelsList = [v for v in os.listdir(modelsFolder) if '.pydump' in v]\n modelsList.sort(key = lambda v: int(re.findall(r'\\d+',v)[0]))\n print \"modelsList\", modelsList\n scores = {} # scores = {model number: [('dataTime', score), ('dataTime', score),...], ...}\n print 'loading ds2 (model data)'\n wrfs = [] # line added 2013-09-23\n ds1.commonMaskSet=False # tempory attribute added 2013-09-27\n for path in modelsList:\n modelIndex = int(re.findall(r'\\d+', path)[0])\n ds2 = pickle.load(open(modelsFolder+path, 'r'))\n # debug\n #ds2.list = [v for v in ds2.list if key1 in v.dataTime] # line redundant, 2013-09-23\n ds1.list, ds2.list = ds1.intersect(ds2) #line added 2013-09-23 \n ds2.setFloor(0) # added 2013-09-27\n ds2[0].matrix.mask += ds1[0].matrix.mask # added 2013-09-27\n ds2.setCommonMask() # added 2013-09-27\n if not ds1.commonMaskSet: # added 2013-09-27\n ds1[0].matrix.mask +=ds2[0].matrix.mask # added 2013-09-27\n ds1.setCommonMask() # added 2013-09-27\n ds2.setVmin(-40) #line added 2013-09-23\n wrfs.append(ds2) # line added 2013-09-23\n # end debug\n\n #######################################################################################################################\n # key lines\n print 'computing scores between %s and %s ...' %(ds1.name, ds2.name)\n s, matching_algorithm_name = matchingAlgorithm(ds1, ds2, **kwargs) # list of the form [('20120612.0000', 0.9),...] 
\n scores[modelIndex] = dict(s) # { model index: {'dataTime': score,...}, ...}\n # debug\n if verbose:\n print scores\n print '..................................................'\n #\n ########################################################################################################################\n\n print 'sorting the results...'\n #print 'scores', '\\n', scores #debug\n # bestMatchAtEachTime\n T1 = [v.dataTime for v in ds1]\n T2 = [v.dataTime for v in ds2] #any ds2 will do - they are the same, in this case we pick the last one \n T = set(T1).intersection(T2)\n dataTimeList = sorted(list(T))\n\n #print 'dataTimeList:', dataTimeList #debug\n highestScores = []\n ordering = [] #added 2013-09-23\n\n if averageScoreOrdering: # take the average scores before ordering to make it uniform\n ave_scores = [ (v, np.mean( [scores[v].values() ] ) ) for v in scores.keys()]\n ave_scores = dict(ave_scores)\n print '..taking averages...............................'\n print ave_scores\n for u in scores.keys():\n for t in scores[u].keys():\n scores[u][t] = ave_scores[u]\n print '\\n'.join([str(scores[v]) for v in scores.keys()])\n\n for t in dataTimeList:\n print t\n highestScoreAt_t = max([scores[u][t] for u in scores.keys()]) # u = model index (integer 1-20), t = dataTime (string)\n new_entry = (t, highestScoreAt_t, [u for u in scores.keys() if scores[u][t]==highestScoreAt_t])\n print new_entry\n highestScores.append(new_entry)\n #added 2013-09-23\n ordering.append([0]+sorted(range(1, len(modelsList)+1), key=lambda v: scores[v][t], reverse=True))\n\n\n\n\n # bestMatchOverall\n print scores # debug\n averageScores = [(Model, np.mean(scores[Model].values())) for Model in scores.keys()] # list of [(Model, mean score),...]\n highestScore = max([v[1] for v in averageScores])\n bestMatch = [u[0] for u in averageScores if u[1]==highestScore] \n # debug\n print 'average scores , highest average, best match', averageScores, highestScore, bestMatch\n #xx= raw_input('press enter') \n\n\n bestMatchAtEachTime = highestScores\n bestMatchOverall = (highestScore, bestMatch)\n\n # the following lines do not work well since oftentimes we have multiple pointers (names) to a single object/function\n # and so they are replaced by returning the function name in the function itself\n #allvars = vars()\n #matching_algorithm_name = [v for v in allvars if allvars[v]==matchingAlgorithm][0]\n #mv_folder = summary_folder + matching_algorithm_name + '/'\n mv_folder = summary_folder + matching_algorithm_name + '/' # added 2013-09-27\n\n result= {'best matches' : bestMatchAtEachTime, \n 'best overall match' : bestMatchOverall, \n 'all scores' : scores,\n 'matching algorithm' : matching_algorithm_name,\n 'dataset1[obs]' : observations,\n 'dataset2[models]' : modelsFolder,\n 'time spent' : time.time() - t0,\n 'mv folder' : mv_folder, #added 2013-09-23\n 'ordering' : ordering, #added 2013-09-23\n }\n\n try:\n os.makedirs(mv_folder)\n print 'folder', mv_folder, \"created!\"\n except:\n print 'folder', mv_folder, 'exists!'\n pickle.dump(result, open(mv_folder+'result.pydump','w'))\n\n ############################################################################\n # to make the videos/collected images\n # calling armor.dataStreamTools.makeVideo.makeVideo()\n # 2013-09-23\n\n from . 
import makeVideo as mv\n \n try:\n os.makedirs(mv_folder)\n print 'folder', mv_folder, \"created!\"\n except:\n print 'folder', mv_folder, 'exists!'\n # make images\n if makeSmallImages:\n print 'making small images'\n for ds in [ds1] + wrfs:\n #ds.imageFolder= mv_folder + ds.name + '/'\n ds.imageFolder= summary_root + ds.name + '/' #added 2013-09-27 to replace the above line - too tired waiting the plots\n ds.saveImages(drawCoast=True, flipud=True, dpi=smallDpi)\n print ds.imageFolder\n else:\n # just set the paths\n for ds in [ds1]+wrfs:\n ds.imageFolder = summary_root + ds.name +'/' # /home/k/ARMOR/data/KONG-REY/summary/WRF01.DBZ/\n for dbzPattern in ds.list:\n dbzPattern.imagePath = ds.imageFolder + ds.name + dbzPattern.dataTime + '.png'\n print dbzPattern.imagePath\n #ds.setImagePaths() # doesn't for for old pickled objects???\n\n\n # make panels\n\n mv.makeVideo(DSS = [ds1]+ wrfs, # [ds0, ds1, ds2, ds3, ds4, ...], a list of armor.pattern.DBZstream objects\n panel_cols = panel_cols, # number of colums in the panel\n panel_rows = panel_rows, # no need to be filled\n #fourcc = cv.CV_FOURCC('F', 'L', 'V', '1'),\n #fps = defaultFps,\n extension= '.avi',\n #fourcc = cv.CV_FOURCC('P', 'I', 'M', '1'),\n outputFileName =\"\",\n outputFolder= mv_folder,\n sandbox = mv_folder ,\n saveFrames = True, # saving the frames as images\n frameDpi = frameDpi,\n useCV2 = useCV2,\n useCV = useCV,\n ordering = ordering, # ordering of the models\n )\n ############################################################################\n # dumping the results\n try:\n os.makedirs(summary_folder+ 'pydumps/')\n except:\n print 'Folder exists! -- ', summary_folder+ 'pydumps/' \n\n pickle.dump(result, open(summary_folder+ 'pydumps/results_' + matching_algorithm_name+ '+' +\\\n key1 + '+' + key2 + str(int(time.time())) + '.pydump', 'w'))\n\n \n\n print \"time spent:\", time.time()-t0\n return result", "def _update(self, other):\n # NOTE: detail map properties should NEVER be overridden. NEVER. EVER. kthx.\n if other.use_alpha:\n self.use_alpha = True\n if other.mipmap:\n self.mipmap = True", "def __advanced_adaptive_comp_theorem(self):\n epsilon_history, delta_history = \\\n zip(*self._private_data_epsilon_delta_access_history)\n global_epsilon, global_delta = self._epsilon_delta\n\n delta_sum = sum(delta_history)\n epsilon_squared_sum = sum(epsilon ** 2 for epsilon in epsilon_history)\n\n first_fraction = global_epsilon ** 2 / (28.04 * log(1 / global_delta))\n\n first_sum_epsilon = sum(eps * (exp(eps) - 1) * 0.5\n for eps in epsilon_history)\n first_parentheses = epsilon_squared_sum + first_fraction\n second_parentheses = 2 + log(epsilon_squared_sum / first_fraction + 1)\n last_factor = log(2 / global_delta)\n\n privacy_loss_k = first_sum_epsilon + \\\n sqrt(first_parentheses * second_parentheses * last_factor)\n\n return privacy_loss_k > global_epsilon or \\\n delta_sum > (global_delta * 0.5)", "def computeMatchInfo( df, inpCol, predCol, true_label=\"label\", method=\"---\", seuil=0. 
) :\n df_data = predict( df, inpCol, predCol, seuil=seuil )\n df_ca = computeClassAtt( df_data[true_label], df_data[predCol], method )\n df_ca[\"threshold\"] = seuil\n\n return df_data, df_ca", "def get_bprop_greater_equal(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def match(self, other):", "def set_weighting_method(args):\n weighting_methods = {\n \"COV\": \"use_cov\",\n }\n weighting_to_use = {}\n complete = False\n arguments = []\n if args.use_weighting:\n arguments = [arg.strip() for arg in args.use_weighting.split(',')]\n for method in weighting_methods.keys():\n if not complete and method in arguments:\n weighting_to_use.update({weighting_methods[method]: True})\n complete = True\n else:\n weighting_to_use.update({weighting_methods[method]: False})\n\n return argparse.Namespace(**weighting_to_use)", "def __call__(cls, image):\n\n logger.info('Gain Correcting Image')\n\n saturate = 0.\n gains = []\n for amp in decaminfo.amps:\n sec = section2slice(image['DATASEC' + amp])\n gain = image['GAIN' + amp]\n gains.append(gain)\n image.data[sec] *= gain\n\n # Adjust the weight or variance image if present:\n if image.weight is not None:\n image.weight[sec] *= 1. / (gain * gain)\n if image.variance is not None:\n image.variance[sec] *= gain * gain\n\n # Adjust keywords\n image['GAIN' + amp] = image['GAIN' + amp] / gain\n image['SATURAT' + amp] = image['SATURAT' + amp] * gain\n saturate = max(saturate, image['SATURAT' + amp])\n # Scale the SKYVAR if it's already here\n kw = 'SKYVAR' + amp\n if kw in image.header.keys():\n image[kw] = image[kw] * gain * gain\n # The FLATMED will keep track of rescalings *after* gain:\n image['FLATMED' + amp] = 1.\n\n # The SATURATE keyword is assigned to maximum of the two amps.\n image['SATURATE'] = saturate\n\n # Some other keywords that we will adjust crudely with mean gain\n # if they are present:\n gain = np.mean(gains)\n for kw in ('SKYBRITE', 'SKYSIGMA'):\n if kw in image.header.keys():\n image[kw] = image[kw] * gain\n\n # One other keyword to adjust:\n image['BUNIT'] = 'electrons'\n\n logger.debug('Finished applying Gain Correction')\n ret_code = 0\n return ret_code", "def __getattr__( self, attrName ):\r\n if attrName!=attrName.lower() and attrName!=\"caseSensitive\" and not self.caseSensitive and \\\r\n (attrName.startswith(\"start_\") or attrName.startswith(\"end_\")):\r\n return getattr(self,attrName.lower())\r\n raise AttributeError, attrName", "def get_over(self, filter_dict, percentage):\n pass", "def attrNavigationControlGrp(*args, adjustableColumn: int=0, adjustableColumn2: int=0,\n adjustableColumn3: int=0, adjustableColumn4: int=0,\n adjustableColumn5: int=0, adjustableColumn6: int=0, annotation:\n Union[AnyStr, bool]=\"\", attrNavDecision: Union[List[name, AnyStr],\n bool]=None, attribute: Union[name, bool]=None, backgroundColor:\n Union[List[float, float, float], bool]=None, columnAlign:\n Union[List[int, AnyStr], List[List[int, AnyStr]]]=None,\n columnAlign2: List[AnyStr, AnyStr]=None, columnAlign3: List[AnyStr,\n AnyStr, AnyStr]=None, columnAlign4: List[AnyStr, AnyStr, AnyStr,\n AnyStr]=None, columnAlign5: List[AnyStr, AnyStr, AnyStr, AnyStr,\n AnyStr]=None, columnAlign6: List[AnyStr, AnyStr, AnyStr, AnyStr,\n AnyStr, AnyStr]=None, columnAttach: Union[List[int, AnyStr, int],\n List[List[int, AnyStr, int]]]=None, columnAttach2: List[AnyStr,\n AnyStr]=None, columnAttach3: List[AnyStr, AnyStr, AnyStr]=None,\n columnAttach4: List[AnyStr, AnyStr, AnyStr, AnyStr]=None,\n columnAttach5: 
List[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]=None,\n columnAttach6: List[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr,\n AnyStr]=None, columnOffset2: List[int, int]=None, columnOffset3:\n List[int, int, int]=None, columnOffset4: List[int, int, int,\n int]=None, columnOffset5: List[int, int, int, int, int]=None,\n columnOffset6: List[int, int, int, int, int, int]=None,\n columnWidth: Union[List[int, int], List[List[int, int]]]=None,\n columnWidth1: int=0, columnWidth2: List[int, int]=None,\n columnWidth3: List[int, int, int]=None, columnWidth4: List[int,\n int, int, int]=None, columnWidth5: List[int, int, int, int,\n int]=None, columnWidth6: List[int, int, int, int, int, int]=None,\n connectAttrToDropped: Union[Script, bool]=None,\n connectNodeToDropped: Union[Script, bool]=None, connectToExisting:\n Union[Script, bool]=None, createNew: Union[Script, bool]=None,\n defaultTraversal: Union[Script, bool]=None, defineTemplate:\n AnyStr=\"\", delete: Union[AnyStr, bool]=\"\", disconnect:\n Union[Script, bool]=None, docTag: Union[AnyStr, bool]=\"\",\n dragCallback: Script=None, dropCallback: Script=None, enable:\n bool=True, enableBackground: bool=True, enableKeyboardFocus:\n bool=True, exists: bool=True, extraButton: bool=True,\n extraButtonCommand: Script=None, extraButtonIcon: Union[AnyStr,\n bool]=\"\", fullPathName: bool=True, height: Union[int, bool]=0,\n highlightColor: Union[List[float, float, float], bool]=None,\n ignore: Union[Script, bool]=None, ignoreNotSupported: bool=True,\n isObscured: bool=True, label: Union[AnyStr, bool]=\"\", manage:\n bool=True, noBackground: bool=True, noIgnorableMenu: bool=True,\n noKeyableMenu: bool=True, numberOfPopupMenus: bool=True, parent:\n Union[AnyStr, bool]=\"\", popupMenuArray: bool=True,\n preventOverride: bool=True, relatedNodes: Union[Script, bool]=None,\n rowAttach: Union[List[int, AnyStr, int], List[List[int, AnyStr,\n int]]]=None, statusBarMessage: AnyStr=\"\", unignore: Union[Script,\n bool]=None, useTemplate: AnyStr=\"\", visible: bool=True,\n visibleChangeCommand: Union[Script, bool]=None, width: Union[int,\n bool]=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def get_bprop_equal(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n\n return bprop", "def _default_eval_func(a, b):\n emphasis = \"r2\"\n a_value = getattr(a, emphasis)\n b_value = getattr(b, emphasis)\n return a_value > b_value", "def hook(images, augmenter, parents, default):\n return (augmenter.__class__.__name__ in MASK_AUGMENTERS)", "def apply_algorithm(self):\n pass", "def virtual_method(self, k):\n self.gk = [norm_2(self.quaternion[k])]\n self.g_mink = [1.]\n self.g_maxk = [1.]", "def compute_rule(rule, profile, weights=None, missing_rule=None):\n if rule == \"per_quota\" or rule == \"per_quota_min\" \\\n or rule == \"per_quota_mod\":\n voters = weights[0].keys()\n else:\n voters = weights.keys()\n if missing_rule == \"empty\":\n profile = copy.deepcopy(profile)\n for voter in voters:\n if voter not in profile.voters:\n profile.voters.append(voter)\n profile.approval_sets[voter] = []\n elif missing_rule == \"all\":\n profile = copy.deepcopy(profile)\n for voter in voters:\n if voter not in profile.voters:\n profile.voters.append(voter)\n profile.approval_sets[voter] = list(profile.cands)\n\n elif missing_rule == \"ignore\":\n pass\n else:\n for voter in voters:\n if voter not in profile.voters:\n raise Exception(\"Missing voter\")\n if profile.has_empty_sets():\n raise Exception(\"Voters with empty approval 
sets\")\n\n if rule == \"per_pav\":\n return per_pav(profile, weights)\n elif rule == \"per_consensus\":\n return per_consensus(profile, weights)\n elif rule == \"per_unitcost\":\n return per_unitcost(profile, weights)\n elif rule == \"per_reset\":\n return per_reset(profile, weights)\n elif rule == \"per_nash\":\n return per_nash(profile, weights)\n elif rule == \"per_equality\":\n return per_equality(profile, weights)\n elif rule == \"av\":\n return av(profile)\n elif rule == \"per_phragmen\":\n return per_phragmen(profile, weights)\n elif rule == \"per_quota\":\n return per_quota(profile, weights)\n elif rule == \"per_quota_mod\":\n return per_quota_mod(profile, weights)\n elif rule == \"per_quota_min\":\n return per_quota_min(profile, weights)\n elif rule == \"random_dictatorship\":\n return random_dictatorship(profile)\n elif rule == \"random_serial_dictatorship\":\n return random_serial_dictatorship(profile)\n elif rule == \"per_majority\":\n return per_majority(profile, weights)\n elif rule == \"per_2nd_prize\":\n return per_2nd_prize(profile, weights)\n elif rule == \"rotating_dictatorship\":\n return rotating_dictatorship(profile, weights)\n elif rule == \"rotating_serial_dictatorship\":\n return rotating_serial_dictatorship(profile, weights)\n elif rule == \"per_minmax_dryspell\":\n return per_minmax_dryspell(profile, weights)\n else:\n raise NotImplementedError(\"rule \" + str(rule) + \" unknown\")" ]
[ "0.52170765", "0.51648027", "0.49750897", "0.48653188", "0.48162937", "0.48143739", "0.47746363", "0.4698766", "0.46927482", "0.46442884", "0.46392474", "0.46255955", "0.46191144", "0.45699182", "0.45459855", "0.4538789", "0.45209238", "0.45171022", "0.45171022", "0.44952714", "0.4466709", "0.44502747", "0.44156227", "0.43906692", "0.43858176", "0.43601447", "0.43601447", "0.43601447", "0.43601447", "0.4357063", "0.43566504", "0.434893", "0.4342156", "0.43402815", "0.43334243", "0.43133068", "0.43090996", "0.42960092", "0.42926893", "0.42908016", "0.42894748", "0.42871803", "0.42840385", "0.42750037", "0.42714792", "0.4270969", "0.42667392", "0.42614657", "0.42583197", "0.4257132", "0.4250311", "0.4237513", "0.42370874", "0.42354187", "0.42324558", "0.4218877", "0.4218595", "0.42156717", "0.42114985", "0.42082074", "0.4198353", "0.4197008", "0.41969028", "0.41957912", "0.4192442", "0.41887146", "0.41779968", "0.41778454", "0.41771197", "0.41759846", "0.41721705", "0.4169924", "0.41627303", "0.41578096", "0.41545352", "0.41541585", "0.4150862", "0.41471595", "0.41441426", "0.4143967", "0.41430956", "0.41396648", "0.41394088", "0.41328347", "0.41285804", "0.41269964", "0.4125143", "0.4123102", "0.41212088", "0.41174543", "0.4117266", "0.41136378", "0.41086188", "0.41078407", "0.4104139", "0.41008332", "0.41006544", "0.40981102", "0.40940604", "0.40909678" ]
0.49315447
3
Method computes lookup tables of the cumulative ``galprop`` PDF defined by ``input_galaxy_table``.
def build_one_point_lookup_table(self, **kwargs):
    galaxy_table = kwargs['input_galaxy_table']
    prim_galprop_bins = kwargs['prim_galprop_bins']

    self.one_point_lookup_table = np.zeros(
        len(prim_galprop_bins)+1, dtype=object)

    binned_prim_galprop = np.digitize(
        galaxy_table[self.prim_galprop_key],
        self.prim_galprop_bins)

    for i in range(len(self.one_point_lookup_table)):
        idx_bini = np.where(binned_prim_galprop == i)[0]
        if model_helpers.custom_len(idx_bini) > self.minimum_sampling:
            gals_bini = galaxy_table[idx_bini]
            abcissa = np.arange(len(gals_bini))/float(len(gals_bini)-1)
            ordinates = np.sort(gals_bini[self.galprop_key])
            self.one_point_lookup_table[i] = (
                model_helpers.custom_spline(abcissa, ordinates, k=2)
                )

    # For all empty lookup tables, fill them with the nearest lookup table
    unfilled_lookup_table_idx = np.where(
        self.one_point_lookup_table == 0)[0]
    filled_lookup_table_idx = np.where(
        self.one_point_lookup_table != 0)[0]

    if len(unfilled_lookup_table_idx) > 0:
        msg = ("When building the one-point lookup table from input_galaxy_table, " +
            "there were some bins of prim_galprop_bins that contained fewer than " +
            str(self.minimum_sampling) + " galaxies. In such cases, the lookup table " +
            "of the nearest sufficiently populated bin will be chosen.")
        warn(msg)
        for idx in unfilled_lookup_table_idx:
            closest_filled_idx_idx = array_utils.find_idx_nearest_val(
                filled_lookup_table_idx, idx)
            closest_filled_idx = filled_lookup_table_idx[closest_filled_idx_idx]
            self.one_point_lookup_table[idx] = (
                self.one_point_lookup_table[closest_filled_idx])
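The function above builds, for each prim_galprop bin, a spline through (cumulative probability, sorted galprop) pairs, i.e. an empirical inverse CDF. Below is a minimal standalone sketch of that idea for a single bin, assuming only numpy and scipy: scipy.interpolate.InterpolatedUnivariateSpline stands in for model_helpers.custom_spline, and the mock galprop values are made up.

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

rng = np.random.default_rng(43)

# Mock galaxy sample for a single prim_galprop bin (values are hypothetical)
galprop = rng.lognormal(mean=0.0, sigma=0.5, size=1000)

# Build the inverse-CDF lookup table exactly as in the method above:
# abscissa = cumulative probability, ordinates = sorted galprop values
abscissa = np.arange(len(galprop)) / float(len(galprop) - 1)
ordinates = np.sort(galprop)
lookup = InterpolatedUnivariateSpline(abscissa, ordinates, k=2)

# Monte Carlo draw: uniform randoms mapped through the empirical inverse CDF
u = rng.random(5)
mc_galprop = lookup(u)
print(mc_galprop)

Evaluating the spline at uniform randoms draws new galprop values whose one-point distribution matches the input sample, which is how such a lookup table is typically consumed.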
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _mc_galprop(self, seed=None, **kwargs):\n model_helpers.update_param_dict(self, **kwargs)\n self._set_correlation_strength()\n\n if ('galaxy_table' in kwargs.keys()) & ('halos' in kwargs.keys()):\n msg = (\"The mc_\"+self.galprop_key+\" method accepts either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\" + \n \" but never both.\")\n raise KeyError(msg)\n elif 'galaxy_table' in kwargs.keys():\n galaxy_table = kwargs['galaxy_table']\n operative_sec_haloprop_key = (\n model_defaults.host_haloprop_prefix + self.sec_haloprop_key)\n elif 'halos' in kwargs.keys():\n galaxy_table = kwargs['halos']\n operative_sec_haloprop_key = self.sec_haloprop_key\n else:\n msg = (\"The mc_\"+self.galprop_key+\" requires either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\")\n raise KeyError(msg)\n\n self.add_new_haloprops(galaxy_table)\n\n # All at once, draw all the randoms we will need\n np.random.seed(seed=seed)\n all_randoms = np.random.random(len(galaxy_table)*2)\n galprop_cumprob = all_randoms[0:len(galaxy_table)]\n galprop_scatter = all_randoms[len(galaxy_table):]\n\n # Initialize the output array\n output_galprop = np.zeros(len(galaxy_table))\n\n # Determine binning and loop range\n if 'galaxy_table_slice_array' not in kwargs.keys():\n binned_prim_galprop = np.digitize(\n galaxy_table[self.prim_galprop_key], \n self.prim_galprop_bins)\n prim_galprop_loop_range = set(binned_prim_galprop)\n else:\n prim_galprop_loop_range = range(len(self.one_point_lookup_table))\n\n for i in prim_galprop_loop_range:\n\n # Determine the slice corresponding to the i^th prim_galprop bin\n if 'galaxy_table_slice_array' not in kwargs.keys():\n idx_bini = np.where(binned_prim_galprop==i)[0]\n num_bini = len(idx_bini)\n else:\n idx_bini = kwargs['galaxy_table_slice_array'][i]\n num_bini = len(galaxy_table[idx_bini])\n\n if len(idx_bini) > 0:\n # Fetch the appropriate number of randoms\n # for the i^th prim_galprop bin\n galprop_cumprob_bini = galprop_cumprob[idx_bini]\n galprop_scatter_bini = galprop_scatter[idx_bini]\n\n # Fetch the halos in the i^th prim_galprop bin, \n # and determine how they are sorted\n haloprop_bini = galaxy_table[idx_bini][operative_sec_haloprop_key]\n idx_sorted_haloprop_bini = np.argsort(haloprop_bini)\n\n galprop_bini = self._condition_matched_galprop(\n haloprop_bini[idx_sorted_haloprop_bini], \n galprop_cumprob_bini, i, galprop_scatter_bini, self.tol)\n\n # Assign the final values to the \n # appropriately sorted subarray of output_galprop\n output_galprop[idx_bini[idx_sorted_haloprop_bini]] = galprop_bini\n\n return output_galprop", "def make_derived_table(filename):\r\n column_keys, get_data = get_csv(filename)\r\n\r\n year_column = column_keys[1:].index('Year')\r\n pcg_column = column_keys[1:].index('PrimaryConditionGroup')\r\n \r\n #pcg_keys = list(PCG_LUT.keys())\r\n \r\n t0 = time.clock()\r\n \r\n NUM_GROUPS = 100\r\n num_rows = 0\r\n for group in range(NUM_GROUPS):\r\n derived_dict = {'ALL':{}, 'Y1':{}, 'Y2':{}, 'Y3':{}}\r\n print 'group=%d' % group\r\n _, get_data = get_csv(filename)\r\n for k,v in get_data():\r\n if (int(k) % NUM_GROUPS) != group:\r\n continue\r\n year = v[year_column]\r\n pcg = get_pcg_index(v[pcg_column])\r\n #if not v[pcg_column] in pcg_keys:\r\n # pcg_keys.append(v[pcg_column])\r\n #print '>', v[pcg_column]\r\n #print '\"%s\" => %d' % (v[pcg_column], pcg)\r\n \r\n if num_rows and num_rows % 10000 == 0:\r\n t = time.clock() - t0\r\n eta = int(t * (2668990 - num_rows)/num_rows)\r\n print ' %8d row (%4.1f%%) %7.1f 
sec, %4d rows/sec, eta = %6d sec' % (num_rows, \r\n 100.0 * num_rows/2668990, t, int(num_rows/t), eta) \r\n\r\n for y in (year, 'ALL'):\r\n if not k in derived_dict[y].keys():\r\n derived_dict[y][k] = [0, {}] \r\n derived_dict[y][k][0] += 1\r\n derived_dict[y][k][1][pcg] = derived_dict[y][k][1].get(pcg, 0) + 1 \r\n \r\n num_rows += 1\r\n \r\n print 'Coallescing' \r\n for year in derived_dict:\r\n for k in derived_dict[year].keys():\r\n if int(k) % NUM_GROUPS != group:\r\n continue\r\n derived_dict[year][k][1] = get_max_key(derived_dict[year][k][1]) \r\n pickled_path = make_group_name(group) \r\n pkl_file = open(pickled_path , 'wb')\r\n pickle.dump(derived_dict, pkl_file, -1) # Pickle the data using the highest protocol available.\r\n pkl_file.close() \r\n\r\n derived_dict = {'ALL':{}, 'Y1':{}, 'Y2':{}, 'Y3':{}} \r\n for group in range(NUM_GROUPS):\r\n pickled_path = make_group_name(group) \r\n pkl_file = open(pickled_path , 'rb')\r\n part_dict = pickle.load(pkl_file) \r\n pkl_file.close()\r\n for y,d in part_dict.items():\r\n for k,v in d.items():\r\n derived_dict[y][k] = (part_dict[y][k][0], part_dict[y][k][1]) \r\n\r\n if False:\r\n print '-' *80\r\n for k in pcg_keys:\r\n print \" '%s',\" % k \r\n exit() \r\n \r\n for year in derived_dict:\r\n derived_filename = '%s%s_%s' % (DERIVED_PREFIX, year, filename)\r\n data_writer = csv.writer(open(derived_filename , 'wb'), delimiter=',', quotechar='\"')\r\n data_writer.writerow(DERIVED_COLUMN_KEYS)\r\n for k in sorted(derived_dict[year].keys()):\r\n v = derived_dict[year][k]\r\n #print ' ', derived_dict[year][k], v2\r\n data_writer.writerow([k, str(v[0]), str(v[1])])", "def compute_lookuptable(self):\n\n if self.uselookuptable:\n # Evaluation lookup tables \n self.action_isok = np.zeros( ( self.nodes_n , self.actions_n ) , dtype = bool )\n self.x_next = np.zeros( ( self.nodes_n , self.actions_n , self.DS.n ) , dtype = float ) # lookup table for dynamic\n \n # For all state nodes \n for node in range( self.nodes_n ): \n \n x = self.nodes_state[ node , : ]\n \n # For all control actions\n for action in range( self.actions_n ):\n \n u = self.actions_input[ action , : ]\n \n # Compute next state for all inputs\n x_next = self.DS.fc( x , u ) * self.dt + x\n \n # validity of the options\n x_ok = self.DS.isavalidstate(x_next)\n u_ok = self.DS.isavalidinput(x,u)\n \n self.x_next[ node, action, : ] = x_next\n self.action_isok[ node, action] = ( u_ok & x_ok )", "def tabulate_pdf(self):\n\n from mitsuba.core import Float, Vector2f, ScalarVector2f\n\n extents = self.bounds.extents()\n endpoint = self.bounds.max - extents / ScalarVector2f(self.res)\n\n # Compute a set of nodes where the PDF should be evaluated\n x, y = ek.meshgrid(\n ek.linspace(Float, self.bounds.min.x, endpoint.x, self.res.x),\n ek.linspace(Float, self.bounds.min.y, endpoint.y, self.res.y)\n )\n\n endpoint = extents / ScalarVector2f(self.res)\n eps = 1e-4\n nx = ek.linspace(Float, eps, endpoint.x * (1 - eps), self.ires)\n ny = ek.linspace(Float, eps, endpoint.y * (1 - eps), self.ires)\n wx = [1 / (self.ires - 1)] * self.ires\n wy = [1 / (self.ires - 1)] * self.ires\n wx[0] = wx[-1] = wx[0] * .5\n wy[0] = wy[-1] = wy[0] * .5\n\n integral = 0\n\n self.histogram_start = time.time()\n for yi, dy in enumerate(ny):\n for xi, dx in enumerate(nx):\n xy = self.domain.map_forward(Vector2f(x + dx, y + dy))\n pdf = self.pdf_func(xy)\n integral = ek.fmadd(pdf, wx[xi] * wy[yi], integral)\n self.histogram_end = time.time()\n\n self.pdf = integral * (ek.hprod(extents / ScalarVector2f(self.res))\n * 
self.sample_count)\n\n # A few sanity checks\n pdf_min = ek.hmin(self.pdf) / self.sample_count\n if not pdf_min >= 0:\n self._log('Failure: Encountered a cell with a '\n 'negative PDF value: %f' % pdf_min)\n self.fail = True\n\n self.pdf_sum = ek.hsum(self.pdf) / self.sample_count\n if self.pdf_sum > 1.1:\n self._log('Failure: PDF integrates to a value greater '\n 'than 1.0: %f' % self.pdf_sum)\n self.fail = True", "def create_exgauss_lookup_table(self):\n return self.exgauss_cdf_nparray(range(self.xmin,self.xmax, self.dx)).tolist(), range(self.xmin,self.xmax, self.dx)", "def pc_project(\n mt: hl.MatrixTable,\n loadings_ht: hl.Table,\n loading_location: str = \"loadings\",\n af_location: str = \"pca_af\",\n) -> hl.Table:\n mt = pc_hwe_gt(mt, loadings_ht, loading_location, af_location)\n mt = mt.annotate_cols(scores=hl.agg.array_sum(mt.pca_loadings * mt.GTN))\n return mt.cols().select(\"scores\")", "def create_smarter_lookup_table(self, y=0.95):\n # First determine an approximate starting point for the lookup taqble by halving the max value till the point \n # where the cdf value is less than the cdf value we are looking for\n xold = self.xmax\n xnew = self.xmax\n y_calc = self.exgauss_cdf(xnew)\n while y_calc > y:\n xold = xnew\n xnew = xnew/2.\n y_calc = self.exgauss_cdf(xnew)\n \n # Make sure the interval over which this is being constructed is okay\n npts = 10. # Number of data pts in case the interval xold-xnew is smaller than self.dx\n if xold-xnew < self.dx:\n dx = int((xold-xnew)/npts)\n else: \n dx = self.dx\n # Now start building the lookup table from the value of x\n return self.exgauss_cdf_nparray(range(int(xnew),int(xold), dx)).tolist(), range(int(xnew),int(xold), dx)", "def _sql_gen_intermediate_pi_aggregate(params, table_name=\"df_e\"):\n\n gamma_cols_expr = \", \".join(params._gamma_cols)\n\n sql = f\"\"\"\n select {gamma_cols_expr}, sum(match_probability) as expected_num_matches, sum(1- match_probability) as expected_num_non_matches, count(*) as num_rows\n from {table_name}\n group by {gamma_cols_expr}\n \"\"\"\n return sql", "def _compute_pTable(self, expand=False, factor=False,\n simplify=False):\n if self._has(\"p\"):\n return\n if not self._has(\"k\"):\n self.kTable(expand=expand, factor=factor, simplify=simplify)\n if not self._has(\"m\"):\n self.multiplicities(expand=expand, factor=factor,\n simplify=simplify)\n p = Array3D(self._.d + 1)\n self._compute_parameters(p, self._.P, self._.m, integral=True,\n name=PARAMETER, sym=SYMBOL)\n self._.p = p\n self.check_handshake()", "def _engprop(l): # {{{1\n print(\" \\\\begin{tabular}[t]{rcrrl}\")\n print(\" \\\\multicolumn{4}{c}{\\\\small\"\n \"\\\\textbf{Laminate stacking}}\\\\\\\\[0.1em]\")\n print(\" \\\\toprule %% \\\\usepackage{booktabs}\")\n print(\" Layer & Weight & Angle & vf & Fiber type\\\\\\\\\")\n print(\" & [g/m$^2$] & [$\\\\circ$] & [\\\\%]\\\\\\\\\")\n print(\" \\\\midrule\")\n for ln, la in enumerate(l.layers, start=1):\n s = \" {} & {:4.0f} & {:5.0f} & {:.3g} & {}\\\\\\\\\"\n texfname = la.fiber.name.replace('_', '\\_')\n print(s.format(ln, la.fiber_weight, la.angle, la.vf*100, texfname))\n print(\" \\\\bottomrule\")\n print(\" \\\\end{tabular}\\\\hspace{0.02\\\\textwidth}\")\n print(\" \\\\begin{tabular}[t]{rrl}\")\n print(\" \\\\multicolumn{3}{c}{\\\\small\\\\textbf{Engineering\"\n \" properties}}\\\\\\\\[0.1em]\")\n print(\" \\\\toprule\")\n print(\" Property & Value & Dimension\\\\\\\\\")\n print(\" \\\\midrule\")\n print(\" $\\\\mathrm{{v_f}}$ & {:.3g} &\\\\%\\\\\\\\\".format(l.vf*100))\n print(\" 
$\\\\mathrm{{w_f}}$ & {:.3g} &\\\\%\\\\\\\\\".format(l.wf*100))\n print(\" thickness & {:.3g} & mm\\\\\\\\\".format(l.thickness))\n print(\" density & {:.3g} & g/cm$^3$\\\\\\\\\".format(l.ρ))\n s = \" weight & {:.0f} & g/m$^2$\\\\\\\\\"\n print(s.format(l.fiber_weight+l.resin_weight))\n print(\" resin & {:.0f} & g/m$^2$\\\\\\\\\".format(l.resin_weight))\n print(\" \\\\midrule\")\n print(\" $\\\\mathrm{{E_x}}$ & {:8.0f} & MPa\\\\\\\\\".format(l.Ex))\n print(\" $\\\\mathrm{{E_y}}$ & {:8.0f} & MPa\\\\\\\\\".format(l.Ey))\n print(\" $\\\\mathrm{{G_{{xy}}}}$ & {:8.0f} & MPa\\\\\\\\\".format(l.Gxy))\n print(\" $\\\\mathrm{{\\\\nu_{{xy}}}}$ & {:g} &-\\\\\\\\\".format(l.νxy))\n print(\" $\\\\mathrm{{\\\\nu_{{yx}}}}$ & {:g} &-\\\\\\\\\".format(l.νyx))\n s = \" $\\\\mathrm{{\\\\alpha_x}}$ & {:g} & K$^{{-1}}$\\\\\\\\\"\n print(s.format(l.αx))\n s = \" $\\\\mathrm{{\\\\alpha_y}}$ & {:g} & K$^{{-1}}$\\\\\\\\\"\n print(s.format(l.αy))\n print(\" \\\\bottomrule\")\n print(\" \\\\end{tabular}\")", "def do_latex_table_middle(self, tabletype, injkey):\n if tabletype == 'fiducial_fit_params':\n h0_params, h1_params = self.get_resulting_hypo_params(\n injkey=injkey\n )\n data_params = self.get_injected_params()\n for param in h0_params.keys():\n # Get the units for this parameter\n val, param_units = self.parse_pint_string(\n pint_string=h0_params[param]\n )\n # Get priors if they exists\n if 'gaussian' in self.all_params['h0_params'][param]['prior']:\n h0stddev, h0maximum = self.extract_gaussian(\n prior_string=self.all_params['h0_params'][\n param]['prior'],\n units=param_units\n )\n else:\n h0stddev = None\n h0maximum = None\n if 'gaussian' in self.all_params['h1_params'][param]['prior']:\n h1stddev, h1maximum = self.extract_gaussian(\n prior_string=self.all_params['h1_params'][\n param]['prior'],\n units=param_units\n )\n else:\n h1stddev = None\n h1maximum = None\n # Include injected parameter, fitted parameters and\n # differences with appropriate formatting.\n if data_params is not None:\n tableline = \" \"\n tableline += \"%s \"%self.tex_axis_label(param)\n if param == 'deltam31':\n tableline += r\" / $10^{-3}$ \"\n if param_units != 'dimensionless':\n tableline += \"(%s) &\"%self.tex_axis_label(param_units)\n else:\n tableline += \"&\"\n if param in data_params.keys():\n dataval = self.extract_paramval(\n injparams=data_params,\n systkey=param\n )\n if param == 'deltam31':\n dataval *= 1000.0\n if (np.abs(dataval) < 1e-2) and (dataval != 0.0):\n tableline += \"%.2e &\"%dataval\n else:\n tableline += \"%.3g &\"%dataval\n # If no injected parameter, show this and the\n # deltas with a line\n else:\n dataval = '--'\n tableline += \"%s &\"%dataval\n h0val = self.extract_paramval(\n injparams=h0_params,\n systkey=param\n )\n if param == 'deltam31':\n h0val *= 1000.0\n tableline += self.format_table_line(\n val=h0val,\n dataval=dataval,\n stddev=h0stddev,\n maximum=h0maximum\n )\n h1val = self.extract_paramval(\n injparams=h1_params,\n systkey=param\n )\n if param == 'deltam31':\n h1val *= 1000.0\n tableline += self.format_table_line(\n val=h1val,\n dataval=dataval,\n stddev=h1stddev,\n maximum=h1maximum,\n last=True\n )\n tableline += r\" \\\\\\\\ \\hline\\n\"\n self.texfile.write(tableline)\n # If no injected parameters it's much simpler\n else:\n h0val = self.extract_paramval(\n injparams=h0_params,\n systkey=param\n )\n h1val = self.extract_paramval(\n injparams=h1_params,\n systkey=param\n )\n if (np.abs(h0val) < 1e-2) and (h0val != 0.0):\n self.texfile.write(r\" %s & %.2e & %.2e\\n\"%(\n 
self.tex_axis_label(param), h0val, h1val))\n else:\n self.texfile.write(r\" %s & %.3g & %.3g\\n\"%(\n self.tex_axis_label(param), h0val, h1val))\n elif tabletype == \"fiducial_fit_metrics\":\n h0_fid_metric = self.fid_values[injkey][\n 'h0_fit_to_%s'%(self.labels.dict['data'])]['metric_val']\n h1_fid_metric = self.fid_values[injkey][\n 'h1_fit_to_%s'%(self.labels.dict['data'])]['metric_val']\n\n # Need the type of metric here. Doesn't matter which\n # fit that comes from so just choose h0_fit_to_h0_fid\n # since it will always exist.\n metric_type = self.values[injkey][\n 'h0_fit_to_h0_fid']['metric_val']['type']\n # In the case of likelihood, the maximum metric is the better fit.\n # With chi2 metrics the opposite is true, and so we must multiply\n # everything by -1 in order to apply the same treatment.\n if 'chi2' not in metric_type:\n logging.info(\n \"Converting likelihood metric to chi2 equivalent.\"\n )\n h0_fid_metric *= -1\n h1_fid_metric *= -1\n\n # If truth is known, report the fits the correct way round\n if self.labels.dict['data_name'] is not None:\n if self.labels.dict['data_name'] in \\\n self.labels.dict['h0_name']:\n delta = h1_fid_metric-h0_fid_metric\n elif self.labels.dict['data_name'] in \\\n self.labels.dict['h1_name']:\n delta = h0_fid_metric-h1_fid_metric\n else:\n logging.warning(\n \"Truth is known but could not be identified in \"\n \"either of the hypotheses. The difference between\"\n \" the best fit metrics will just be reported as \"\n \"positive and so will not necessarily reflect if \"\n \"the truth was recovered.\"\n )\n if h1_fid_metric > h0_fid_metric:\n delta = h0_fid_metric-h1_fid_metric\n else:\n delta = h1_fid_metric-h0_fid_metric\n # Else just report it as delta between best fits\n else:\n if h1_fid_metric > h0_fid_metric:\n delta = h0_fid_metric-h1_fid_metric\n else:\n delta = h1_fid_metric-h0_fid_metric\n # Write this in the file\n newline = \" %.3g \"%h0_fid_metric\n newline += \"& %.3g \"%h1_fid_metric\n newline += \"& %.3g \"%delta\n newline += r\"\\\\\\\\ \\hline\\n\"\n self.texfile.write(newline)\n else:\n raise ValueError(\n \"This function is only for adding the content to metric\"\n \" or fit param tables in LaTeX. Got type %s\"%tabletype\n )", "def calc_table(resolution, i_dc_max, u_pn_max, lut_fn, log_fn=None):\n grid_res = [resolution, resolution, resolution]\n\n if log_fn is not None:\n log_file = open(log_fn, mode='w')\n else:\n log_file = sys.stderr\n\n i_dc_range = np.linspace(0, i_dc_max, num=grid_res[0])\n u_pn_range = np.linspace(0, u_pn_max, num=grid_res[1])\n u_bc_range = np.linspace(0, 0.5, num=grid_res[2])\n\n opt_mode = np.zeros(grid_res) # optimizer return code (error code, 0 means success)\n grid_res.append(4)\n sw_times = np.zeros(grid_res)\n n_not_solved = 0\n\n log_file.write('resolution: {}\\n'.format(resolution))\n log_file.write('i_dc_max: {}\\n'.format(i_dc_max))\n log_file.write('u_pn_max: {}\\n'.format(u_pn_max))\n\n time.clock()\n total_pts = len(i_dc_range) * len(u_pn_range) * len(u_bc_range)\n pts_done = 0\n\n # sweep the 3D grid, u_bc must be the inner most loop for convergence reasons\n for (k1, i_dc) in enumerate(i_dc_range):\n log_file.write('---------------------\\n')\n for (k2, u_pn) in enumerate(u_pn_range):\n log_file.write('--------\\n')\n log_file.write('k1={0:} k2={1:}\\n'.format(k1,k2))\n\n last_t_opt = []\n\n # traverse starting with u2=05 for which we operate like a conventional DAB were we have a closed\n # analytic solution. 
This is then used as starting point for the next point\n for (k3, u_bc) in reversed(list(enumerate(u_bc_range))):\n u_ac = 1 # this is our normalization ref voltage\n u_ab = u_ac - u_bc\n u = [u_ab, u_bc, u_pn]\n log_file.write('u={0:} i_dc={1:.7f}\\n'.format(u, i_dc))\n\n t_opt, m = calc_t_opt(u, i_dc, i_dc, last_t_opt, do_print=False)\n\n if m == 0:\n # double check the validity of the obtained solution\n m = check_solution(u, t_opt, i_dc)\n\n opt_mode[k1, k2, k3] = m\n sw_times[k1, k2, k3, 0:4] = t_opt\n\n if m != 0:\n n_not_solved += 1\n log_file.write('^ not solved\\n')\n # mark point in table so the user can investigate the problem\n else :\n last_t_opt = t_opt # keep a copy of our initial conditions\n # show a progress bar in the terminal\n pts_done = pts_done + 1\n suffix = 'elapsed: {}s'.format(int(time.clock()))\n print_progress(pts_done, total_pts, prefix='Progress', suffix=suffix, decimals=1, bar_length=80)\n\n log_file.write('\\nnumber of points not solved: {}\\n'.format(n_not_solved))\n if log_fn is not None:\n log_file.close()\n sys.stderr.write('\\nnumber of points not solved: {}\\n'.format(n_not_solved))\n # write LUT data to file\n export_csv(lut_fn, grid_res, i_dc_range, u_pn_range, u_bc_range, sw_times)", "def table_gen(NamesL_pairs, p_pL, m_mL, p_mL, m_pL, p_valsL, p_vals_BonferoniL, RatiosL, p_valsL_divergent_convergent,\n p_valsL_divergent_convergent_BonferoniL, RatiosL_divergent_convergent, output_table):\n datafile = open(output_table, \"w\")\n datafile.write(\n \"Feature_1\" + '\\t' + \"Feature_2\" + \"\\t\" + \"plus_plus\" + '\\t' + \"minus_minus\" + '\\t' + \"plus_minus\" + '\\t' + \"minus_plus\" + '\\t' + \"p_value_same_opposite\" + '\\t' + \"p-value_same_opposite_Bonferoni_corrected\" + '\\t' + \"Ratio_same_opposite\" + '\\t' + \"p_value_divergent_convergent\" + '\\t' + \"p_value_divergent_convergent Bonferoni corrected\" + '\\t' + \"Ratio divergent convergent\" + '\\n')\n for i in range(len(NamesL_pairs)):\n datafile.write(\n NamesL_pairs[i][0] + '\\t' + NamesL_pairs[i][1] + '\\t' + str(p_pL[i]) + '\\t' + str(m_mL[i]) + '\\t' + str(\n p_mL[i]) + '\\t' + str(m_pL[i]) + '\\t' + str(p_valsL[i]) + '\\t' + str(p_vals_BonferoniL[i]) + '\\t' + str(\n RatiosL[i]) + '\\t' + str(p_valsL_divergent_convergent[i]) + '\\t' + str(\n p_valsL_divergent_convergent_BonferoniL[i]) + '\\t' + str(RatiosL_divergent_convergent[i]) + '\\n')\n datafile.close()\n return", "def calc_prior_path_prob(self, output_filenm=\"\"):\n logger.info(\"Calculating prior map\")\n programs_map = {}\n unique_cluster_ids = set() # have to do this since the assigned cluster ids doesnt seems to be contiguous or start from 0 or end at K-1\n for c in self.args.cluster_assignments:\n unique_cluster_ids.add(c)\n for c in unique_cluster_ids:\n for _, ((e1, r), e2_list) in enumerate(tqdm((self.train_map.items()))):\n if self.args.cluster_assignments[self.entity_vocab[e1]] != c:\n # if this entity does not belong to this cluster, don't consider.\n continue\n if c not in programs_map:\n programs_map[c] = {}\n if r not in programs_map[c]:\n programs_map[c][r] = {}\n all_paths_around_e1 = self.all_paths[e1]\n nn_answers = e2_list\n for nn_ans in nn_answers:\n programs = self.get_programs(e1, nn_ans, all_paths_around_e1)\n for p in programs:\n p = tuple(p)\n if len(p) == 1:\n if p[0] == r: # don't store query relation\n continue\n if p not in programs_map[c][r]:\n programs_map[c][r][p] = 0\n programs_map[c][r][p] += 1\n for c, r in programs_map.items():\n for r, path_counts in programs_map[c].items():\n 
sum_path_counts = 0\n for p, p_c in path_counts.items():\n sum_path_counts += p_c\n for p, p_c in path_counts.items():\n programs_map[c][r][p] = p_c / sum_path_counts\n\n if not output_filenm:\n dir_name = os.path.join(args.data_dir, \"data\", self.args.dataset_name, \"linkage={}\".format(self.args.linkage))\n if not os.path.exists(dir_name):\n os.makedirs(dir_name)\n output_filenm = os.path.join(dir_name, \"path_prior_map.pkl\")\n\n logger.info(\"Dumping path prior pickle at {}\".format(output_filenm))\n with open(output_filenm, \"wb\") as fout:\n pickle.dump(programs_map, fout)", "def map_cell_property(**kwargs):\n\n GR = glo.global_results()\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n counter = 0\n fignum = 1\n if p.gal_index == 'all':\n for gal_index in range(GR.N_gal):\n\n if counter == 0:\n fig, axes = plt.subplots(3, 3, figsize=(20,15))\n axs = [axes[0,0],axes[0,1],axes[0,2],axes[1,0],axes[1,1],axes[1,2],axes[2,0],axes[2,1],axes[2,2]]\n counter = 9\n\n gal_ob = gal.galaxy(GR=GR, gal_index=gal_index)\n print('Now mapping %s' % gal_ob.name)\n isrf_ob = gal.isrf(gal_index)\n\n # Load SKIRT output\n wavelengths,bin_width = aux.read_probe_wavelengths(isrf_ob.name)\n N_start,N_stop = aux.FUV_index(wavelengths)\n image_data,units = isrf_ob._get_cut_probe(orientation=p.orientation)\n\n # Plot\n ax1 = axs[9 - counter]\n if p.prop == 'FUV':\n # FUV_xy_image = np.array([np.trapz(image_data[N_start:N_stop,:,:],x=wavelengths[N_start:N_stop]) \\\n # for i in range(len(df))])\n FUV_xy_image = image_data[N_start:N_stop,:,:].sum(axis=0) * 4 * np.pi\n FUV_xy_image = ndimage.rotate(FUV_xy_image, 0, reshape=True)\n # FUV_xy_image = np.fliplr(FUV_xy_image)\n FUV_xy_image[FUV_xy_image <= 0] = np.min(FUV_xy_image[FUV_xy_image > 0])\n im = ax1.imshow(np.log10(FUV_xy_image),\\\n extent=[-isrf_ob.radius,isrf_ob.radius,-isrf_ob.radius,isrf_ob.radius],\\\n vmin=p.vmin,\\\n cmap='twilight')\n lab = 'FUV flux [W/m$^2$/micron]'\n\n # pdb.set_trace()\n\n ax1.set_xlabel('x [kpc]'); ax1.set_ylabel('y [kpc]')\n # Limit axes limits a bit to avoid area with no particles...\n ax1.set_xlim([-0.8*gal_ob.radius,0.8*gal_ob.radius])\n ax1.set_ylim([-0.8*gal_ob.radius,0.8*gal_ob.radius])\n if p.prop == 'm':\n ax1.text(0.05,0.85,'M$_{gas}$=%.2eM$_{\\odot}$' % np.sum(simgas.m),\\\n fontsize=14,transform=ax1.transAxes,color='white')\n\n counter -= 1\n\n\n if counter == 0:\n cbar = fig.colorbar(im, ax=axes.ravel().tolist(), shrink=0.95, label=lab)\n # fig.colorbar(im,shrink=0.8,label=lab)\n\n if counter == 0 or gal_index == GR.N_gal-1:\n figname = p.d_plot + 'cell_data/map_%s_%s_gals_%s_%i.png' % (p.prop,p.z1,p.orientation,fignum)\n print('Saving in ' + figname)\n # plt.tight_layout()\n plt.savefig(figname, format='png', dpi=250, facecolor='w')\n fignum += 1\n pdb.set_trace()\n else:\n fig, ax1 = plt.subplots(figsize=(10,10))\n gal_ob = gal.galaxy(GR=GR, gal_index=p.gal_index)\n simgas = aux.load_temp_file(gal_ob=gal_ob,data_type='cell_data')\n print(simgas.keys())\n map2D,lab,max_scale = make_projection_map(simgas,prop=p.prop)\n\n # Plot\n Rmax = max_scale/2\n if p.log:\n map2D[map2D < 10.**p.vmin] = 10.**p.vmin/2\n map2D = np.log10(map2D)\n if not p.log: map2D[map2D < p.vmin] = p.vmin/2 #np.min(map2D[map2D > 0])\n im = ax1.imshow(map2D,\\\n extent=[-Rmax,Rmax,-Rmax,Rmax],vmin=p.vmin,cmap=p.cmap)\n # Limit axes limits a bit to avoid area with no particles...\n ax1.set_xlim([-2/3*gal_ob.radius,2/3*gal_ob.radius])\n ax1.set_ylim([-2/3*gal_ob.radius,2/3*gal_ob.radius])\n 
fig.colorbar(im,shrink=0.8,ax=ax1,label=lab)\n ax1.set_xlabel('x [kpc]'); ax1.set_ylabel('y [kpc]')\n\n print('Saving in ' + p.d_plot + 'sim_data/map_%s_G%i.png' % (p.prop,p.gal_index))\n if not os.path.isdir(p.d_plot + 'cell_data/'): os.mkdir(p.d_plot + 'cell_data/')\n plt.savefig(p.d_plot + 'cell_data/map_%s_G%i.png' % (p.prop,p.gal_index), format='png', dpi=250, facecolor='w')", "def dp_cal_and_pro_only(foods, cal_goal, pro_goal):\n macros = init_two_d_array((cal_goal, pro_goal), 999999999)\n foods_used = init_two_d_array((cal_goal, pro_goal), {})\n\n for i in range(cal_goal):\n for j in range(pro_goal):\n for n in range(len(foods)):\n food = foods[n]\n if (int(food['calories']) > i and int(food['protein']) > j):\n continue\n if (macros[i - int(food['calories'])]\n [j - int(food['protein'])]\n == 999999999):\n prev_cost = 0\n prev_foods_used = {}\n else:\n prev_cost = (macros[i - int(food['calories'])]\n [j - int(food['protein'])])\n prev_foods_used = \\\n (foods_used[i - int(food['calories'])]\n [j - int(food['protein'])]).copy()\n new_cal = calories(foods, prev_foods_used) + food['calories']\n new_pro = protein(foods, prev_foods_used) + food['protein']\n if (macros[i][j] > prev_cost + food['serving_cost']\n and new_cal > i - 50 and new_cal < i + 10\n and new_pro > j - 5 and new_pro < j + 5):\n macros[i][j] = prev_cost + food['serving_cost']\n try:\n prev_foods_used[n] += 1\n except KeyError:\n prev_foods_used[n] = 1\n foods_used[i][j] = prev_foods_used\n return foods_used[cal_goal - 1][pro_goal - 1]", "def computeProp(self):\n self.chem = {}\n for key in self.config.C:\n if key in ['P', 'T', 'Z', 'DZ']:\n continue\n self.chem[key] = chemistry.ConstituentProperties(key)\n\n # nAtm = len(self.gas[self.config.C['P']])\n self.property = []\n for op in self.config.LP:\n self.property.append([])\n zOffset = 0.0\n iOffset = 0\n psep = 1.0E6\n for i, zv in enumerate(self.gas[self.config.C['Z']]): # find the nearest z value at p_ref\n P = self.gas[self.config.C['P']][i]\n if abs(P - self.config.p_ref) < psep:\n psep = abs(P - self.config.p_ref)\n iOffset = i\n zOffset = self.gas[self.config.C['Z']][iOffset]\n z_at_p_ref = self.config.Req\n\n for i, zv in enumerate(self.gas[self.config.C['Z']]):\n T = self.gas[self.config.C['T']][i]\n P = self.gas[self.config.C['P']][i]\n self.property[self.config.LP['P']].append(P)\n self.property[self.config.LP['Z']].append(zv)\n rr = z_at_p_ref + zv - zOffset\n # note that this is the \"actual\"z along equator referenced to planet center (aka radius)\n self.property[self.config.LP['R']].append(rr)\n # ##set mean amu\n amulyr = 0.0\n for key in self.chem:\n amulyr += self.chem[key].amu * self.gas[self.config.C[key]][i]\n self.property[self.config.LP['AMU']].append(amulyr)\n # ##set GM pre-calc (normalized further down) and get lapse rate\n if not i:\n self.property[self.config.LP['GM']].append(0.0)\n self.property[self.config.LP['LAPSE']].append(0.0)\n self.property[self.config.LP['LAPSEP']].append(0.0)\n else:\n rho = (amulyr * P) / (chemistry.R * T)\n dr = abs(zv - self.gas[self.config.C['Z']][i - 1])\n dV = 4.0 * np.pi * (rr**2) * dr\n dM = 1.0e11 * rho * dV\n GdM = self.property[self.config.LP['GM']][i - 1] + chemistry.GravConst * dM\n # in km3/s2\n # mass added as you make way into atmosphere by radius r (times G)\n self.property[self.config.LP['GM']].append(GdM)\n dT = abs(T - self.gas[self.config.C['T']][i - 1])\n dP = abs(P - self.gas[self.config.C['P']][i - 1])\n self.property[self.config.LP['LAPSE']].append(dT / dr)\n 
self.property[self.config.LP['LAPSEP']].append(dT / dP)\n # ##set refractivity and index of refraction\n refrlyr = 0.0\n for key in self.chem:\n refrlyr += self.chem[key].refractivity(T=T) * self.gas[self.config.C[key]][i]\n refrlyr = refrlyr * P * (293.0 / T)\n self.property[self.config.LP['REFR']].append(refrlyr)\n nlyr = refrlyr / 1.0E6 + 1.0\n self.property[self.config.LP['N']].append(nlyr)\n\n # ##Now need to normalize GM to planet and calculate scale height (H)\n GMnorm = self.property[self.config.LP['GM']][iOffset] # G*(Mass added by p_ref)\n for i, mv in enumerate(self.property[self.config.LP['GM']]):\n gm = self.config.GM_ref - (mv - GMnorm)\n self.property[self.config.LP['GM']][i] = gm\n little_g = gm / self.property[self.config.LP['R']][i]**2\n m_bar = self.property[self.config.LP['AMU']][i]\n T = self.gas[self.config.C['T']][i]\n self.property[self.config.LP['H']].append((chemistry.R * T) /\n (little_g * m_bar) / 1000.0)\n self.property[self.config.LP['g']].append(little_g)\n self.property = np.array(self.property)", "def lookup_capacity(lookup, environment, ant_type, frequency,\n bandwidth, generation):\n if (environment, ant_type, frequency, bandwidth, generation) not in lookup:\n raise KeyError(\"Combination %s not found in lookup table\",\n (environment, ant_type, frequency, bandwidth, generation))\n\n density_capacities = lookup[\n (environment, ant_type, frequency, bandwidth, generation)\n ]\n\n return density_capacities", "def get_table(casedata, controldata, locus):\n import numpy, pandas\n tables = [] # - a list of lists\n for casecol,controlcol in pairs.items():\n # get ploidy of pop\n pop = casecol.split('.FREQ')[0]\n pop_ploidy = ploidy[pop]\n\n # get case-control frequencies of ALT allele\n case_freq = get_freq(casedata.loc[locus, casecol])\n cntrl_freq = get_freq(controldata.loc[locus, controlcol])\n\n # see if either freq is np.nan, if so, skip this pop\n if sum([x!=x for x in [case_freq, cntrl_freq]]) > 0:\n continue\n\n # collate info for locus (create contingency table data)\n t = []\n for freq in [cntrl_freq, case_freq]:\n t.extend([(1-freq)*pop_ploidy,\n freq*pop_ploidy])\n tables.append(t)\n # return contingency tables (elements of list) for this locus stratified by population (list index)\n return [numpy.reshape(x.tolist(), (2, 2)) for x in numpy.asarray(tables)]", "def compute_probability_for(fixation):\n probabilities = np.zeros(Number_of_locs) #MOD Number_of_locs deleted\n for possible_target_location in xrange(Number_of_locs): #MOD Number_of_locs deleted\n probabilities[possible_target_location] = integrate.quad(\n integral_function,\n -np.inf, np.inf,\n args=(possible_target_location,Dprime_map[fixation]),\n epsabs=0,\n limit=100,\n full_output=1\n )[0] #MOD Dprime_map deleted\n return np.sum(Post_probs * probabilities) #MOD Post_probs deleted", "def lookup_cost(lookup, strategy, environment):\n if (strategy, environment) not in lookup:\n raise KeyError(\"Combination %s not found in lookup table\",\n (strategy, environment))\n\n density_capacities = lookup[\n (strategy, environment)\n ]\n\n return density_capacities", "def set_lookup_qn(diagram, p_cm, p_max, gammas, skip=True, verbose=0):\n\n lookup_p = set_lookup_p(p_max, p_cm, diagram, skip)\n lookup_g = set_lookup_g(gammas, diagram)\n\n # TODO: A more elegant solution for combining lookup_p and lookup_g is welcome\n # maybe Multiindex.from_product()\n tmp = it.product(lookup_p, lookup_g)\n lookup_qn = []\n for t in tmp:\n lookup_qn.append(t[0]+t[1])\n lookup_qn = DataFrame(lookup_qn, 
columns=['p_{so}', 'p_{si}', '\\gamma_{so}', '\\gamma_{si}'])\n# lookup_qn['p_{so}'] = qn['p_{so}'].apply(np.array)\n# lookup_qn['p_{si}'] = qn['p_{si}'].apply(np.array)\n \n return lookup_qn", "def _get_feature_tables_for_protein(feature_table, accession) -> str:\n if not feature_table:\n return \"\"\n\n if accession not in feature_table:\n return \"\"\n\n ft_str = \"\"\n for key in feature_table[accession].keys():\n if key == \"VARIANT\":\n for ft_var in feature_table[accession][key]:\n if len(ft_var[0]) == 3: # CASE Replacement\n ft_str += (\n ('''\\nFT VARIANT {position}\\n''' +\n '''FT /note=\"{from_aa} -> {to_aa} (in GEN_BY_PG; {desc})\"\\n''' +\n '''FT /id=\"CUSTOM_{id}\"''').format(\n position=ft_var[0][2], from_aa=ft_var[0][0], to_aa=ft_var[0][1],\n desc=ft_var[1], id=ft_var[2]\n )\n )\n elif len(ft_var[0]) == 2: # CASE Replacement\n ft_str += (\n ('''\\nFT VARIANT {position}\\n''' +\n '''FT /note=\"Missing (in GEN_BY_PG; {desc})\"\\n''' +\n '''FT /id=\"CUSTOM_{id}\"''').format(\n position=ft_var[0][1],\n desc=ft_var[1], id=ft_var[2]\n )\n )\n\n return ft_str", "def compute_perlin(self, x, y, permutation_table):\n\n xi, yi = x.astype(int), y.astype(int)\n xg, yg = x - xi, y - yi\n xf, yf = self.compute_fade(xg), self.compute_fade(yg)\n\n p00 = permutation_table[permutation_table[xi] + yi]\n p01 = permutation_table[permutation_table[xi] + yi + 1]\n p10 = permutation_table[permutation_table[xi + 1] + yi]\n p11 = permutation_table[permutation_table[xi + 1] + yi + 1]\n\n n00 = self.compute_gradient(p00, xg, yg)\n n01 = self.compute_gradient(p01, xg, yg - 1)\n n10 = self.compute_gradient(p10, xg - 1, yg)\n n11 = self.compute_gradient(p11, xg - 1, yg - 1)\n\n x1 = self.compute_lerp(n00, n10, xf)\n x2 = self.compute_lerp(n01, n11, xf)\n return self.compute_lerp(x1, x2, yf)", "def merge_tables():\r\n filename = \"ppxf_results_best.dat\"\r\n s1 = np.genfromtxt(filename, usecols=(0,), dtype=None).tolist()\r\n sref = s1[:]\r\n sref.sort()\r\n x, y = get_positions(sref).T\r\n r = np.sqrt(x * x + y * y)\r\n pa = np.rad2deg(np.arctan2(x, y))\r\n pa[pa < 0.] += 360.\r\n data1 = np.loadtxt(filename, usecols=np.arange(1, 11))\r\n ##########################################################################\r\n # Account for difference in resolution\r\n # Not used anymore because the resolution is now matched in pPXF\r\n # fwhm_dif = (2.5 - 2.1) * c / 5500. 
/ 2.3548\r\n # data1[:,2] = np.sqrt(data1[:,2]**2 - fwhm_dif**2)\r\n ##########################################################################\r\n data1 = match_data(s1, sref, data1)\r\n results = np.column_stack((sref, x, y, r, pa, data1))\r\n header = ['FILE', \"X[kpc]\", \"Y[kpc]\",\r\n \"R[kpc]\", \"PA\",\r\n 'V', 'dV', 'S', 'dS', 'h3', 'dh3',\r\n 'h4', 'dh4', 'chi/DOF', 'S/N']\r\n with open(outtable, \"w\") as f:\r\n for i, field in enumerate(header):\r\n print \"# {0} : {1}\\n\".format(i, field)\r\n f.write(\"# {0} : {1}\\n\".format(i, field))\r\n np.savetxt(f, results, fmt=\"%s\")\r\n return", "def PDF(gal_index,**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n # PDF PLACEHOLDER\n lognHs = np.linspace(-5,8,200)\n total_PDF = np.zeros(len(lognHs))\n\n # READ CELL DATA\n gal_ob = gal.galaxy(gal_index)\n df = gal_ob.cell_data.get_dataframe()\n\n bins = 50\n\n # READ FIT PARAMS OF PDF\n if '_arepoPDF' in p.table_ext:\n fit_params_SFR = np.load(p.d_table+'fragment/PDFs%s_%ipc.npy' % (p.table_ext,p.res),allow_pickle=True).item()\n fit_params = fit_params_SFR['fit_params']\n\n # OPTIONAL : SELECT PART OF FITS\n # fit_params_SFR['SFR_bins'] = fit_params_SFR['SFR_bins'][0:-2]\n # fit_params = fit_params[:,0:-2,:]\n # fit_params_collapse = fit_params_collapse[:,0:-2,:]\n\n fit_lognH_bins = fit_params_SFR['n_vw_bins'] # log\n fit_nSFR_bins = fit_params_SFR['SFR_bins'] # log\n fit_lognH_bins_c = fit_lognH_bins[0:-1] + (fit_lognH_bins[-1]-fit_lognH_bins[-2])/2\n fit_nSFR_bins_c = fit_nSFR_bins[0:-1] + (fit_nSFR_bins[-1]-fit_nSFR_bins[-2])/2\n lognSFR_bins = fit_nSFR_bins#np.linspace(fit_nSFR_bins.min(),fit_nSFR_bins.max(),bins)\n print('log nH bins:')\n print(fit_lognH_bins_c)\n print('log SFR bins:')\n print(fit_nSFR_bins_c)\n if '_arepoPDF' not in p.table_ext:\n lognSFR_bins = np.linspace(-10,1,bins)\n\n # BIN CELL DATA TO REDUCE COMPUTATION TIME\n lognH_bins = np.linspace(-8,2,bins)\n lognH_bins_c = lognH_bins[0:-1] + (lognH_bins[1] - lognH_bins[0])/2\n lognSFR_bins_c = lognSFR_bins[0:-1] + (lognSFR_bins[1] - lognSFR_bins[0])/2\n\n # ADD THIS LOWER VALUE TO INCLUDE ALL CELLS (except density = 0)\n lognH_bins[0] = -30\n lognSFR_bins[0] = -30\n lognSFR_bins[-1] = 10\n\n df.SFR_density[df.SFR_density <= 10.**lognSFR_bins.min()] = 10.**(lognSFR_bins.min()+1)\n df.SFR_density[np.isnan(df.SFR_density)] = 10.**(lognSFR_bins.min()+1)\n\n if not p.add:\n fig = plt.figure(figsize=(15,6))\n ax = fig.add_subplot(1,2,1)\n\n print('Number of cells: ',len(df))\n if p.ow == False:\n try:\n PDF = pd.read_pickle(p.d_XL_data + 'data/cell_data/PDFs/%s%s_%s%s_%s' % (p.sim_name,p.sim_run,gal_ob.name,p.table_ext,p.res))\n total_PDF = PDF['total_PDF'].values\n lognHs = PDF['lognHs'].values\n except:\n p.ow = True\n if p.ow == True:\n print('Re-calculating PDF')\n i = 0\n poly1 = 0\n N_cells = 0\n \n for i_lognH in range(len(lognH_bins)-1):\n for i_lognSFR in range(len(lognSFR_bins)-1):\n \n df_cut = df[(df.nH >= 10**(lognH_bins[i_lognH])) & \\\n (df.nH < 10**(lognH_bins[i_lognH+1]))].reset_index(drop=True)\n if i_lognSFR > 0:\n # (for the first bin in nSFR, doesn't matter if cell has no nSFR)\n df_cut = df_cut[(df_cut.SFR_density >= 10**(lognSFR_bins[i_lognSFR])) & \\\n (df_cut.SFR_density < 10**(lognSFR_bins[i_lognSFR+1]))].reset_index(drop=True)\n N_cells += len(df_cut)\n lognH_mean, lognSFR = lognH_bins_c[i_lognH], lognSFR_bins_c[i_lognSFR]\n \n if '_arepoPDF' in p.table_ext:\n # print(lognH_mean,lognSFR,len(df_cut))\n if (lognH_bins[i_lognH] >= fit_lognH_bins[0]):\n 
print(lognH_bins[i_lognH],len(df_cut))\n i_fit_lognH_bins = np.argmin(np.abs(fit_lognH_bins_c - lognH_mean))\n i_fit_lognSFR_bins = np.argmin(np.abs(fit_nSFR_bins_c - lognSFR))\n fit_params_1 = fit_params[i_fit_lognH_bins,i_fit_lognSFR_bins,:]\n print(lognH_mean,lognSFR,fit_params_1)\n \n if np.sum(fit_params_1) != 0:\n PDF_integrated = 10.**aux.parametric_PDF(lognHs,lognH_mean,fit_params_1[1],fit_params_1[2])\n if fit_params_1[2] == -1.5:\n PDF_integrated = 10.**aux.parametric_PDF(lognHs,fit_params_1[0],fit_params_1[1],fit_params_1[2])\n poly1 += 1\n \n if np.sum(fit_params_1) == 0:\n print('uhoh',lognH_mean,lognSFR)\n PDF_integrated = aux.lognormal_PDF(10.**lognHs,10.**lognH_mean,Mach=1)\n \n if (lognH_mean < fit_lognH_bins[0]):\n PDF_integrated = aux.lognormal_PDF(10.**lognHs,10.**lognH_mean,Mach=10)\n PDF_integrated[np.isnan(PDF_integrated)] = 0\n if (lognH_mean < -4):\n PDF_integrated = aux.lognormal_PDF(10.**lognHs,10.**lognH_mean,Mach=1)\n PDF_integrated[np.isnan(PDF_integrated)] = 0\n \n if p.table_ext == '_M10':\n PDF_integrated = aux.lognormal_PDF(10.**lognHs,10.**lognH_mean,Mach=10)\n PDF_integrated[np.isnan(PDF_integrated)] = 0\n \n # Add to total PDF, weigthed by the mass of that cell\n total_PDF += PDF_integrated * np.sum(df_cut.m)/np.sum(df.m)\n if not p.add: ax.plot(10.**lognHs,PDF_integrated * np.sum(df_cut.m)/np.sum(df.m),color='grey',lw=1,alpha=0.3)\n if np.isnan(np.sum(total_PDF)):\n print(np.sum(df_cut.m)/np.sum(df.m),PDF_integrated)\n pdb.set_trace()\n i += 1\n # if i == 10: pdb.set_trace()\n \n print('Total number of cells processed: ',N_cells)\n print('Total number of bins: ',bins**2)\n print('Number of bins with parametric PDFs: %i' % (poly1))\n total_PDF = total_PDF / np.sum(total_PDF)\n PDF = pd.DataFrame({'lognHs':lognHs,'total_PDF':total_PDF})\n PDF.to_pickle(p.d_XL_data + 'data/cell_data/PDFs/%s%s_%s%s_%s' % (p.sim_name,p.sim_run,gal_ob.name,p.table_ext,p.res))\n\n print('TEST!!!')\n total_PDF = total_PDF[(lognHs >= -4) & (lognHs <= 7)]\n lognHs = lognHs[(lognHs >= -4) & (lognHs <= 7)]\n total_PDF = total_PDF / np.sum(total_PDF)\n if not p.add:\n # First figure: One panel of individual binned PDFs and one panel of total PDF\n ax.set_xscale('log')\n ax.set_yscale('log')\n ax.set_xlabel(getlabel('lnH'))\n ax.set_ylabel('dM/dlognH')\n ax.set_ylim([1e-12,1e-1])\n ax.set_xlim([1e-4,1e7])\n \n ax2 = fig.add_subplot(1,2,2)\n ax2.plot(10.**lognHs,total_PDF)\n ax2.set_xscale('log')\n ax2.set_yscale('log')\n ax2.set_xlabel(getlabel('lnH'))\n ax2.set_ylabel('dM/dlognH')\n ax2.set_ylim([1e-4,1e-1])\n ax2.set_xlim([1e-4,1e5])\n \n if not os.path.isdir(p.d_plot + 'cell_data/PDFs/'): os.mkdir(p.d_plot + 'cell_data/PDFs/') \n plt.savefig(p.d_plot + 'cell_data/PDFs/PDF_%s%s_%s.png' % (gal_ob.name,p.table_ext,p.res), format='png', dpi=250, facecolor='w')\n\n labels = {'_M10':'Mach = 10','_arepoPDF_M51':'AREPO-M51 parametrized PDF','_arepoPDF_CMZ':'AREPO-CMZ parametrized PDF'}\n\n # New figure: One panel of PDF and cumulative mass function (optional)\n if p.add:\n ax1 = p.ax#plt.gca()\n else:\n fig,ax1 = plt.subplots(figsize=(8,6))\n ax1.plot(lognHs,total_PDF,ls=p.ls,lw=2.5,color=p.color,label=labels[p.table_ext])\n ax1.set_yscale('log')\n if not p.add:\n ax1.set_xlabel('log nH [cm$^{-3}$]')\n ax1.set_ylabel('Mass fraction per bin')\n ax1.set_xlim([-4,7])\n ax1.set_ylim([1e-4,1e-1])\n ax1.grid(axis='x')\n #if p.add: ax1.legend()\n if not p.add:\n ax2 = ax1.twinx()\n ax2.plot(lognHs,np.cumsum(total_PDF),'--')\n ax2.grid(axis='y')\n ax2.set_ylim([0,1])\n ax2.set_ylabel('Cumulative 
mass fraction')\n ax2.text(0.4,0.1,'Mass fraction at nH > 1e3: %.1f %%' % (100*np.sum(total_PDF[lognHs >= 3])),\\\n transform=ax1.transAxes,fontsize=15,bbox=dict(facecolor='white', alpha=0.7))\n if not os.path.isdir(p.d_plot + 'cell_data/PDFs'): os.mkdir(p.d_plot + 'cell_data/PDFs') \n if not p.add: plt.savefig(p.d_plot + 'cell_data/PDFs/simple_PDF_%s%s_%s.png' % (gal_ob.name,p.table_ext,p.res), format='png', dpi=250, facecolor='w')\n\n # pdb.set_trace()", "def calc_resources(self):\n self.popula = self.energy = self.popula_used = self.energy_used = 0\n self.cnt_public = self.cnt_shop = self.cnt_1 = self.cnt_2 = self.cnt_3 = self.cnt_4 = self.cnt_5 = self.cnt_office = 0\n self.popula += self.extra_pop\n for i in range(20):\n b = self.b[i]\n if b == 'T':\n self.popula += self.f[i] * 2\n self.energy_used += 1\n elif b == 'O':\n self.popula_used += 1\n self.energy_used += 1\n self.cnt_office += self.f[i]\n elif b == 'U':\n self.popula_used += 1\n self.cnt_public += 1\n elif b == 'S':\n self.energy_used += 1\n self.cnt_shop += 1\n elif b == '1':\n self.popula += 1\n self.energy += 1\n self.popula_used += 1\n self.cnt_1 += 1\n elif b == '2':\n self.popula_used += 1\n self.cnt_2 += 1\n elif b == '3':\n self.popula_used += 1\n self.cnt_3 += 1\n elif b == '4':\n self.popula += 2\n self.popula_used += 1\n self.cnt_4 += 1\n elif b == '5':\n self.energy += 2\n self.popula_used += 1\n self.cnt_5 += 1\n elif b == 'A':\n self.energy += 2\n self.popula_used += 1\n elif b == 'F':\n self.energy += 3\n self.popula_used += 1\n elif b == 'G':\n self.popula += 1\n if 'tvst' in args.exp:\n self.popula += self.cnt_shop\n if 'ward' in args.exp:\n self.popula += 3\n if 'elec' in args.exp:\n self.energy += 3\n if 'capi' in args.exp:\n self.popula_used += 2\n if 'fire' in args.exp:\n self.popula_used += 1\n if 'park' in args.exp:\n self.popula_used += 1", "def set_lookup_g(gammas, diagram):\n\n if diagram == 'C20':\n lookup_so = it.product([g for gamma in gammas for g in gamma[:-1]])\n lookup_so, lookup_si = it.tee(lookup_so, 2)\n elif diagram == 'C2+':\n lookup_so = it.product([5])\n lookup_so, lookup_si = it.tee(lookup_so, 2)\n elif diagram == 'C3+':\n lookup_so = it.product([5], [5]) \n lookup_si = it.product([g for gamma in gammas for g in gamma[:-1]])\n elif diagram.startswith('C4'):\n lookup_so = it.product([5], [5]) \n lookup_so, lookup_si = it.tee(lookup_so, 2)\n else:\n print 'in set_lookup_g: diagram unknown! 
Quantum numbers corrupted.'\n return\n# indices = [[1,2,3],[10,11,12],[13,14,15]]\n# lookup_g2 = [list(it.product([i[j] for i in indices], repeat=2)) for j in range(len(indices[0]))]\n# lookup_g = [item for sublist in lookup_g2 for item in sublist]\n\n lookup_g = it.product(lookup_so, lookup_si)\n return list(lookup_g)", "def cloudy_table_map(x_index='lognHs',y_index='lognSFRs',**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n cloudy_library = clo.library()\n lookup_table = cloudy_library._restore_lookup_table()\n print(lookup_table.nH_mw.min())\n print(lookup_table.nH_mw.max())\n\n fig,ax = plt.subplots(figsize=(8,5))\n\n key_const1, key_const2, key_const3, key_const4 = list(p.keep_const.keys())[0],list(p.keep_const.keys())[1],list(p.keep_const.keys())[2],list(p.keep_const.keys())[3]\n value_const1, value_const2, value_const3, value_const4 = list(p.keep_const.values())[0],list(p.keep_const.values())[1],list(p.keep_const.values())[2],list(p.keep_const.values())[3]\n\n # for key, value in p.cloudy_param.items():\n # key = key\n # value = value\n\n # cloudy_parameters = np.array(['logNHs','lognHs','lognSFRs','logZs','logFUVs'])\n\n # x_index = cloudy_parameters[(cloudy_parameters != key) & (cloudy_parameters != 'Machs')][0]\n # y_index = cloudy_parameters[(cloudy_parameters != key) & (cloudy_parameters != 'Machs')][1]\n\n print('%s table values:' % key_const1)\n print(np.unique(lookup_table[key_const1]))\n print('kept fixed at %f' % value_const1)\n\n print('%s table values:' % key_const2)\n print(np.unique(lookup_table[key_const2]))\n print('kept fixed at %f' % value_const2)\n\n print('%s table values:' % key_const3)\n lookup_table[key_const3] = np.round(lookup_table[key_const3]*10.)/10.\n print(np.unique(lookup_table[key_const3]))\n print('kept fixed at %f' % value_const3)\n\n print('%s table values:' % key_const4)\n print(np.unique(lookup_table[key_const4]))\n print('kept fixed at %f' % value_const4)\n\n lookup_table_cut = lookup_table[(lookup_table[key_const1] == value_const1) & \\\n (lookup_table[key_const2] == value_const2) & \\\n (lookup_table[key_const3] == value_const3) & \\\n (lookup_table[key_const4] == value_const4)]\n x, y = lookup_table_cut[x_index].values, lookup_table_cut[y_index].values\n\n X, Y = np.meshgrid(np.unique(x), np.unique(y))\n print(lookup_table_cut.nH_mw.min())\n print(lookup_table_cut.nH_mw.max())\n\n\n if p.line == '[CII]158_CO(1-0)':\n line_lum = 10.**lookup_table_cut['[CII]158'].values / 10.**lookup_table_cut['CO(1-0)'].values\n line_lum = np.log10(line_lum)\n if p.line == 'alpha_CO':\n line_lum = 1e4 / aux.Lsun_to_K_km_s_pc2(10.**lookup_table_cut['CO(1-0)'].values,'CO(1-0)') \n try:\n line_lum = lookup_table_cut[p.line].values\n except:\n pass\n\n lum = line_lum.reshape([len(np.unique(x)), len(np.unique(y))]).T\n\n vmin = np.min(lum)\n vmax = np.max(lum)\n print(vmin,vmax)\n if p.zlim:\n vmin = p.zlim[0]\n vmax = p.zlim[1]\n lum[lum < vmin] = vmin\n lum[lum > vmax] = vmax\n if p.log: \n print('AAAA')\n lum = np.log10(lum)\n vmin,vmax = np.log10(vmin),np.log10(vmax)\n\n print('Highest and lowest value to be mapped:', np.min(lum), np.max(lum))\n print(vmin,vmax)\n\n cf = ax.contourf(X,Y, lum, cmap=\"jet\", vmin=vmin, vmax=vmax, levels=30, lw=0, rstride=1, cstride=1,alpha=0.8)\n if getlabel(p.line) == '':\n if p.log: plt.colorbar(cf,label='log '+p.line)\n if not p.log: plt.colorbar(cf,label=p.line)\n else: \n plt.colorbar(cf,label=getlabel(p.line))\n \n # Show where grid points are, but only where lum > 0\n 
failed_models = lookup_table_cut['fail'].values\n ax.plot(x[failed_models == 0],y[failed_models == 0],'x',ms=5,mew=2,color='w')\n\n translate_labels = {'lognHs':'lnH','logNHs':'lNH','logFUVs':'lG0','logZs':'lZ','lognSFRs':'lSFR_density'}\n ax.set_xlabel(getlabel(translate_labels[x_index]))\n ax.set_ylabel('\\n\\n' + getlabel(translate_labels[y_index]))\n if p.ylim: ax.set_ylim(p.ylim)\n if p.xlim: ax.set_xlim(p.xlim)\n plt.tight_layout()\n\n if p.savefig:\n if not os.path.isdir(p.d_plot + 'look-up/'): os.mkdir(p.d_plot + 'look-up/') \n plt.savefig(p.d_plot + 'look-up/cloudy_table%s_%s.png' % (p.grid_ext,p.line), format='png', dpi=300)", "def calories_protein(og, fg):\n\n return 0.994 * fg * real_extract(og, fg)", "def _compute_from_table_values(\n name=\"plant\", color=(0/255, 128/255, 0/255),\n germination_time=(3, 1), \n r_max=(1.0,1.0),\n maturation_time=(10, 1),\n stopping_color=(0, 0, 1),\n color_step=(10/255, 0, 0),\n c1=0.1,\n r_0=0.04\n ):\n\n c2 = 1\n k1, k2 = 0.3, 0.7\n unoccluded_c1 = c1 / k2\n h_0 = 0.1\n r_max = max(1, np.random.normal(MAX_RADIUS[name][0], MAX_RADIUS[name][1]))\n # r_max = MAX_RADIUS[name][0] + MAX_RADIUS[name][1]\n growth_time = generate_growth_time(germination_time, maturation_time, r_max, r_0, k2, c2)\n\n return {\n \"germination_time\": germination_time,\n \"maturation_time\": maturation_time,\n \"k1\": k1,\n \"k2\": k2,\n \"c1\": unoccluded_c1,\n \"c2\": c2,\n \"start_radius\": r_0,\n \"start_height\": h_0,\n \"r_max\": r_max,\n \"growth_time\": growth_time,\n \"plant_type\": name,\n \"color\": color,\n \"stopping_color\": stopping_color,\n \"color_step\": color_step\n }", "def glcmProps(P, prop='contrast'):\n\n (num_level, num_level2, num_dist, num_angle) = P.shape\n assert num_level == num_level2\n assert num_dist > 0\n assert num_angle > 0\n\n # create weights for specified property\n I, J = np.ogrid[0:num_level, 0:num_level]\n if prop == 'contrast':\n weights = (I - J) ** 2\n elif prop in ['ASM', 'energy', 'correlation']:\n pass\n elif prop == 'mean':\n weights, _ = np.mgrid[0:num_level, 0:num_level]\n elif prop == 'dissimilarity':\n weights = np.abs(I - J)\n elif prop == 'homogeneity':\n weights = 1. / (1. 
+ (I - J) ** 2)\n else:\n raise ValueError('%s is an invalid property' % (prop))\n\n # compute property for each GLCM\n if prop == 'energy':\n asm = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]\n results = np.sqrt(asm)\n elif prop == 'ASM':\n results = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]\n elif prop == 'correlation':\n results = np.zeros((num_dist, num_angle), dtype=np.float64)\n I = np.array(range(num_level)).reshape((num_level, 1, 1, 1))\n J = np.array(range(num_level)).reshape((1, num_level, 1, 1))\n diff_i = I - np.apply_over_axes(np.sum, (I * P), axes=(0, 1))[0, 0]\n diff_j = J - np.apply_over_axes(np.sum, (J * P), axes=(0, 1))[0, 0]\n\n std_i = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_i) ** 2),\n axes=(0, 1))[0, 0])\n std_j = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_j) ** 2),\n axes=(0, 1))[0, 0])\n cov = np.apply_over_axes(np.sum, (P * (diff_i * diff_j)),\n axes=(0, 1))[0, 0]\n\n # handle the special case of standard deviations near zero\n mask_0 = std_i < 1e-15\n mask_0[std_j < 1e-15] = True\n results[mask_0] = 1\n\n # handle the standard case\n mask_1 = mask_0 == False\n results[mask_1] = cov[mask_1] / (std_i[mask_1] * std_j[mask_1])\n elif prop in ['contrast', 'dissimilarity', 'homogeneity', 'mean']:\n weights = weights.reshape((num_level, num_level, 1, 1))\n results = np.apply_over_axes(np.sum, (P * weights), axes=(0, 1))[0, 0]\n\n return results", "def lookup_table():\n datapackage = {\n 'resources': [\n {\n 'schema': {\n 'fields': [\n {\n 'name': 'foo',\n 'maps_to': 'project_id'\n },\n {\n 'name': 'bar',\n 'maps_to': 'invalid_fiscal_field'\n },\n {\n 'name': 'spam',\n 'maps_to': None\n },\n {\n 'name': 'eggs',\n },\n\n ]\n\n }\n }\n ]\n }\n return build_lookup_table(datapackage)", "def table(x, kind=\"geopotential\"):\n\n # check the kind of altitude and raise an exception if necessary\n if kind == \"geopotential\":\n alt = x\n elif kind == \"geometric\":\n alt = util.geometric_to_geopotential(x)\n else:\n raise ValueError(\n \"%s is unsupported: Use either geopotential or \" \"geometric.\" % kind\n )\n\n h = np.asarray(alt)\n\n # check if altitude is out of bound and raise an exception if necessary\n if (h < H[0]).any() or (h > H[-1]).any():\n raise ValueError(\n \"the given altitude x is out of bound, this module is \"\n \"currently only valid for a geometric altitude between 0. and 86000. m\"\n )\n\n # K, molecule-scale temperature from eq. [23] of Notes reference\n tm = f_TM(h) + f_LM(h) * (h - f_H(h))\n\n # K, absolute temperature from eq. [22] of Notes reference\n T = tm * f_M_o_M0(h)\n\n if h.shape: # if h is not a 0-d array (like a scalar)\n # Pa, intialization of the pressure vector\n p = np.zeros(len(h))\n\n # points of h for which the molecular-scale temperature gradient is\n # zero\n zero_gradient = f_LM(h) == 0.0\n\n # points of h for which the molecular-scale temperature gradient is not\n # zero\n not_zero_gradient = f_LM(h) != 0.0\n\n # Pa, pressure from eq. [33b] of Notes reference\n p[zero_gradient] = f_P(h[zero_gradient]) * np.exp(\n -constants.g\n * M_0\n * (h[zero_gradient] - f_H(h[zero_gradient]))\n / (Rs * f_TM(h[zero_gradient]))\n )\n\n # Pa, pressure from eq. [33a] of Notes reference\n p[not_zero_gradient] = f_P(h[not_zero_gradient]) * (\n f_TM(h[not_zero_gradient])\n / (\n f_TM(h[not_zero_gradient])\n + f_LM(h[not_zero_gradient])\n * (h[not_zero_gradient] - f_H(h[not_zero_gradient]))\n )\n ) ** (constants.g * M_0 / (Rs * f_LM(h[not_zero_gradient])))\n\n else:\n if f_LM(h) == 0:\n # Pa, pressure from eq. 
[33b] of Notes reference\n p = f_P(h) * np.exp(-constants.g * M_0 * (h - f_H(h)) / (Rs * f_TM(h)))\n else:\n # Pa, pressure from eq. [33a] of Notes reference\n p = f_P(h) * (f_TM(h) / (f_TM(h) + f_LM(h) * (h - f_H(h)))) ** (\n constants.g * M_0 / (Rs * f_LM(h))\n )\n\n rho = p * M_0 / (Rs * tm) # kg / m^3, mass density\n\n return alt, T, p, rho", "def get_table(prod):\n pgconn = get_dbconn(\"mesosite\")\n cursor = pgconn.cursor()\n xref = [1.0e20] * 256\n cursor.execute(\n \"SELECT id, filename_template, units, cf_long_name \"\n \"from iemrasters where name = %s\",\n (prod,),\n )\n (rid, template, units, long_name) = cursor.fetchone()\n cursor.execute(\n \"\"\"\n SELECT coloridx, value from iemrasters_lookup\n WHERE iemraster_id = %s and value is not null\n ORDER by coloridx ASC\n \"\"\",\n (rid,),\n )\n for row in cursor:\n xref[row[0]] = row[1]\n return np.array(xref), template, units, long_name", "def build_lookup_tables(self):\n\n for component_model in self.model_dictionary.values():\n if hasattr(component_model, 'build_lookup_tables'):\n component_model.build_lookup_tables()", "def generatelookup(num_probes = 16):\n calibration_lookup= [[0] * 3 for i in range(num_probes)]\n # print(calibration_lookup)\n day = '050119r'\n date = '050119'\n\n def _run_calib(shots, dir):\n \"\"\" Helper function \"\"\"\n for shot in shots:\n shot = day+str(shot)\n ratios = ratio_4_doc(shot, dir)\n for i in range(num_probes):\n calibration_lookup[i][dir] = calibration_lookup[i][dir] + ratios[i][dir]\n if shot == shots[-1]: #on the last shot, divide by the number of shots\n calibration_lookup[i][dir] = calibration_lookup[i][dir]/ len(shots)\n\n # shot = day+str(13)\n # ratios = ratio_4_doc(shot, 2)\n # time,eastcurrent,westcurrent = loadcurrent(shot)#u\n # plt.plot(time, eastcurrent, label = 'current')\n #\n # plt.show()\n # print(\"And the max current is %f\" %polyPeak_noPlot(time,eastcurrent))\n # #check if cm or m\n # r = get_probeLocs_calib_setup(shot)\n # print(\"r is:\", r[1])\n # print(ratios)\n #\n #\n # X - direction\n print(\"Generating X - dir\")\n shots = np.arange(17,21)\n _run_calib(shots, 0)\n\n\n # Y - direction\n print(\"Generating Y - dir\")\n shots = np.arange(21,26)\n _run_calib(shots, 1)\n\n print(\"Generating Z - dir\")\n # Z - direction\n shots = np.arange(11,16)\n _run_calib(shots, 2)\n\n\n pth = os.getcwd()\n print(\"Finished! 
File saved as calib-%s-4x4_lookup_5.txt in cwd\" %(date))\n savetxt(os.path.join(pth, 'calib-%s-4x4_lookup_5.txt' % (date)) , calibration_lookup)", "def tablecost(self):\n subtotal_getter = operator.attrgetter(\"subtotal\")\n\n cost = 0.0\n\n cost += sum(map(subtotal_getter, self.materials))\n cost += sum(map(subtotal_getter, self.processes))\n cost += sum(map(subtotal_getter, self.fasteners))\n cost += sum(map(subtotal_getter, self.toolings))\n\n return cost", "def tabulate_histogram(self):\n\n # Generate a table of uniform variates\n from mitsuba.core import Float, Vector2f, Vector2u, Float32, \\\n UInt64, PCG32\n\n rng = PCG32(initseq=ek.arange(UInt64, self.sample_count))\n\n samples_in = getattr(mitsuba.core, 'Vector%if' % self.sample_dim)()\n for i in range(self.sample_dim):\n samples_in[i] = rng.next_float32() if Float is Float32 \\\n else rng.next_float64()\n\n self.pdf_start = time.time()\n\n # Invoke sampling strategy\n samples_out = self.sample_func(samples_in)\n\n if type(samples_out) is tuple:\n weights_out = samples_out[1]\n samples_out = samples_out[0]\n else:\n weights_out = Float(1.0)\n\n # Map samples into the parameter domain\n xy = self.domain.map_backward(samples_out)\n\n # Sanity check\n eps = self.bounds.extents() * 1e-4\n in_domain = ek.all((xy >= self.bounds.min - eps) &\n (xy <= self.bounds.max + eps))\n if not ek.all(in_domain):\n self._log('Encountered samples outside of the specified '\n 'domain: %s' % str(ek.compress(xy, ~in_domain)))\n self.fail = True\n\n # Normalize position values\n xy = (xy - self.bounds.min) / self.bounds.extents()\n xy = Vector2u(ek.clamp(xy * Vector2f(self.res), 0,\n Vector2f(self.res - 1)))\n\n # Compute a histogram of the positions in the parameter domain\n self.histogram = ek.zero(Float, ek.hprod(self.res))\n\n ek.scatter_add(\n target=self.histogram,\n index=xy.x + xy.y * self.res.x,\n source=weights_out\n )\n\n self.pdf_end = time.time()\n\n histogram_min = ek.hmin(self.histogram)\n if not histogram_min >= 0:\n self._log('Encountered a cell with negative sample '\n 'weights: %f' % histogram_min)\n self.fail = True\n\n self.histogram_sum = ek.hsum(self.histogram) / self.sample_count\n if self.histogram_sum > 1.1:\n self._log('Sample weights add up to a value greater '\n 'than 1.0: %f' % self.histogram_sum)\n self.fail = True", "def _generate_tabular(lookup_table, interpolation='linear', points_unit=u.pix, **kwargs):\n if not isinstance(lookup_table, u.Quantity):\n raise TypeError(\"lookup_table must be a Quantity.\") # pragma: no cover\n\n ndim = lookup_table.ndim\n TabularND = tabular_model(ndim, name=f\"Tabular{ndim}D\")\n\n # The integer location is at the centre of the pixel.\n points = [(np.arange(size) - 0) * points_unit for size in lookup_table.shape]\n if len(points) == 1:\n points = points[0]\n\n kwargs = {\n 'bounds_error': False,\n 'fill_value': np.nan,\n 'method': interpolation,\n **kwargs\n }\n\n t = TabularND(points, lookup_table, **kwargs)\n\n # TODO: Remove this when there is a new gWCS release\n # Work around https://github.com/spacetelescope/gwcs/pull/331\n t.bounding_box = None\n\n return t", "def generate_exptime_table(self, ):\n\n # Perform calculation for all stars in biased sample\n Ndraw = self.NBIAS\n\n np.random.seed(seed=None)\n\n # Allocate memory for exposure times\n t_tots = np.zeros(Ndraw)\n tpbpcs = []\n pct_obs_iwas = []\n lammax_obs_iwas = []\n specs = []\n\n \"\"\"\n Calculate the exposure times and spectra in each bandpass for each\n star in biased sample\n \"\"\"\n\n # Loop over stars in this 
sample\n for i in range(Ndraw):\n #print(\"HIP %i, %.2f pc, %s \" %(hip[i], dist[i], stype[i]))\n\n # Set system parameters for this star\n self.prep_ith_star(i)\n\n # Calculate the time to observe the complete spectrum\n t_tots[i], tpbpc, spectrum, iwa = self.complete_spectrum_time()\n\n tpbpcs.append(tpbpc)\n pct_obs_iwas.append(iwa[0])\n specs.append(spectrum)\n\n # Calculate channel widths\n deltas = []\n for channel in CHANNELS:\n l = default_luvoir(channel=channel)\n deltas.append(l.lammax - l.lammin)\n self.deltas = np.array(deltas)\n\n # Calculate channel fractional completeness\n self.channel_weights = (self.deltas / np.sum(self.deltas))\n\n # Calculate completeness for each star in sample\n self.completeness = np.sum(np.array(pct_obs_iwas) * self.channel_weights, axis = 1)\n\n \"\"\"\n Make a Lookup Table of Exposure times for each star in sample\n \"\"\"\n\n tpbpcs_rect = [] # Time per bandpass\n tpcs_rect = [] # Time per channel\n\n # Loop over all the stars in sample\n for idrew in range(self.NBIAS):\n\n tpbpcs_rect.append([])\n tpcs_rect.append([])\n bp_names = []\n bp_chan = []\n\n # Loop over all the LUVOIR channels\n for ichan in range(len(CHANNELS)):\n\n tpcs_rect[idrew].append(0.0)\n\n # Loop over all the bands in this channel\n for iband in range(len(tpbpcs[0][ichan])):\n\n bp_names.append(\"%s %i\" %(CHANNELS[ichan], iband+1))\n bp_chan.append(ichan)\n tpbpcs_rect[idrew].append(tpbpcs[idrew][ichan][iband])\n tpcs_rect[idrew][ichan] += tpbpcs[idrew][ichan][iband]\n\n # Make np arrays\n tpbpcs_rect = np.array(tpbpcs_rect)\n tpcs_rect = np.array(tpcs_rect)\n bp_names = np.array(bp_names)\n bp_chan = np.array(bp_chan)\n\n # Make infs --> nans\n infmask = ~np.isfinite(tpbpcs_rect)\n tpbpcs_rect[infmask] = np.nan\n infmask = ~np.isfinite(tpcs_rect)\n tpcs_rect[infmask] = np.nan\n\n # Set attributes\n self.tpbpcs_rect = tpbpcs_rect\n self.tpcs_rect = tpcs_rect\n self.bp_names = bp_names\n self.bp_chan = bp_chan\n\n \"\"\"\n New completeness calculations\n \"\"\"\n\n bandpasses = []\n\n # Loop over telescope channels\n for j, channel in enumerate(CHANNELS):\n\n # Channel dependent bandwidth?\n if type(self.bandwidth) is float:\n bandwidth = self.bandwidth\n else:\n assert len(self.bandwidth) == len(CHANNELS)\n bandwidth = self.bandwidth[j]\n\n # Get the channel specific telescope parameters\n luvoir = default_luvoir(channel=channel)\n self.cn.telescope = luvoir\n\n # Calculate the bandpass edges\n edges = calculate_bandpass_edges(luvoir.lammin, luvoir.lammax, bandwidth = bandwidth)\n\n # Calculate the number of bandpasses\n Nbands = len(edges) - 1\n\n # Loop over bandpasses\n for i in range(Nbands):\n\n # Get the max, min, and middle wavelenths for this bandpass\n lammin = edges[i]\n lammax = edges[i+1]\n\n bandpasses.append([lammin, lammax])\n\n bandpasses = np.array(bandpasses)\n lmin, lmax = np.min(np.hstack(bandpasses)), np.max(np.hstack(bandpasses))\n\n # Fractional completeness of each bandpass\n bp_frac = ((bandpasses[:,1] - bandpasses[:,0]) / (lmax - lmin)) / np.sum((bandpasses[:,1] - bandpasses[:,0]) / (lmax - lmin))\n\n # Completeness by target\n tot_completeness = np.sum(np.isfinite(self.tpbpcs_rect) * bp_frac, axis=1)\n\n # Fraction of stars in biased sample that can completely observe each bandpass\n frac_bias_bp = np.sum(np.isfinite(tpbpcs_rect)*1.0, axis=0) / self.NBIAS\n\n # Set attributes\n self.bandpasses = bandpasses\n self.bp_frac = bp_frac\n self.tot_completeness = tot_completeness\n self.frac_bias_bp = frac_bias_bp\n\n self._make_pandas_table()\n\n 
return", "def LA_contribution(self):\n pr=paraxial(self.entrance_pupil,0)\n #hnu=-u*self.entrance_pupil #n=1\n pr.propagate(self.surfaces)\n #print('hnu',hnu,1/hnu)\n #print('paraxial y ',pr.y[1:])\n #print('paraxial nu',pr.nu[:-1])\n #print('paraxial u ',pr.nu[:-1]/self.get_n()[:-1])\n #print('paraxial u ',pr.nu[:-1]/self.get_n()[:-1]/hnu/5.715023)\n #print('paraxial i ',pr.i[1:])\n ni=self.get_n()[:-1]*pr.i[1:]\n #print('ni',ni)\n marginal=beam_field()\n marginal.single_beam_from_Kingslake_Q(self.entrance_pupil,0) #marginal beam\n marginal.propagate(self.surfaces)\n Q=marginal.Kingslake_Qabs(self.surfaces)[:,0]\n Q_=marginal.Kingslake_Q_abs(self.surfaces)[:,0]\n #print('marginal Q ',marginal.Kingslake_Qabs(ls.surfaces)[:,0])\n #print('marginal Q\\'',marginal.Kingslake_Q_abs(ls.surfaces)[:,0])\n #print(Q-Q_)\n #print('paraxial nu\\'',pr.nu[1:])\n #print('sin Uk\\'',marginal.U)\n target_surface=len(self.surfaces)-1\n #print(marginal.U[3,0,1]*pr.nu[target_surface])\n nusinU=marginal.U[3,0,1]*pr.nu[target_surface] #n'u'sinU'_k all values at end focus\n LA=-(Q-Q_)*ni/nusinU\n #print('spherical LA contribution',LA)\n #print('sum',sum(LA))\n return LA", "def result_table(fmt='latex_booktabs'):\n \n names = [\n \"ETF EW.\",\n \"Antonacci ETF\",\n \"Antonacci ETF Inv. Vol.\",\n \"Futures EW.\",\n \"Antonacci Futures\",\n \"Antonacci Futures Inv. Vol.\",\n \"TSMOM Futures Low Vol.\",\n \"TSMOM Futures High Vol.\"\n ]\n\n # Get stats for each strategy\n s1 = calculate.stats_from_parameters(name='Antonacci', price_set='ETF', fee_rate_bps=10, get_top=7, target_vol=40, periods=6, vol_weight=False)\n s2 = calculate.stats_from_parameters(name='Antonacci', price_set='ETF', fee_rate_bps=10, get_top=2, target_vol=40, periods=6, vol_weight=False)\n s3 = calculate.stats_from_parameters(name='Antonacci', price_set='ETF', fee_rate_bps=10, get_top=2, target_vol=40, periods=6, vol_weight=True)\n s4 = calculate.stats_from_parameters(name='Antonacci', price_set='Futures', fee_rate_bps=10, get_top=47, target_vol=40, periods=6, vol_weight=False)\n s5 = calculate.stats_from_parameters(name='Antonacci', price_set='Futures', fee_rate_bps=10, get_top=10, target_vol=40, periods=6, vol_weight=False)\n s6 = calculate.stats_from_parameters(name='Antonacci', price_set='Futures', fee_rate_bps=10, get_top=10, target_vol=40, periods=6, vol_weight=True)\n s7 = calculate.stats_from_parameters(name='TSMOM', price_set='Futures', fee_rate_bps=10, get_top=10, target_vol=40, periods=6, vol_weight=False)\n s8 = calculate.stats_from_parameters(name='TSMOM', price_set='Futures', fee_rate_bps=10, get_top=10, target_vol=100, periods=6, vol_weight=False)\n\n # The relevant columns from the summary data\n cols = [3, 4, 5, 6]\n num_assets = [7, 2, 2, 47, 10, 10, 47, 47]\n stats = [s1, s2, s3, s4, s5, s6, s7, s8]\n table = [names]\n \n # Collecting the results\n for i, col in enumerate(cols):\n col_list = [round(stat['summary'][col], 2) for stat in stats]\n table.append(col_list)\n\n table.append(num_assets)\n table = list(map(list, zip(*table))) # Transpose\n \n # Creating table headers\n headers = ['Strategy Name', 'Annual Return', 'Annual Vol.', 'Sharpe', 'Max. 
Drawdown', '# Assets']\n \n # Returning latex table\n tbl = tabulate(table, headers, tablefmt=fmt)\n print(tbl)\n \n return tbl", "def build_table(numpoints, table_oversamp, grid_size, im_size, ndims, order, alpha):\n table = []\n\n # build one table for each dimension\n for i in range(ndims):\n J = numpoints[i]\n L = table_oversamp[i]\n K = grid_size[i]\n N = im_size[i]\n\n # The following is a trick of Fessler.\n # It uses broadcasting semantics to quickly build the table.\n t1 = J / 2 - 1 + np.array(range(L)) / L # [L]\n om1 = t1 * 2 * np.pi / K # gam\n s1 = build_spmatrix(\n np.expand_dims(om1, 0),\n numpoints=(J,),\n im_size=(N,),\n grid_size=(K,),\n n_shift=(0,),\n order=(order[i],),\n alpha=(alpha[i],)\n )\n h = np.array(s1.getcol(J - 1).todense())\n for col in range(J - 2, -1, -1):\n h = np.concatenate(\n (h, np.array(s1.getcol(col).todense())), axis=0)\n h = np.concatenate((h.flatten(), np.array([0])))\n\n table.append(h)\n\n return table", "def three_PDF_plots(res=200,table_exts=[''],**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n GR = glo.global_results()\n\n fig, axs = plt.subplots(3, sharex='col',\\\n figsize=(8,15),facecolor='w',\\\n gridspec_kw={'hspace': 0, 'wspace': 0})\n\n # First print cell data distribution\n i = 0\n for gal_index in zip(p.gal_index):\n ax1 = axs[i]\n gal_ob = gal.galaxy(GR=GR, gal_index=gal_index)\n df = gal_ob.cell_data.get_dataframe()\n lognH = np.log10(df.nH)\n hist = np.histogram(lognH[df.nH.values > 0],bins=200,weights=df.m[df.nH.values > 0])\n hist1 = np.asarray(hist[0]) # histogram\n hist2 = np.asarray(hist[1]) # bin edges\n hist1 = hist1*1./sum(hist1)\n ax1.plot(hist2[0:len(hist1)],hist1,drawstyle='steps',ls='-',lw=1.5,\\\n alpha=0.7,color=p.color[0],label='Original cell distribution')\n \n for table_ext,ls,color in zip(table_exts,['--',':'],p.color[1::]):\n if '_M10' in table_ext: lab = 'Mach = 10'\n if '_arepoPDF_M51' in table_ext: lab = 'AREPO parametrized PDF'\n PDF(gal_index,color=color,table_ext=table_ext,ls=ls,res=200,add=True,ax=ax1,label=lab,ow=p.ow)\n \n if i == 0: ax1.legend(loc='upper right',fontsize=12)\n if i == 2: ax1.set_xlabel(getlabel('lnH'))\n ax1.set_ylabel('Mass fraction per bin')\n\n i += 1\n\n if p.savefig:\n if not os.path.isdir(p.d_plot + 'cell_data/PDFs/'): os.mkdir(p.d_plot + 'cell_data/PDFs/') \n plt.savefig(p.d_plot + 'cell_data/PDFs/simple_PDF_%s%s%s_x3.png' % (p.sim_name,p.sim_run,p.table_ext), format='png', dpi=250, facecolor='w')", "def _html_galaxy_properties(html, gal):\n galaxy1, ra1, dec1, diam1 = gal[GALAXYCOLUMN], gal[racolumn], gal[deccolumn], 5 * MOSAICRADIUS / pixscale\n viewer_link = legacyhalos.html.viewer_link(ra1, dec1, diam1, dr10=True)\n\n html.write('<h2>Galaxy Properties</h2>\\n')\n\n html.write('<table>\\n')\n html.write('<tr>\\n')\n #html.write('<th>Index</th>\\n')\n html.write('<th>Galaxy</th>\\n')\n html.write('<th>RA</th>\\n')\n html.write('<th>Dec</th>\\n')\n html.write('<th>Redshift</th>\\n')\n html.write('<th>Viewer</th>\\n')\n #html.write('<th>SkyServer</th>\\n')\n html.write('</tr>\\n')\n\n html.write('<tr>\\n')\n #html.write('<td>{:g}</td>\\n'.format(ii))\n #print(gal['INDEX'], gal['SGA_ID'], gal['GALAXY'])\n #html.write('<td>{}</td>\\n'.format(gal['INDEX']))\n html.write('<td>{}</td>\\n'.format(galaxy1))\n html.write('<td>{:.7f}</td>\\n'.format(ra1))\n html.write('<td>{:.7f}</td>\\n'.format(dec1))\n html.write('<td>{:.5f}</td>\\n'.format(gal[ZCOLUMN]))\n html.write('<td><a href=\"{}\" 
target=\"_blank\">Link</a></td>\\n'.format(viewer_link))\n #html.write('<td><a href=\"{}\" target=\"_blank\">Link</a></td>\\n'.format(_skyserver_link(gal)))\n html.write('</tr>\\n')\n html.write('</table>\\n')", "def gethists():\n histdict = {}\n\n lept_type = ['elect', 'muon']\n\n for lept in lept_type:\n histdict[\"h_misE_{}\".format(lept)] = TH1F('h_misE_{}'.format(lept), 'Gen/Reco missing energy comparison', 200, -70, 40)\n histdict[\"h_misPx_{}\".format(lept)] = TH1F('h_misPx_{}'.format(lept), 'Gen/Reco missing Px comparison', 200, -40, 40)\n histdict[\"h_misPy_{}\".format(lept)] = TH1F('h_misPy_{}'.format(lept), 'Gen/Reco missing Py comparison', 200, -40, 40)\n histdict[\"h_misPz_{}\".format(lept)] = TH1F('h_misPz_{}'.format(lept), 'Gen/Reco missing Pz comparison', 200, -50, 50)\n histdict[\"h_misP_{}\".format(lept)] = TH1F('h_misP_{}'.format(lept), 'Gen/Reco missing position comparison', 200, -30, 30)\n histdict[\"h_misM_{}\".format(lept)] = TH1F(\"h_misM_{}\".format(lept), 'Gen/Reco missing mass comparison', 200, -150, 50)\n histdict[\"h_leptE_{}\".format(lept)] = TH1F(\"h_leptE_{}\".format(lept), 'Gen/Reco lepton energy comparison', 200, -5, 5)\n histdict[\"h_leptTheta_{}\".format(lept)] = TH1F(\"h_leptTheta_{}\".format(lept), 'Gen theta lepton', 200, -5, 5)\n\n histdict[\"h_recoJetsAngle\"] = TH1F(\"h_recoJetsAngle\", 'Angle between the two reconstructed jets', 200, 0, 3.5)\n histdict[\"h_recoJetsTheta\"] = TH1F(\"h_recoJetsTheta\", 'Theta angles of the reconstructed jets', 200, -3.5, 3.5)\n histdict[\"h_recoJetEnergy\"] = TH1F(\"h_recoJetEnergy\", 'Energy of the reconstructed jets', 200, 0, 200)\n\n with_wo = ['FSR', 'woFSR']\n\n # for cut in with_wo:\n # histdict[\"h_ISR_E{}\".format(cut)] = TH1F(\"h_ISR_E{}\".format(cut), 'ISR energy', 200, -0, 150)\n # histdict[\"h_ISR_Theta{}\".format(cut)] = TH1F(\"h_ISR_Theta{}\".format(cut), 'ISR theta', 200, -1.6, 1.6)\n # histdict[\"h_ISR_pz{}\".format(cut)] = TH1F(\"h_ISR_pz{}\".format(cut), 'ISR pz', 200, -10, 10)\n\n # histdict[\"h_ISR_Theta_vs_E{}\".format(cut)] = TH2F(\"h_ISR_Theta_vs_E{}\".format(cut), 'ISR theta versus energy', 750, 0, 150, 200, -1.7, 1.7)\n # histdict[\"h_FSR_Theta_vs_E{}\".format(cut)] = TH2F(\"h_FSR_Theta_vs_E{}\".format(cut), 'FSR theta versus energy', 750, 0, 150, 200, -1.7, 1.7)\n\n histdict[\"h_FSR_E\"] = TH1F(\"h_FSR_E\", 'FSR energy', 200, -1, 5)\n histdict[\"h_FSR_Theta\"] = TH1F(\"h_FSR_Theta\", 'FSR theta', 200, -1.6, 1.6)\n histdict[\"h_FSR_pz\"] = TH1F(\"h_FSR_pz\", 'FSR pz', 200, -10, 10)\n\n # histdict[\"h_FSR_E_electrons\"] = TH1F(\"h_FSR_E_electrons\", 'energy FSR emitted by electrons', 200, -1, 5)\n # histdict[\"h_FSR_Theta_electrons\"] = TH1F(\"h_FSR_Theta_electrons\", 'theta FSR emited by the electrons', 200, -1.6, 1.6)\n # histdict[\"h_FSR_pz_electrons\"] = TH1F(\"h_FSR_pz_electrons\", 'pz FSR emited by the electrons ', 200, -10, 10)\n\n # histdict[\"h_FSR_E_muons\"] = TH1F(\"h_FSR_E_muons\", 'energy FSR emitted by muons', 200, -1, 5)\n # histdict[\"h_FSR_Theta_muons\"] = TH1F(\"h_FSR_Theta_muons\", 'theta FSR emited by the muons', 200, -1.6, 1.6)\n # histdict[\"h_FSR_pz_muons\"] = TH1F(\"h_FSR_pz_muons\", 'pz FSR emited by the muons ', 200, -10, 10)\n\n histdict[\"h_FSR_lepton_angle_vs_E\"] = TH2F(\"h_FSR_lepton_angle_vs_E\", 'Solid angle between the FSR photon and the lepton', 150, -1, 10, 150, 0, 3.17)\n histdict[\"h_E_p_vs_E_FSR\"] = TH2F(\"h_E_p_vs_E_FSRPhoton\", \"E/p ratio versus the FSR energy photon\", 220, -1, 10, 220, 0.9998, 1.0002)\n\n\n histdict[\"h_nonFSR_lepton_angle_vs_E\"] 
= TH2F(\"h_photons_lepton_angle_vs_E\", 'Solid angle between the non-FSR photons and the lepton', 150, -1, 10, 150, 0, 3.17)\n histdict[\"h_E_p_vs_E_nonFSR\"] = TH2F(\"h_E_p_vs_E_NonFSRPhoton\", \"E/p ratio versus the non-FSR energy photon\", 220, -1, 10, 220, 0.9998, 1.0002)\n\n histdict[\"h_test\"] = TH2F(\"h_test\", \"h_test\", 150, -1, 10, 150, 0, 3.17)\n return histdict", "def _U_table(ci, hi, co, ho):\n # TODO: Base U on Table 18.5, Warren D. Seider et. al. Product and Process Design Principles. (2016)\n cip, hip, cop, hop = ci.phase, hi.phase, co.phase, ho.phase\n phases = cip + hip + cop + hop\n if 'g' in phases:\n if ('g' in hip and 'l' in hop) and ('l' in cip and 'g' in cop):\n return 1.0\n else:\n return 0.5\n else:\n return 0.5", "def _Dp_table(cls, ci, hi, co, ho, inside_heating):\n cip, hip, cop, hop = ci.phase, hi.phase, co.phase, ho.phase\n dP_c = cls._get_dP(cip, cop)\n dP_h = cls._get_dP(hip, hop)\n if inside_heating:\n dP_in = dP_c\n dP_out = dP_h\n else:\n dP_in = dP_h\n dP_out = dP_c\n return dP_in, dP_out", "def dp_all(foods, cal_goal, pro_goal, carb_goal, fat_goal):\n costs = init_four_d_array((cal_goal, pro_goal, carb_goal, fat_goal),\n 999999999)\n foods_used = init_four_d_array((cal_goal, pro_goal, carb_goal, fat_goal),\n {})\n\n for i in range(cal_goal):\n for j in range(pro_goal):\n for k in range(carb_goal):\n for l in range(fat_goal):\n for n in range(len(foods)):\n food = foods[n]\n if (int(food['calories']) > i\n or int(food['protein']) > j\n or int(food['carbs']) > k\n or int(food['fat']) > l):\n continue\n if (costs[i - int(food['calories'])]\n [j - int(food['protein'])]\n [k - int(food['carbs'])]\n [l - int(food['fat'])]\n == 999999999):\n prev_cost = 0\n prev_foods_used = {}\n else:\n prev_cost = (macros[i - int(food['calories'])]\n [j - int(food['protein'])]\n [j - int(food['carbs'])]\n [j - int(food['fat'])])\n prev_foods_used = \\\n (foods_used[i - int(food['calories'])]\n [j - int(food['protein'])]\n [k - int(food['carbs'])]\n [l - int(food['fat'])]).copy()\n new_cal = calories(\n foods, prev_foods_used) + food['calories']\n new_pro = protein(\n foods, prev_foods_used) + food['protein']\n new_car = carbs(\n foods, prev_foods_used) + food['protein']\n new_fat = fat(\n foods, prev_foods_used) + food['protein']\n if (costs[i][j] > prev_cost + food['serving_cost']\n and new_cal > i - 20 and new_cal < i + 10\n and new_pro < j + 5 and new_pro < j + 5\n and new_car < j + 5 and new_car < j + 5\n and new_fat < j + 5 and new_fat < j + 5):\n costs[i][j][k][l] = prev_cost + \\\n food['serving_cost']\n try:\n prev_foods_used[n] += 1\n except KeyError:\n prev_foods_used[n] = 1\n foods_used[i][j][k][l] = prev_foods_used\n return foods_used[cal_goal - 1][pro_goal - 1][carb_goal - 1][fat_goal - 1]", "def init_scatter_table(self, tm, angular_integration=False, verbose=False):\n self._psd_D = np.linspace(self.D_max/self.num_points, self.D_max, \n self.num_points)\n\n self._S_table = {}\n self._Z_table = {}\n self._previous_psd = None\n self._m_table = np.empty(self.num_points, dtype=complex)\n if angular_integration:\n self._angular_table = {\n \"sca_xsect\": {\"h_pol\": {}, \"v_pol\": {}},\n \"ext_xsect\": {\"h_pol\": {}, \"v_pol\": {}}, \n \"asym\": {\"h_pol\": {}, \"v_pol\": {}}\n }\n else:\n self._angular_table = None\n \n (old_m, old_axis_ratio, old_radius, old_geom, old_psd_integrator) = \\\n (tm.m, tm.axis_ratio, tm.radius, tm.get_geometry(), \n tm.psd_integrator)\n \n try:\n # temporarily disable PSD integration to avoid recursion\n tm.psd_integrator = None 
\n\n for geom in self.geometries:\n self._S_table[geom] = \\\n np.empty((2,2,self.num_points), dtype=complex)\n self._Z_table[geom] = np.empty((4,4,self.num_points))\n\n if angular_integration:\n for int_var in [\"sca_xsect\", \"ext_xsect\", \"asym\"]:\n for pol in [\"h_pol\", \"v_pol\"]:\n self._angular_table[int_var][pol][geom] = \\\n np.empty(self.num_points)\n\n for (i,D) in enumerate(self._psd_D):\n if verbose:\n print(\"Computing point {i} at D={D}...\".format(i=i, D=D))\n if self.m_func != None:\n tm.m = self.m_func(D)\n if self.axis_ratio_func != None:\n tm.axis_ratio = self.axis_ratio_func(D)\n self._m_table[i] = tm.m\n tm.radius = D/2.0\n for geom in self.geometries:\n tm.set_geometry(geom)\n (S, Z) = tm.get_SZ_orient()\n self._S_table[geom][:,:,i] = S\n self._Z_table[geom][:,:,i] = Z\n\n if angular_integration:\n for pol in [\"h_pol\", \"v_pol\"]:\n h_pol = (pol == \"h_pol\")\n self._angular_table[\"sca_xsect\"][pol][geom][i] = \\\n scatter.sca_xsect(tm, h_pol=h_pol)\n self._angular_table[\"ext_xsect\"][pol][geom][i] = \\\n scatter.ext_xsect(tm, h_pol=h_pol)\n self._angular_table[\"asym\"][pol][geom][i] = \\\n scatter.asym(tm, h_pol=h_pol)\n finally:\n #restore old values\n (tm.m, tm.axis_ratio, tm.radius, tm.psd_integrator) = \\\n (old_m, old_axis_ratio, old_radius, old_psd_integrator) \n tm.set_geometry(old_geom)", "def calculate_gain_table(info_table):\n\n\t# sort and separate\n\tinfo_table['decile'] = 10 - pd.qcut(info_table['proba'], 10, labels = False)\n\n\t# group by each decile, calculate gain score\n\ttotal_positive = sum(info_table['target'])\n\tgain_table = info_table.groupby('decile', as_index = False)['target'].sum()\n\tgain_table = gain_table.rename(columns = {'target': 'positive'})\n\tgain_table['gain_score'] = gain_table['positive'] / total_positive\n\n\t# gain = cumsum(gain score)\n\tgain_table['gain'] = gain_table['gain_score'].cumsum()\n\n\treturn gain_table", "def associate_generator_tables(gf, gen, gens):\n stack_gens = stack_generators(\n gens, cat_col='energy_source_code_num', stacked_col='fuel_type')\n\n gen_assoc = (\n pd.merge(\n stack_gens,\n gen,\n on=IDX_GENS,\n how='outer')\n .pipe(remove_retired_generators)\n .merge(\n gf.groupby(by=IDX_PM_FUEL, as_index=False)\n .sum(min_count=1),\n on=IDX_PM_FUEL,\n suffixes=('_g_tbl', '_gf_tbl'),\n how='outer',\n )\n )\n\n gen_assoc = (\n pd.merge(\n gen_assoc,\n gen_assoc.groupby(by=IDX_FUEL)\n [['capacity_mw', 'net_generation_mwh_g_tbl']].sum(min_count=1)\n .add_suffix('_fuel')\n .reset_index(),\n on=IDX_FUEL,\n )\n .pipe(pudl.helpers.convert_cols_dtypes, 'eia')\n .pipe(_associate_unconnected_records)\n .pipe(_associate_fuel_type_only, gf=gf)\n )\n return gen_assoc", "def get_prices(country, year, square=2, name='p_', store=yearly):\n\n year1 = 'y' + str(year) + '_'\n year0 = 'y' + str(year - 1) + '_'\n\n df1 = yearly[year1 + 'price_' + country]\n df0 = yearly[year0 + 'price_' + country]\n\n df1.name = 'p' + str(year)\n df0.name = 'p' + str(year - 1)\n\n gr1 = df1.groupby(axis=0, level='PRODUCT_NC')\n gr0 = df0.groupby(axis=0, level='PRODUCT_NC')\n\n l1 = []\n drops1 = []\n for product in gr1.groups.keys():\n try:\n l1.append((product, ref_dict[product]))\n except KeyError:\n drops1.append(product)\n\n l0 = []\n drops0 = []\n for product in gr0.groups.keys():\n try:\n l0.append((product, ref_dict[product]))\n except KeyError:\n drops0.append(product)\n\n return pd.DataFrame((np.log(df1) - np.log(df0) - (\n np.log(df1.ix[l1].reset_index(level='PARTNER')['p' + str(year)].reindex(df1.index, level='PRODUCT_NC')) - 
(\n np.log(df0.ix[l0].reset_index(level='PARTNER')['p' + str(year - 1)].reindex(df0.index, level='PRODUCT_NC'))))), columns=[name + str(year)]) ** square", "def __cacula_agio(table):\n from m2py.misc.vectorize import column\n\n PV = table[0][-1]\n total = sum(column(table, 1))\n premium = total/PV - 1\n return round(premium, 2)", "def set_lookup_p(p_max, p_cm, diagram, skip=True):\n\n # for the center-of-mass frame p_max was restricted to (1,1,0)\n if p_cm == 0:\n p_max = 2\n\n lookup_p3 = list(it.ifilter(lambda x: _abs2(x) <= p_max, \\\n it.product(range(-p_max, p_max+1), repeat=3)))\n lookup_p3_reduced = [(0,0,0), (0,0,1), (0,1,1), (1,1,1), (0,0,2)]\n \n if diagram == 'C20' or diagram == 'C2+':\n lookup_so = it.ifilter(lambda x : _abs2(x) == p_cm, lookup_p3)\n lookup_so, lookup_si = it.tee(lookup_so, 2)\n lookup_p = it.ifilter(lambda (x,y): \\\n tuple(x) == tuple(it.imap(operator.neg, y)), \\\n it.product(lookup_so, lookup_si))\n elif diagram == 'C3+':\n lookup_so = it.ifilter(lambda (x,y): \\\n _abs2(list(it.imap(operator.add, x, y))) == p_cm and \\\n not (p_cm == 0 and (tuple(x) == tuple(y))), \\\n it.product(lookup_p3, repeat=2))\n lookup_si = it.ifilter(lambda x : _abs2(x) == p_cm, lookup_p3)\n lookup_p = it.ifilter(lambda ((w,x),y): \\\n tuple(it.imap(operator.add, w, x)) == tuple(it.imap(operator.neg, y)), \\\n it.product(lookup_so, lookup_si))\n\n elif diagram.startswith('C4'):\n # leave out momentum combinations not contributing to rho analysis\n if skip:\n lookup_so = it.ifilter(lambda (x,y): \\\n# _abs2(list(it.imap(operator.add, x, y))) == p_cm and \\\n (tuple(it.imap(operator.add, x, y)) == lookup_p3_reduced[p_cm]) and \\\n not (p_cm == 0 and (tuple(x) == tuple(y))), \\\n it.product(lookup_p3, repeat=2))\n lookup_si = it.ifilter(lambda (x,y): \\\n (tuple(it.imap(operator.neg, it.imap(operator.add, x, y))) == lookup_p3_reduced[p_cm]) and \\\n not (p_cm == 0 and (tuple(x) == tuple(y))), \\\n it.product(lookup_p3, repeat=2))\n else:\n lookup_so = it.ifilter(lambda (x,y): \\\n# _abs2(list(it.imap(operator.add, x, y))) == p_cm and \\\n (tuple(it.imap(operator.add, x, y)) == lookup_p3_reduced[p_cm]), \\\n it.product(lookup_p3, repeat=2))\n lookup_si = it.ifilter(lambda (x,y): \\\n (tuple(it.imap(operator.neg, it.imap(operator.add, x, y))) == lookup_p3_reduced[p_cm]), \\\n it.product(lookup_p3, repeat=2))\n\n lookup_p = it.ifilter(lambda ((w,x),(y,z)): \\\n tuple(it.imap(operator.add, w, x)) == tuple(it.imap(operator.neg, it.imap(operator.add, y, z))), \\\n it.product(lookup_so, lookup_si))\n else:\n print 'in set_lookup_p: diagram unknown! 
Quantum numbers corrupted.'\n return list(lookup_p)", "def print_tables(hash_table, f_output, l_samples):\n\n l_fields = ['chr', 'pos', 'ref', 'alt', 'QUAL', 'FILTER',\n 'Func.refGene', 'Gene.refGene', 'GeneDetail.refGene', 'ExonicFunc.refGene', 'AAChange.refGene',\n 'cytoBand', 'ExAC_ALL', 'ExAC_AFR', 'ExAC_AMR', 'ExAC_EAS', 'ExAC_FIN', 'ExAC_NFE', 'ExAC_OTH',\n 'ExAC_SAS',\n 'avsnp147', 'SIFT_score', 'SIFT_pred', 'Polyphen2_HDIV_score', 'Polyphen2_HDIV_pred',\n 'Polyphen2_HVAR_score',\n 'Polyphen2_HVAR_pred', 'LRT_score', 'LRT_pred', 'MutationTaster_score', 'MutationTaster_pred',\n 'MutationAssessor_score', 'MutationAssessor_pred', 'FATHMM_score', 'FATHMM_pred', 'PROVEAN_score',\n 'PROVEAN_pred', 'VEST3_score', 'CADD_raw', 'CADD_phred', 'DANN_score', 'fathmm-MKL_coding_score',\n 'fathmm-MKL_coding_pred', 'MetaSVM_score', 'MetaSVM_pred', 'MetaLR_score', 'MetaLR_pred',\n 'integrated_fitCons_score', 'integrated_confidence_value', 'GERP++_RS', 'phyloP7way_vertebrate',\n 'phyloP20way_mammalian', 'phastCons7way_vertebrate', 'phastCons20way_mammalian', 'SiPhy_29way_logOdds']\n l_fields = l_fields + l_samples\n \n l_chr = set([item[0] for item in hash_table.keys()])\n\n fo = open(f_output, 'w')\n fo.write(','.join(l_fields) + '\\n')\n for key in sorted(hash_table.keys(), key=itemgetter(1)):\n fo.write(','.join(map(lambda field: hash_table[key].get(field, '.'), l_fields)) + '\\n')\n fo.close()", "def get_appl_table(mat_dict):\n html_str = \"\"\" \"\"\"\n missing_values = False\n for appl_key, appl_dict in applications.items():\n html_str += \"\"\"<h3>{}</h3>\"\"\".format(appl_key)\n for propr in [\"x\", \"y\"]:\n q_dict = quantities[appl_dict[propr]]['dict']\n q_key = quantities[appl_dict[propr]]['key']\n q_unit = quantities[appl_dict[propr]]['unit']\n\n try:\n q_val = mat_dict[q_dict][q_key]\n #quick fix to show all values nicely enough\n if abs(float(q_val)) > 0.01:\n q_val = round(q_val, 3)\n elif abs(float(q_val)) > 0.001:\n q_val = round(q_val, 4)\n except: #pylint: disable=bare-except # noqa: E722\n q_val = \"***\"\n missing_values = True\n html_str += \"&nbsp;&nbsp;&nbsp; {} ({}): {}\".format(appl_dict[propr], q_unit, q_val)\n\n # If the node exists (even for nonporous mat/appl) get the aiida-link\n try:\n q_uuid = mat_dict[q_dict].uuid\n html_str += get_provenance_link(q_uuid)\n except:\n pass\n\n html_str += \"<br>\"\n if missing_values:\n html_str += \"<br><i>*** this property was not computed yet, incurred in some problem,\" +\\\n \"or can not be computed for a nonpermeable system\"\n return (html_str)", "def prep_alloction_fraction(gen_assoc):\n # flag whether the generator exists in the\n # generation table (this will be used later on)\n # for calculating ratios to use to allocate net generation\n gen_assoc = gen_assoc.assign(\n in_g_tbl=lambda x: np.where(\n x.net_generation_mwh_g_tbl.notnull(),\n True, False)\n )\n\n gens_gb = gen_assoc.groupby(by=IDX_PM_FUEL, dropna=False)\n # get the total values for the merge group\n # we would use on groupby here with agg but it is much slower\n # so we're gb-ing twice w/ a merge\n # gens_gb.agg({'net_generation_mwh_g_tbl': lambda x: x.sum(min_count=1),\n # 'capacity_mw': lambda x: x.sum(min_count=1),\n # 'in_g_tbl': 'all'},)\n gen_pm_fuel = (\n gen_assoc\n .merge( # flag if all generators exist in the generators_eia860 tbl\n gens_gb[['in_g_tbl']].all().reset_index(),\n on=IDX_PM_FUEL,\n suffixes=('', '_all')\n )\n .merge( # flag if some generators exist in the generators_eia860 tbl\n gens_gb[['in_g_tbl']].any().reset_index(),\n 
on=IDX_PM_FUEL,\n suffixes=('', '_any')\n )\n # Net generation and capacity are both proxies that can be used\n # to allocate the generation which only shows up in generation_fuel\n # Sum them up across the whole plant-prime-fuel group so we can tell\n # what fraction of the total capacity each generator is.\n .merge(\n (gens_gb\n [['net_generation_mwh_g_tbl', 'capacity_mw']]\n .sum(min_count=1)\n .add_suffix('_pm_fuel')\n .reset_index()),\n on=IDX_PM_FUEL,\n )\n .assign(\n # fill in the missing generation with zeros (this will help ensure\n # the calculations to run the fractions in `calc_allocation_ratios`\n # can be consistent)\n net_generation_mwh_g_tbl=lambda x: x.net_generation_mwh_g_tbl.fillna(\n 0)\n )\n )\n # Add a column that indicates how much capacity comes from generators that\n # report in the generation table, and how much comes only from generators\n # that show up in the generation_fuel table.\n gen_pm_fuel = (\n pd.merge(\n gen_pm_fuel,\n gen_pm_fuel.groupby(by=IDX_PM_FUEL + ['in_g_tbl'], dropna=False)\n [['capacity_mw']].sum(min_count=1)\n .add_suffix('_in_g_tbl_group').reset_index(),\n on=IDX_PM_FUEL + ['in_g_tbl'],\n )\n )\n return gen_pm_fuel", "def main_translate(tablename:str, dimapath:str, debug=None):\n\n no_primary_key = ['tblPlots', 'tblLines', 'tblSpecies','tblSpeciesGeneric',\\\n 'tblSites','tblPlotNotes', 'tblSites']\n soil_stab_primary_key = ['tblSoilStabDetail', 'tblSoilStabHeader']\n soil_pit_primary_key = ['tblSoilPits','tblSoilPitHorizons']\n plant_prod_primary_key = ['tblPlantProdDetail', 'tblPlantProdHeader']\n bsne_primary_keys = ['tblBSNE_Box', 'tblBSNE_Stack','tblBSNE_BoxCollection',\\\n 'tblBSNE_TrapCollection']\n\n switcher_arguments= {\n 'no_pk': (None, dimapath, tablename),\n 'no_pk_soilstab': ('soilstab',dimapath, None),\n 'no_pk_soilpits': ('soilpits',dimapath, None),\n 'no_pk_plantprod': ('plantprod',dimapath, None),\n 'yes_pk': dimapath,\n 'f': ('fake', dimapath, tablename)\n }\n # first check if tablename exists in the particular dima\n if table_check(tablename, dimapath):\n if tablename in no_primary_key:\n # no_pk branch\n network_check = 0\n inst = arcno(dimapath)\n\n for i,j in inst.actual_list.items():\n if any([True for i,j in inst.actual_list.items() if 'BSNE' in i]):\n network_check = 2\n else:\n network_check = 1\n\n while network_check!=0:\n\n if network_check==1:\n print('no_pk; netdima in path; line or plot') if debug else None\n df = switcher[tablename](*switcher_arguments['f'])\n network_check=0\n df = blank_fixer(df)\n df = significant_digits_fix_pandas(df)\n return df\n\n elif network_check==2:\n print('no_pk; netdima in path; line or plot') if debug else None\n df = switcher[tablename](*switcher_arguments['no_pk'])\n network_check=0\n df = blank_fixer(df)\n df = significant_digits_fix_pandas(df)\n return df\n\n elif tablename in soil_stab_primary_key:\n # no_pk + soilstab branch\n print('no_pk; soilstab') if debug else None\n df = switcher[tablename](*switcher_arguments['no_pk_soilstab'])\n df = blank_fixer(df)\n df = significant_digits_fix_pandas(df)\n return df\n\n elif tablename in soil_pit_primary_key:\n # no_pk + soilpits branch\n print('no_pk; soilpits') if debug else None\n df = switcher[tablename](*switcher_arguments['no_pk_soilpits'])\n df = blank_fixer(df)\n df = significant_digits_fix_pandas(df)\n return df\n\n elif tablename in plant_prod_primary_key:\n # no_pk + plantprod branch\n print('no_pk; plantprod') if debug else None\n df = switcher[tablename](*switcher_arguments['no_pk_plantprod'])\n df = 
blank_fixer(df)\n df = significant_digits_fix_pandas(df)\n return df\n\n else:\n # lpi_pk, gap_pk, sperich_pk, plantden_pk, bsne_pk branch\n if tablename in bsne_primary_keys:\n print('bsne collection') if debug else None\n retdf = switcher[tablename](switcher_arguments['yes_pk'])\n retdf = blank_fixer(retdf)\n retdf = significant_digits_fix_pandas(retdf)\n retdf = openingsize_fixer(retdf)\n return retdf\n else:\n print('hmmm?') if debug else None\n df = switcher[tablename](switcher_arguments['yes_pk'])\n arc = arcno()\n iso = arc.isolateFields(df,tableswitch[tablename],\"PrimaryKey\").copy()\n iso.drop_duplicates([tableswitch[tablename]],inplace=True)\n\n target_table = arcno.MakeTableView(tablename, dimapath)\n retdf = pd.merge(target_table, iso, how=\"inner\", on=tableswitch[tablename])\n retdf = blank_fixer(retdf)\n retdf = significant_digits_fix_pandas(retdf)\n retdf = openingsize_fixer(retdf)\n return retdf\n else:\n\n print(f'table not in {os.path.basename(dimapath)}')\n pass", "def question2():\n \n # load sequences and scoring matrix\n score_matrix = read_scoring_matrix(PAM50_URL)\n human_seq = \"HSGVNQLGGVFVNGRPLPDSTRQKIVELAHSGARPCDISRILQVSNGCVSKILGRYYETGSIRPRAIGGSKPRVATPEVVSKIAQYKRECPSIFAWEIRDRLLSEGVCTNDNIPSVSSINRVLRNLASEKQQ\"\n frfly_seq = \"HSGVNQLGGVFVGGRPLPDSTRQKIVELAHSGARPCDISRILQVSNGCVSKILGRYYETGSIRPRAIGGSKPRVATAEVVSKISQYKRECPSIFAWEIRDRLLQENVCTNDNIPSVSSINRVLRNLAAQKEQQ\"\n consensus_pax = read_protein(CONSENSUS_PAX_URL)\n \n # compute human and fruitfly global alignment matrix with consensus pax\n human_align_matrix = student.compute_alignment_matrix(human_seq, consensus_pax, score_matrix, True)\n frfly_align_matrix = student.compute_alignment_matrix(frfly_seq, consensus_pax, score_matrix, True)\n \n # compute human and fruitfly global alignment sequences\n score_human, human_align, consensus_align = student.compute_global_alignment(human_seq, consensus_pax, \n score_matrix, human_align_matrix)\n score_fly, frfly_align, consensus_align_2 = student.compute_global_alignment(frfly_seq, consensus_pax,\n score_matrix, frfly_align_matrix)\n \n # compute percentages match for human and fruitfly\n human_count = 0.0\n for index in range(len(human_align)):\n if human_align[index] == consensus_align[index]:\n human_count += 1\n \n frfly_count = 0.0\n for index in range(len(frfly_align)):\n if frfly_align[index] == consensus_align_2[index]:\n frfly_count += 1\n \n print \"% Human: \" + str(human_count / len(human_align) * 100)\n print \"Hmn: \" + human_align\n print \"PAX: \" + consensus_align\n \n print \"\"\n \n print \"% FrFly: \" + str(frfly_count / len(frfly_align) * 100)\n print \"Fly: \" + frfly_align\n print \"PAX: \" + consensus_align_2", "def chkiapws09table6(printresult=True,chktol=_CHKTOL):\n from teospy.tests.tester import Tester\n args1 = [(273.15,101325.), (273.15,1e8), (313.15,101325.)]\n DERS2 = ((0,0),(1,0),(0,1),(2,0),(1,1),(0,2))\n \n funs = liq_g\n fargs = [(der+args) for args in args1 for der in DERS2]\n refs = [\n 0.101342743e3,0.147644587,0.100015695e-2,-0.154472324e2,\n -0.677459513e-7,-0.508915308e-12,\n 0.977303868e5,0.851506346e1,0.956683354e-3,-0.142970174e2,\n 0.199088060e-6,-0.371527164e-12,\n -0.116198898e5,-0.572365181e3,0.100784471e-2,-0.133463968e2,\n 0.388499694e-6,-0.445841077e-12\n ]\n fnames = 'liq_g'\n argfmt = '({0:1g},{1:1g},{2:6.2f},{3:6g})'\n header = 'F03 liq_g derivatives'\n testder = Tester(funs,fargs,refs,fnames,argfmt,header=header)\n \n funs = [cp,density,soundspeed,\n enthalpy,entropy,helmholtzenergy,internalenergy]\n fargs = args1\n 
refs = [\n [0.421941153e4,0.390523030e4,0.417942416e4],\n [0.999843071e3,0.104527793e4,0.992216354e3],\n [0.140240099e4,0.157543089e4,0.152891242e4],\n [0.610136242e2,0.954044973e5,0.167616267e6],\n [-0.147644587,-0.851506346e1,0.572365181e3],\n [0.183980891e-2,0.206205140e4,-0.117220097e5],\n [-0.403272791e2,-0.263838183e3,0.167514147e6]\n ]\n fnames = ['cp','density','soundspeed',\n 'enthalpy','entropy','helmholtzenergy','internalenergy']\n argfmt = '({0:6.2f},{1:6g})'\n header = 'F03 thermodynamic properties'\n testprop = Tester(funs,fargs,refs,fnames,argfmt,header=header)\n \n # Run Tester instances and print results\n testder.run()\n testprop.run()\n if printresult:\n testder.printresults(chktol=chktol)\n testprop.printresults(chktol=chktol)\n return (testder, testprop)", "def propabilityLVQ(self):\n self.labels = self.labelingLVQ()\n for i in range(self.labels.shape[0]):\n for j in range(self.labels.shape[1]):\n for k in range(self.labels.shape[2]):\n total = sum(self.labels[i, j, k] for i in range(self.labels.shape[0]))\n if total == 0. :\n continue\n else:\n self.propa[i, j, k] = self.labels[i, j, k] / total\n self.propa[i, j, k] = round(self.propa[i, j, k], 2)\n return self.propa", "def build_positional_table(profile):\n prop_dict = {'pos': [], 'ref_base': [], 'cov': [], 'mismatch_rate': [], 'a_mism': [], 'g_mism': [], 't_mism': [],\n 'c_mism': [], 'arrest_rate': []}\n\n ref = sys.argv[3]\n print(ref.replace('__tt__', '|'))\n for line in profile:\n line1 = line.strip().split()\n if line1[0] == ref.replace('__tt__', '|') and start <= int(line1[1]) <= end:\n prop_dict['pos'].append(int(line1[1]))\n prop_dict['ref_base'].append(line1[2])\n prop_dict['cov'].append(int(line1[3]))\n prop_dict['mismatch_rate'].append(float(line1[5]))\n prop_dict['a_mism'].append(int(line1[6]) + int(line1[11]))\n prop_dict['g_mism'].append(int(line1[7]) + int(line1[12]))\n prop_dict['t_mism'].append(int(line1[8]) + int(line1[13]))\n prop_dict['c_mism'].append(int(line1[9]) + int(line1[14]))\n prop_dict['arrest_rate'].append(float(line1[-1]))\n\n return prop_dict", "def galaxy_selection_func(table, min_mass=10**8.5, max_mass=np.inf, prim_gal_prop='baryonic_mass'):\n\n mask = (table[prim_gal_prop] >= min_mass) & (table[prim_gal_prop] < max_mass)\n return mask", "def gpa_calculator():\n gpa = 0.0\n grade_array = []\n credit_array = []\n grade_converter = {\"A\": 4.00, \"A-\":3.67, \"B+\": 3.33, \"B\": 3.00, \"B-\": 2.67, \"C+\": 2.33, \"C\": 2.00, \"C-\": 1.67, \"D\": 1.00, \"F\": 0.0}\n with open('full_courses.json', 'r') as fp:\n full_courses = json.load(fp)\n for val in full_courses.values():\n if val[2] == 'C':\n credit_array.append(val[0])\n for i, val2 in grade_converter.items():\n if val[1] == i:\n grade_array.append(val2)\n final_array = [val*val1 for val,val1 in zip(grade_array, credit_array)]\n gpa = round(sum(final_array)/sum(credit_array),2)\n print(\"GPA CALCULATED AS: \"+str(gpa))\n return gpa", "def lof_sig_scores(table, samples, verbose=True):\n mut_probdam = 'Missense:Probably'\n mut_syn = 'Synonymous'\n mut_trunc = ['Nonsense', 'Frameshift', 'Splice-site']\n mut_other = ['Missense:Benign', 'Missense:Possibly', 'MissenseNA', 'Indel']\n mut_all = [mut_probdam, mut_syn] + mut_trunc + mut_other\n\n # Calculate the global nonsynonymous:synonymous ratio ---------------------\n # Within each mutation category, sum counts (across all genes)\n tot_count_probdam = sum(table[mut_probdam])\n tot_count_syn = sum(table[mut_syn])\n tot_count_trunc = sum(itertools.chain(*(list(table[col])\n for col in 
mut_trunc)))\n tot_count_other = sum(itertools.chain(*(list(table[col])\n for col in mut_other)))\n\n # Global mutation count across all categories and genes (= 3504)\n tot_count_all = sum((tot_count_probdam, tot_count_syn, tot_count_trunc,\n tot_count_other))\n if verbose:\n print(\"Counted\", tot_count_all, \"mutations across\", len(table), \"genes\",\n \"and\", len(samples), \"samples\", file=sys.stderr)\n\n # Fraction of global mutations in each category of interest\n tot_frac_probdam = tot_count_probdam / tot_count_all\n tot_frac_syn = tot_count_syn / tot_count_all\n tot_frac_trunc = tot_count_trunc / tot_count_all\n\n # Global nonsynonymous:synonymous ratio = (1-syn)/syn (= 2.13697)\n tot_ns_s_ratio = (1 - tot_frac_syn) / tot_frac_syn\n\n # Calculate each gene's mutation score ------------------------------------\n for _idx, row in table.iterrows():\n gene_count_all = sum([row[col] for col in mut_all])\n if not gene_count_all:\n # Gene is not mutated at all --> zero score\n yield (row['Gene'], 0.0)\n continue\n\n # Initial score is the sum the 'Normalized' values across all samples\n raw_score = sum(row[sid] for sid in samples)\n\n # Adjust for NS:S ratio\n gene_count_syn = row[mut_syn]\n syn_factor = max(1 - tot_ns_s_ratio * gene_count_syn / gene_count_all,\n 0)\n new_score = raw_score * syn_factor\n\n # Adjust for \"probably damaging\" missense and truncating mutations\n gene_frac_probdam = row[mut_probdam] / gene_count_all\n probdam_factor = 1 + gene_frac_probdam - tot_frac_probdam\n gene_frac_trunc = sum([row[col] for col in mut_trunc]) / gene_count_all\n trunc_factor = gene_frac_trunc / tot_frac_trunc\n final_score = new_score * probdam_factor * trunc_factor\n yield (row['Gene'], final_score)", "def calc_GC(filepath):\n liste=['small.exon.piRNA_2.fa', 'small.exon.piRNA_1.fa', 'small.exon.piRNA_3.fa']\n \n length=list(range(0,34))\n d={}\n for i in length:\n d[i]={'A':0, 'G':0, 'T':0, 'C':0}\n for i in liste:\n with open(filepath+'/'+i, 'r') as f:\n for line in f:\n #fasta header starts with >\n if line.startswith('>'):\n pass\n else:\n line_l=list(line)\n for el in range(len(line_l)):\n if line_l[el]=='A':\n d[el]['A']+=1\n elif line_l[el]=='T':\n d[el]['T']+=1\n elif line_l[el]== 'G':\n d[el]['G']+=1\n elif line_l[el]== 'C':\n d[el]['C']+=1\n\n df=pd.DataFrame.from_dict(d)\n df=df.transpose()\n df.index = np.arange(1, len(df) + 1)\n \n\n df['A [%]']=df['A']/(df['A'].sum()+df['G'].sum()+df['C'].sum()+df['T'].sum())*100\n df['G [%]']=df['G']/(df['A'].sum()+df['G'].sum()+df['C'].sum()+df['T'].sum())*100\n df['T [%]']=df['T']/(df['A'].sum()+df['G'].sum()+df['C'].sum()+df['T'].sum())*100\n df['C [%]']=df['C']/(df['A'].sum()+df['G'].sum()+df['C'].sum()+df['T'].sum())*100", "def kl_divergence(self):\r\n\r\n target_columns = list(self.origdst.columns[11:-3])\r\n target_columns.append(self.origdst.columns[1]) # channel\r\n target_columns.append(self.origdst.columns[2]) # program_title\r\n target_columns.append(self.origdst.columns[3]) # genre\r\n\r\n kl_dict = {}\r\n\r\n for col in target_columns:\r\n\r\n try:\r\n\r\n col_counts_orig = self.origdst[col].value_counts(normalize=True).sort_index(ascending=True)\r\n col_counts_synth = self.synthdst[col].value_counts(normalize=True).sort_index(ascending=True)\r\n\r\n kl = sum(rel_entr(col_counts_orig.tolist(), col_counts_synth.tolist()))\r\n\r\n kl_dict[col] = kl\r\n\r\n except:\r\n\r\n print('For the column ', col, ' you must generate the same unique values as the real dataset.')\r\n print('The number of unique values than you should 
generate for column ', col, 'is ',\r\n len(self.origdst[col].unique()))\r\n\r\n return kl_dict", "def coolingrate(tables, density, He_mass_frac=0.258, metallicity=0.25):\n # list of all elements in table\n elements = [\n x for x in tables.keys() if x not in [\n 'Header', 'Metal_free', 'Solar', 'Total_Metals']\n ]\n \n # Helium mass fraction bins of only hydrogen and helium\n metal_free_He_mass_frac = list(\n tables['Metal_free']['Helium_mass_fraction_bins']\n )\n\n # Ratio of free electron and hydrogen number densities (n_e/n_H) as a \n # function of /Metal_free/Hydrogen_density_bins, \n # /Metal_free/Temperature_bins, and helium abundance.\n metal_free_ne_nh = list(\n tables['Metal_free']['Electron_density_over_n_h']\n )\n \n # Find index of He mass frac, ssuming direct match in table.\n He_mass_frac_index = metal_free_He_mass_frac.index(np.float32(He_mass_frac))\n # Find index of density bin, ssuming direct match in table.\n abundance_index = list(\n tables[elements[0]]['Hydrogen_density_bins']).index(np.float32(density)\n )\n\n # Ratio of free electron and hydrogen number densities (n_e/n_H) as a \n # function of temperature at given denisty and helium mass fraction.\n metal_free_ne_nh = list(\n tables['Metal_free']\\\n ['Electron_density_over_n_h']\\\n [He_mass_frac_index, :,abundance_index]\n )\n\n # Ratio of free electron and hydrogen number densities (n_e/n_H) as a \n # function of temperature at given density for solar abundance.\n solar_ne_nh = tables['Solar']\\\n ['Electron_density_over_n_h']\\\n [:, abundance_index]\n\n # Normalized net cooling rate for only hydrogen and helium as a function of \n # /Metal_free/Hydrogen_density_bins, /Metal_free/Temperature_bins, \n # and helium abundance. \n lambda_metal_free = tables['Metal_free']\\\n ['Net_Cooling']\\\n [He_mass_frac_index, :, abundance_index]\n \n # Temperature bins (T [K])\n T = tables['Metal_free']['Temperature_bins']\n \n sum_metals = np.zeros(352)\n for e in elements:\n # Normalized, net cooling rate e as a function of temperature for \n # Hydrogen density 'density'.\n lambda_i_solar = tables[e]['Net_Cooling'][:,abundance_index]\n \n # Calulating the cooling contribution of element e and addind to total.\n sum_metals += lambda_i_solar*(metal_free_ne_nh/solar_ne_nh)*metallicity\n \n coolingrate = lambda_metal_free + sum_metals\n \n return T, coolingrate", "def poly_rogue_gtdb_reps(self,\r\n domain_gids,\r\n taxa_gid_map,\r\n gtdb_decorate_table):\r\n \r\n self.logger.info('Identifying polyphyletic and rogue GTDB representatives.')\r\n poly_taxa_count = 0\r\n poly_gids = set()\r\n rogue_gids = set()\r\n with open(gtdb_decorate_table) as f:\r\n f.readline()\r\n for line in f:\r\n tokens = line.split('\\t')\r\n \r\n taxon = tokens[0]\r\n fmeasure = float(tokens[2])\r\n rogue_in = tokens[7].strip()\r\n rogue_out = tokens[8].strip()\r\n if fmeasure < 1.0:\r\n poly_taxa_count += 1\r\n poly_gids.update(taxa_gid_map[taxon])\r\n \r\n if rogue_in:\r\n for gid in rogue_in.split(','):\r\n gid = canonical_gid(gid.strip())\r\n if not gid.startswith('D-'):\r\n rogue_gids.add(gid)\r\n \r\n if rogue_out:\r\n for gid in rogue_out.split(','):\r\n gid = canonical_gid(gid.strip())\r\n if not gid.startswith('D-'):\r\n rogue_gids.add(gid)\r\n\r\n self.logger.info(' - identified {:,} polyphyletic taxa spanning {:,} GTDB representatives.'.format(\r\n poly_taxa_count,\r\n len(poly_gids)))\r\n self.logger.info(' - identified {:,} rogue GTDB representatives.'.format(\r\n len(rogue_gids)))\r\n\r\n self.logger.info('Creating curation lists and 
pseudo-trees of polyphyletic GTDB representatives.')\r\n out_file = os.path.join(self.output_dir, f'gids_poly_taxa.{self.domain}.lst')\r\n fout = open(out_file, 'w')\r\n for gid in poly_gids:\r\n fout.write('{}\\n'.format(gid))\r\n fout.close()\r\n self.pseudo_tree(poly_gids, out_file.replace('.lst', '.tree'))\r\n \r\n self.logger.info('Creating curation lists and pseudo-trees of rogue GTDB representatives.')\r\n out_file = os.path.join(self.output_dir, f'gids_rogues.{self.domain}.lst')\r\n fout = open(out_file, 'w')\r\n for gid in rogue_gids:\r\n fout.write('{}\\n'.format(gid))\r\n fout.close()\r\n self.pseudo_tree(rogue_gids, out_file.replace('.lst', '.tree'))", "def get_gold_probdist():\n\n # Read in the dataset as a pandas dataframe.\n card_data_annot = gspd.read_in_categorised()\n\n # Based on the frequencies of each category in the data, create probability distribution and return.\n probdist_dict = gspd.freq_dist_to_prob_dist(card_data_annot)\n return probdist_dict", "def _vi_tables(im_true, im_test, table=None, ignore_labels=()):\n check_shape_equality(im_true, im_test)\n\n if table is None:\n # normalize, since it is an identity op if already done\n pxy = contingency_table(\n im_true, im_test,\n ignore_labels=ignore_labels, normalize=True\n )\n\n else:\n pxy = table\n\n # compute marginal probabilities, converting to 1D array\n px = np.ravel(pxy.sum(axis=1))\n py = np.ravel(pxy.sum(axis=0))\n\n # use sparse matrix linear algebra to compute VI\n # first, compute the inverse diagonal matrices\n px_inv = sparse.diags(_invert_nonzero(px))\n py_inv = sparse.diags(_invert_nonzero(py))\n\n # then, compute the entropies\n hygx = -px @ _xlogx(px_inv @ pxy).sum(axis=1)\n hxgy = -_xlogx(pxy @ py_inv).sum(axis=0) @ py\n\n return list(map(np.asarray, [hxgy, hygx]))", "def read_multiband(galaxy, galaxydir, filesuffix='custom',\n refband='r', bands=['g', 'r', 'i', 'z'], pixscale=0.262,\n galex_pixscale=1.5, unwise_pixscale=2.75,\n galaxy_id=None, galex=False, unwise=False,\n redshift=None, fill_value=0.0, sky_tests=False, verbose=False):\n import fitsio\n from astropy.table import Table\n import astropy.units as u \n from astrometry.util.fits import fits_table\n from legacypipe.bits import MASKBITS\n from legacyhalos.io import _get_psfsize_and_depth, _read_image_data\n\n #galaxy_id = np.atleast_1d(galaxy_id)\n #if len(galaxy_id) > 1:\n # raise ValueError('galaxy_id in read_multiband cannot be a >1-element vector for now!')\n #galaxy_id = galaxy_id[0]\n #assert(np.isscalar(galaxy_id))\n\n # Dictionary mapping between optical filter and filename coded up in\n # coadds.py, galex.py, and unwise.py, which depends on the project.\n data, filt2imfile, filt2pixscale = {}, {}, {}\n\n for band in bands:\n filt2imfile.update({band: {'image': '{}-image'.format(filesuffix),\n 'model': '{}-model'.format(filesuffix),\n 'invvar': '{}-invvar'.format(filesuffix),\n 'psf': '{}-psf'.format(filesuffix),\n }})\n filt2pixscale.update({band: pixscale})\n filt2imfile.update({'tractor': '{}-tractor'.format(filesuffix),\n 'sample': 'sample',\n 'maskbits': '{}-maskbits'.format(filesuffix),\n })\n\n optbands = bands\n if galex:\n galex_bands = ['FUV', 'NUV']\n #galex_bands = ['fuv', 'nuv'] # ['FUV', 'NUV']\n bands = bands + galex_bands\n for band in galex_bands:\n filt2imfile.update({band: {'image': '{}-image'.format(filesuffix),\n 'model': '{}-model'.format(filesuffix),\n 'invvar': '{}-invvar'.format(filesuffix),\n 'psf': '{}-psf'.format(filesuffix)}})\n filt2pixscale.update({band: galex_pixscale})\n \n if unwise:\n 
unwise_bands = ['W1', 'W2', 'W3', 'W4']\n #unwise_bands = ['w1', 'w2', 'w3', 'w4'] # ['W1', 'W2', 'W3', 'W4']\n bands = bands + unwise_bands\n for band in unwise_bands:\n filt2imfile.update({band: {'image': '{}-image'.format(filesuffix),\n 'model': '{}-model'.format(filesuffix),\n 'invvar': '{}-invvar'.format(filesuffix),\n 'psf': '{}-psf'.format(filesuffix)}})\n filt2pixscale.update({band: unwise_pixscale})\n\n data.update({'filt2pixscale': filt2pixscale})\n\n # Do all the files exist? If not, bail!\n missing_data = False\n for filt in bands:\n for ii, imtype in enumerate(filt2imfile[filt].keys()):\n #if imtype == 'sky': # this is a dictionary entry\n # continue\n imfile = os.path.join(galaxydir, '{}-{}-{}.fits.fz'.format(galaxy, filt2imfile[filt][imtype], filt))\n #print(imtype, imfile)\n if os.path.isfile(imfile):\n filt2imfile[filt][imtype] = imfile\n else:\n if verbose:\n print('File {} not found.'.format(imfile))\n missing_data = True\n break\n \n data['failed'] = False # be optimistic!\n data['missingdata'] = False\n data['filesuffix'] = filesuffix\n if missing_data:\n data['missingdata'] = True\n return data, None\n\n # Pack some preliminary info into the output dictionary.\n data['bands'] = bands\n data['refband'] = refband\n data['refpixscale'] = np.float32(pixscale)\n\n # We ~have~ to read the tractor catalog using fits_table because we will\n # turn these catalog entries into Tractor sources later.\n tractorfile = os.path.join(galaxydir, '{}-{}.fits'.format(galaxy, filt2imfile['tractor']))\n if verbose:\n print('Reading {}'.format(tractorfile))\n \n cols = ['ra', 'dec', 'bx', 'by', 'type', 'ref_cat', 'ref_id',\n 'sersic', 'shape_r', 'shape_e1', 'shape_e2']\n for band in bands:\n cols = cols + ['flux_{}'.format(band.lower()), 'flux_ivar_{}'.format(band.lower())]\n cols = cols + ['mw_transmission_{}'.format(band.lower())]\n for band in optbands:\n cols = cols + ['nobs_{}'.format(band.lower()), 'psfdepth_{}'.format(band.lower()),\n 'psfsize_{}'.format(band.lower())]\n if galex:\n cols = cols+['flux_fuv', 'flux_nuv', 'flux_ivar_fuv', 'flux_ivar_nuv']\n if unwise:\n cols = cols+['flux_w1', 'flux_w2', 'flux_w3', 'flux_w4',\n 'flux_ivar_w1', 'flux_ivar_w2', 'flux_ivar_w3', 'flux_ivar_w4']\n \n tractor = fits_table(tractorfile, columns=cols)\n hdr = fitsio.read_header(tractorfile)\n if verbose:\n print('Read {} sources from {}'.format(len(tractor), tractorfile))\n data.update(_get_psfsize_and_depth(tractor, bands, pixscale, incenter=False))\n\n # Read the maskbits image and build the starmask.\n maskbitsfile = os.path.join(galaxydir, '{}-{}.fits.fz'.format(galaxy, filt2imfile['maskbits']))\n if verbose:\n print('Reading {}'.format(maskbitsfile))\n maskbits = fitsio.read(maskbitsfile)\n # initialize the mask using the maskbits image\n starmask = ( (maskbits & MASKBITS['BRIGHT'] != 0) | (maskbits & MASKBITS['MEDIUM'] != 0) |\n (maskbits & MASKBITS['CLUSTER'] != 0) | (maskbits & MASKBITS['ALLMASK_G'] != 0) |\n (maskbits & MASKBITS['ALLMASK_R'] != 0) | (maskbits & MASKBITS['ALLMASK_Z'] != 0) )\n\n # Are we doing sky tests? 
If so, build the dictionary of sky values here.\n\n # subsky - dictionary of additional scalar value to subtract from the imaging,\n # per band, e.g., {'g': -0.01, 'r': 0.002, 'z': -0.0001}\n if sky_tests:\n #imfile = os.path.join(galaxydir, '{}-{}-{}.fits.fz'.format(galaxy, filt2imfile[refband]['image'], refband))\n hdr = fitsio.read_header(filt2imfile[refband]['image'], ext=1)\n nskyaps = hdr['NSKYANN'] # number of annuli\n\n # Add a list of dictionaries to iterate over different sky backgrounds.\n data.update({'sky': []})\n \n for isky in np.arange(nskyaps):\n subsky = {}\n subsky['skysuffix'] = '{}-skytest{:02d}'.format(filesuffix, isky)\n for band in bands:\n refskymed = hdr['{}SKYMD00'.format(band.upper())]\n skymed = hdr['{}SKYMD{:02d}'.format(band.upper(), isky)]\n subsky[band] = refskymed - skymed # *add* the new correction\n print(subsky)\n data['sky'].append(subsky)\n\n # Read the basic imaging data and masks.\n data = _read_image_data(data, filt2imfile, starmask=starmask,\n filt2pixscale=filt2pixscale,\n fill_value=fill_value, verbose=verbose)\n \n # Find the galaxies of interest.\n samplefile = os.path.join(galaxydir, '{}-{}.fits'.format(galaxy, filt2imfile['sample']))\n sample = Table(fitsio.read(samplefile))\n print('Read {} sources from {}'.format(len(sample), samplefile))\n\n # keep all objects\n galaxy_indx = []\n galaxy_indx = np.hstack([np.where(sid == tractor.ref_id)[0] for sid in sample[REFIDCOLUMN]])\n #if len(galaxy_indx\n\n #sample = sample[np.searchsorted(sample['VF_ID'], tractor.ref_id[galaxy_indx])]\n assert(np.all(sample[REFIDCOLUMN] == tractor.ref_id[galaxy_indx]))\n\n tractor.diam_init = np.zeros(len(tractor), dtype='f4')\n tractor.pa_init = np.zeros(len(tractor), dtype='f4')\n tractor.ba_init = np.zeros(len(tractor), dtype='f4')\n if 'DIAM_INIT' in sample.colnames and 'PA_INIT' in sample.colnames and 'BA_INIT' in sample.colnames:\n tractor.diam_init[galaxy_indx] = sample['DIAM_INIT']\n tractor.pa_init[galaxy_indx] = sample['PA_INIT']\n tractor.ba_init[galaxy_indx] = sample['BA_INIT']\n \n # Do we need to take into account the elliptical mask of each source??\n srt = np.argsort(tractor.flux_r[galaxy_indx])[::-1]\n galaxy_indx = galaxy_indx[srt]\n print('Sort by flux! 
', tractor.flux_r[galaxy_indx])\n galaxy_id = tractor.ref_id[galaxy_indx]\n\n data['galaxy_id'] = galaxy_id\n data['galaxy_indx'] = galaxy_indx\n\n # Now build the multiband mask.\n data = _build_multiband_mask(data, tractor, filt2pixscale,\n fill_value=fill_value,\n verbose=verbose)\n\n #import matplotlib.pyplot as plt\n #plt.clf() ; plt.imshow(np.log10(data['g_masked'][0]), origin='lower') ; plt.savefig('junk1.png')\n ##plt.clf() ; plt.imshow(np.log10(data['r_masked'][1]), origin='lower') ; plt.savefig('junk2.png')\n ##plt.clf() ; plt.imshow(np.log10(data['r_masked'][2]), origin='lower') ; plt.savefig('junk3.png')\n #pdb.set_trace()\n\n # Gather some additional info that we want propagated to the output ellipse\n # catalogs.\n allgalaxyinfo = []\n for igal, (galaxy_id, galaxy_indx) in enumerate(zip(data['galaxy_id'], data['galaxy_indx'])):\n samp = sample[sample[REFIDCOLUMN] == galaxy_id]\n galaxyinfo = {'refid': (str(galaxy_id), None)}\n #for band in ['fuv', 'nuv', 'g', 'r', 'z', 'w1', 'w2', 'w3', 'w4']:\n # galaxyinfo['mw_transmission_{}'.format(band)] = (samp['MW_TRANSMISSION_{}'.format(band.upper())][0], None)\n \n # 'galaxy': (str(np.atleast_1d(samp['GALAXY'])[0]), '')}\n #for key, unit in zip(['ra', 'dec'], [u.deg, u.deg]):\n # galaxyinfo[key] = (np.atleast_1d(samp[key.upper()])[0], unit)\n allgalaxyinfo.append(galaxyinfo)\n \n return data, allgalaxyinfo", "def Gamma_per_grain(ZZall, Gamma_a_Z, ZZ_fz, fdist, GG):\n\n # index in the ZZall array for the charges in ZZ_fz\n zi_down = np.where(ZZall == ZZ_fz[0])[0][0]# find the index of the ZZ_fz[0] in ZZall \n zi_up = np.where(ZZall == ZZ_fz[-1])[0][0]# find the index of the ZZ_fz[-1] in ZZall\n \n #Gamma_pe_a = np.sum(fz*Gamma_dotdot_scaled[zi_down:zi_up+1])\n Gamma_pe_a = np.sum(fdist*Gamma_a_Z[zi_down:zi_up+1])\n \n return Gamma_pe_a", "def calculate_averaged_properties(poly_data, bucket):\n\n locator = vtk.vtkPointLocator()\n locator.SetDataSet(poly_data)\n locator.BuildLocator()\n\n LENGTH = 0.03\n MODIFIER = 3e3\n\n volume = numpy.zeros(poly_data.GetNumberOfPoints())\n temperature = numpy.zeros(poly_data.GetNumberOfPoints())\n solid_pressure = numpy.zeros(poly_data.GetNumberOfPoints())\n velocity = numpy.zeros((poly_data.GetNumberOfPoints(), 3))\n solid_pressure_gradient = numpy.zeros((poly_data.GetNumberOfPoints(), 3))\n\n for particle in bucket:\n point_list = vtk.vtkIdList()\n locator.FindPointsWithinRadius(LENGTH, particle.pos, point_list)\n\n beta = 1.0/6.0*numpy.pi*particle.parameters.diameter**3\n\n for _ in range(point_list.GetNumberOfIds()):\n point_index = point_list.GetId(_)\n\n particle2 = bucket.particles[point_index]\n\n rad2 = distance2(particle2.pos, particle.pos)\n rad2 /= LENGTH**2\n\n gamma = beta*numpy.exp(-rad2)*MODIFIER\n\n volume[point_index] += gamma\n\n velocity[point_index, :] += particle.vel*gamma\n\n volume /= 0.5*LENGTH**2*(1.0-numpy.exp(-1.0**2))\n velocity /= 0.5*LENGTH**2*(1.0-numpy.exp(-1.0**2))\n\n for i in range(3):\n velocity[:, i] /= volume\n\n for k, particle in enumerate(bucket):\n point_list = vtk.vtkIdList()\n locator.FindPointsWithinRadius(LENGTH, particle.pos, point_list)\n\n beta = 1.0/6.0*numpy.pi*particle.parameters.diameter**3\n\n for _ in range(point_list.GetNumberOfIds()):\n point_index = point_list.GetId(_)\n\n rad2 = distance2(poly_data.GetPoints().GetPoint(point_index), particle.pos)\n rad2 /= LENGTH**2\n\n gamma = beta*numpy.exp(-rad2)*MODIFIER\n\n c = distance2(particle.vel, velocity[k, :])\n\n temperature[point_index] += c*gamma\n\n\n for particle in bucket:\n point_list = 
vtk.vtkIdList()\n locator.FindPointsWithinRadius(LENGTH, particle.pos, point_list)\n\n beta = 1.0/6.0*numpy.pi*particle.parameters.diameter**3\n\n for _ in range(point_list.GetNumberOfIds()):\n point_index = point_list.GetId(_)\n\n rad2 = distance2(poly_data.GetPoints().GetPoint(point_index), particle.pos)\n rad2 /= LENGTH **2\n\n gamma = beta*numpy.exp(-rad2)*MODIFIER\n\n c = distance2(particle.vel, velocity[point_index, :])\n\n val = (bucket.particles[point_index].pos-particle.pos)/LENGTH**2\n\n spg = ((radial_distribution_function(volume[point_index])\n +volume[point_index]*rdf_deriv(volume[point_index]))*temperature[point_index]\n +c*volume[point_index]*radial_distribution_function(volume[point_index]))\n\n solid_pressure_gradient[point_index, :] += (val*spg*gamma)\n\n for _ in range(poly_data.GetNumberOfPoints()):\n\n solid_pressure[_] = (bucket.particles[0].parameters.rho*volume[_]\n *radial_distribution_function(volume[_])*temperature[_])\n\n data = [vtk.vtkDoubleArray()]\n data[0].SetName('SolidVolumeFraction')\n data.append(vtk.vtkDoubleArray())\n data[1].SetName('SolidVolumeVelocity')\n data[1].SetNumberOfComponents(3)\n data.append(vtk.vtkDoubleArray())\n data[2].SetName('GranularTemperature')\n data.append(vtk.vtkDoubleArray())\n data[3].SetName('SolidPressure')\n data.append(vtk.vtkDoubleArray())\n data[4].SetName('SolidPressureGradient')\n data[4].SetNumberOfComponents(3)\n\n for _ in range(poly_data.GetNumberOfPoints()):\n data[0].InsertNextValue(volume[_])\n data[1].InsertNextTuple3(*velocity[_])\n data[2].InsertNextValue(temperature[_])\n data[3].InsertNextValue(solid_pressure[_])\n data[4].InsertNextTuple3(*solid_pressure_gradient[_])\n\n for _ in data:\n poly_data.GetPointData().AddArray(_)\n\n return data[4]", "def link_caesarGalProp_galname(galObj, galname, index, groupID, galnames, mstar, mgas, mbh, fedd_array, sfr, sfrsd, sfrsd_manual, gassd, gassd_manual, gasR, gasR_half, starR_half, Zgas, Zstar, fgas, fh2, gdr, central, mhalo, hid, SFRSD_manual, gasSD_manual, f_h2, bhmdot, DTM, Zmet_massweighted, frad=0.1):\n\n phm, phid = -1, -1\n\n if galObj.halo is not None:\n phm, phid = galObj.halo.masses['total'], galObj.halo.GroupID\n\n try:\n bhmdots = [bhmdot[k] for k in galObj.bhlist]\n bm = galObj.masses['bh']\n imax = np.argmax(bm)\n try:\n bm = bm[imax]\n bmdot = bhmdots[imax] # only the massive BH particle matters.\n except:\n bm = bm\n bmdot = bhmdots\n\n mdot_edd = 4*np.pi*6.67e-8*1.673e-24/(frad*3.e10*6.65245e-25) * bm * 3.155e7 # in Mo/yr\n fedd = bmdot / mdot_edd\n fedd = fedd[0].value\n except:\n bm = 0\n fedd = 0\n\n groupID.append(galObj.GroupID)\n galnames.append(galname)\n\n mstar.append(galObj.masses['stellar'])\n mgas.append(galObj.masses['gas'])\n mbh.append(bm)\n fedd_array.append(fedd)\n sfr.append(galObj.sfr)\n sfrsd.append(galObj.sfr/np.pi/(galObj.radii['gas'].in_units('kpc'))**2)\n sfrsd_manual.append(SFRSD_manual)\n gassd.append(galObj.masses['gas']/np.pi/(galObj.radii['gas'].in_units('pc'))**2)\n gassd_manual.append(gasSD_manual)\n gasR.append(galObj.radii['gas'].in_units('kpc'))\n gasR_half.append(galObj.radii['gas_half_mass'].in_units('kpc'))\n starR_half.append(galObj.radii['stellar_half_mass'].in_units('kpc'))\n Zgas.append(galObj.metallicities['sfr_weighted']/0.0134)\n Zstar.append(galObj.metallicities['stellar']/0.0134)\n fgas.append(galObj.gas_fraction) # = Mgas / (Mg + Ms)\n fh2.append(f_h2)\n gdr.append(galObj.masses['gas']/galObj.masses['dust'])\n central.append(galObj.central)\n mhalo.append(phm)\n hid.append(phid)\n\n Zmet = 
galObj.metallicities['mass_weighted'] / 0.0134\n Zmet_massweighted.append(Zmet)\n\n Mgmet = Zmet * (galObj.masses['gas'] - galObj.masses['dust'])\n dtm = np.log10(galObj.masses['dust'] / (Mgmet + galObj.masses['dust']) + 1.e-9)\n DTM.append(dtm)\n\n return groupID, galnames, mstar, mgas, mbh, fedd_array, sfr, sfrsd, sfrsd_manual, gassd, gassd_manual, gasR, gasR_half, starR_half, Zgas, Zstar, fgas, fh2, gdr, central, mhalo, hid, DTM, Zmet_massweighted", "def get_translate_table(self):\n self.groundstate = []\n if self.header['CIMETHOD'] == 'DOCI':\n reg = r'^(\\d+)\\s+(\\d+)\\s+([\\-+]?\\d+\\.*\\d*[eEdD]?[\\-+]?\\d*)' \n else:\n reg = r'^(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+([\\-+]?\\d+\\.*\\d*[eEdD]?[\\-+]?\\d*)' \n self.mapdict = {}\n with open(self.filename,\"r\") as file:\n index = 0 \n for line in file:\n match = re.search(reg,line)\n if match:\n if self.header['CIMETHOD'] == 'DOCI':\n self.mapdict[match.group(2)+ '|' + match.group(2)] = int(match.group(1))\n self.groundstate.append(float(match.group(3)))\n else:\n self.mapdict[match.group(2)+'|'+match.group(3) ] = int(match.group(1))\n self.groundstate.append(float(match.group(4)))\n assert(index == int(match.group(1)) ), str(index) + 'not found ' + 'WARNING' + match.group(1)\n index += 1\n if \"#Significant determinants\" in line:\n break\n assert(len(self.mapdict) == self.header['dim']), 'length dict: ' + str(len(self.mapdict)) +'header: '+ str(self.header['dim'])+ self.filename\n assert(len(self.groundstate) == self.header['dim']), 'length groundstate: ' + str(len(self.mapdict)) +'header: '+ str(self.header['dim']) + self.filename\n self.groundstate = np.array(self.groundstate) \n #if self.groundstate != None:\n #print 'We have read in the groundstate vec: ', self.groundstate", "def galaxy_selection_func(table, min_mass=10**8.0, max_mass=np.inf, prim_gal_prop='stellar_mass'):\n\n mask = (table[prim_gal_prop] >= min_mass) & (table[prim_gal_prop] < max_mass)\n return mask", "def pss(self):\n return (self.table[0, 0] * self.table[1, 1] - self.table[0, 1] * self.table[1, 0]) / \\\n ((self.table[0, 0] + self.table[1, 0]) * (self.table[0, 1] + self.table[1, 1]))", "def apply_fhd(self, gfhd):\n for bl in self.data.keys():\n i,j = bl\n p1,p2 = self.pol\n G = gfhd[p1][i]*gfhd[p2][j].conj()\n ind = np.where(G != 0)[0]\n self.data[bl][self.pol][:,ind] /= G[ind]", "def compare_eia_heat_rates_to_ampl_projs(year):\n\n db_gen_projects = pull_generation_projects_data(gen_scenario_id=1).rename(\n columns={'name':'Plant Name', 'gen_tech':'Prime Mover'})\n db_gen_projects.loc[:,'Prime Mover'].replace(\n {\n 'Coal_Steam_Turbine':'ST',\n 'Gas_Steam_Turbine':'ST',\n 'Gas_Combustion_Turbine':'GT',\n 'Gas_Combustion_Turbine_Cogen':'GT',\n 'CCGT':'CC',\n 'DistillateFuelOil_Combustion_Turbine':'GT',\n 'DistillateFuelOil_Internal_Combustion_Engine':'IC',\n 'Geothermal':'ST',\n 'Gas_Internal_Combustion_Engine':'IC',\n 'Bio_Gas_Internal_Combustion_Engine':'IC',\n 'Bio_Gas_Steam_Turbine':'ST'\n },\n inplace=True)\n eia_gen_projects = filter_plants_by_region_id(13, year)\n\n df = pd.merge(db_gen_projects, eia_gen_projects,\n on=['Plant Name','Prime Mover'], how='left').loc[:,[\n 'Plant Name','gen_tech','energy_source','full_load_heat_rate',\n 'Best Heat Rate','Prime Mover','Energy Source','Energy Source 2','Operating Year']]\n df = df[df['full_load_heat_rate']>0]\n\n print \"\\nPrinting intersection of DB and EIA generation projects that have a specified heat rate to heat_rate_comparison.tab\"\n \n fpath = os.path.join('processed_data','heat_rate_comparison.tab')\n 
with open(fpath, 'w') as outfile:\n df.to_csv(outfile, sep='\\t', header=True, index=False)\n\n return df", "def generate(dictalg):\n\n # dsList, sortedAlgs, dictAlg = processInputArgs(args, verbose=verbose)\n res = {}\n for f, i in pproc.dictAlgByFun(dictalg).iteritems():\n for d, j in pproc.dictAlgByDim(i).iteritems():\n tmp = BestAlgSet(j)\n res[(d, f)] = tmp\n return res", "def relative_frequency(table: biom.Table) -> biom.Table:\n table.norm(axis='sample', inplace=True)\n return table", "def generateExtrapolationTable(sex, region):\n pop1 = dataStore.data[dataStore.data.Location == region]\n pop1 = pop1[['Time', 'Age', SEXES[sex]]]\n # pop1 = data[['Time', 'Age', SEX]].query('Location' == CNTRY)\n #print pop1\n\n july1from1950to2100 = [inPosixDays(date(y, 7, 1)) for y in xrange(1950, 2100+1)]\n\n dateRange1950to2100inPosixDays = range(inPosixDays(date(1950,1,1)), inPosixDays(date(2100,12,31))+1)\n\n ''' --- Date interpolation function --- '''\n def dateInterp(iage):\n popi = np.asarray(pop1.loc[dataStore.data.Age == iage.name, SEXES[sex]])\n\n # spline interpolation function from Scipy Package\n iuspl = InterpolatedUnivariateSpline(july1from1950to2100, popi, k=4)\n return iuspl(dateRange1950to2100inPosixDays)\n\n # --- store the results of the date interpolation --- #\n result1 = pd.DataFrame(index = range(0,len(dateRange1950to2100inPosixDays)), columns = range(0,100))\n table = result1.apply(dateInterp, axis=0)\n\n # Change column names by appending \"age_\"\n oldHeaders = table.columns\n newHeaders = []\n for i in oldHeaders:\n newHeaders.append(\"age\" + \"_\" + str(i))\n table.columns = newHeaders\n #print result1.head # results: \"age_0, age_1, ...\"\n\n # Convert the numerical days to date string\n def toDate(d):\n return (date(1970, 1, 1) + timedelta(days=d)).strftime('%Y-%m-%d')\n toDate = np.vectorize(toDate) # vectorize the function to iterate over numpy ndarray\n #fullDateRange = toDate(dateRange1970to2100inPosixDays) # 1st result: 1950-01-01\n fullDateRange = len(dateRange1950to2100inPosixDays)*[None]\n for i in range(0,len(dateRange1950to2100inPosixDays)):\n fullDateRange[i] = toDate(dateRange1950to2100inPosixDays[i])\n\n # Add the fullDateRange to the result1\n table['date1'] = fullDateRange\n\n return table", "def gen_interpolation_coeff(self):\n\n L_counts = [0 for x in range(self.N)]\n ngram_freqs = [self.get_ngram_freq(n) for n in range(0,self.N + 1)]\n\n for ngram in self.freq_dist:\n\n c_at_n = [0 for x in range(self.N)]\n p_at_n = [0 for x in range(self.N)]\n \n for n_len in range(1, self.N + 1):\n\n nom_ngram = ngram[len(ngram) - n_len:]\n denom_ngram = nom_ngram[:-1]\n\n nom_freq = ngram_freqs[n_len]\n denom_freq = ngram_freqs[n_len - 1 ]\n\n nom_count = nom_freq[nom_ngram] - 1\n denom_count = denom_freq[denom_ngram] - 1\n\n if n_len == 1:\n denom_count = sum(nom_freq.values()) - 1 \n \n p_at_n_len = 0\n if denom_count:\n p_at_n_len = nom_count / denom_count\n\n c_at_n[n_len - 1] = nom_count + 1\n p_at_n[n_len - 1] = p_at_n_len \n\n p_max = max(p_at_n)\n for i, p_item in enumerate(p_at_n):\n if p_max == p_item:\n L_counts[i] += c_at_n[i]\n\n total_L = sum(L_counts)\n return [L_count_item / total_L for L_count_item in L_counts]", "def getXy_all_all(year):\r\n print 'getXy_all_all(year=%d)' % year\r\n\r\n patient_keys, patient_dict = get_patient_dict()\r\n keys, counts_dict = patient_keys[1:], patient_dict\r\n #print 'patient_dict = %d' % len(counts_dict)\r\n\r\n drug_keys, drug_dict = get_drugcount_dict(year-1)\r\n keys, counts_dict = common.combine_dicts(keys, 
counts_dict, drug_keys[1:], drug_dict, use_dict1 = True)\r\n #print '+drug_dict = %d' % len(counts_dict)\r\n\r\n lab_keys, lab_dict = get_labcount_dict(year-1)\r\n keys, counts_dict = common.combine_dicts(keys, counts_dict, lab_keys[1:], lab_dict, use_dict1 = True)\r\n #print '+lab_dict = %d' % len(counts_dict)\r\n\r\n if False:\r\n pcg_keys, pcg_dict = get_pcg_counts_dict(year-1)\r\n keys, counts_dict = common.combine_dicts(keys, counts_dict, pcg_keys[1:], pcg_dict)\r\n print '+pcg_dict = %d' % len(counts_dict) \r\n\r\n for prefix in COUNTS_PREFIXES:\r\n pre_keys, pre_dict = get_counts_dict(prefix, year-1)\r\n pre_keys = ['%s=%s' % (prefix, k) for k in pre_keys]\r\n keys, counts_dict = common.combine_dicts(keys, counts_dict, pre_keys[1:], pre_dict)\r\n #print '+%s_dict = %d' % (prefix, len(counts_dict)) \r\n \r\n X,y = getXy_for_dict(year, keys, counts_dict)\r\n return X,y,keys", "def leitner_proportions(df):\n denom = df.shape[0]\n prop_dict = {}\n\n for i in range(1,6):\n df_i = df[df['comfort_level'] == i]\n numer = df_i.shape[0]\n prop_dict[i] = numer / denom\n\n prop_df = pd.DataFrame.from_dict([prop_dict], orient='columns') \n\n prop_df = prop_df.T.rename(columns={0:'proportion'}) \n \n return prop_df", "def get_GCs_diameters():\n catalog = Table.read(os.path.join(context.tables_dir, \"sample_GCs.csv\"))\n sizes = []\n for gal in tqdm(catalog, desc=\"Querying galaxies with GC catalogs\"):\n sizes.append(query_size(gal[\"galaxy\"]))\n sizes = [s.to(u.arcsec).value for s in sizes] * u.arcsec\n sizes = Table([sizes], names=[\"size\"])\n table = hstack([catalog, sizes])\n table.write(os.path.join(context.tables_dir, \"sample_GCs.fits\"),\n overwrite=True)", "def buildGSUB(self):\n\t\t# Construct GSUB table bottom-up.\n\t\tli_fi = Ligature()\n\t\tli_fi.LigGlyph = 'f_i'\n\t\tli_fi.Component = ['i']\n\t\tli_fi.CompCount = 2\n\n\t\tliSubst = LigatureSubst()\n\t\tliSubst.ligatures = {'f': li_fi}\n\t\tliSubst.Format = 1\n\t\tliSubst.LookupType = 4\n\n\t\tlookup = Lookup()\n\t\tlookup.LookupType = 4 # Ligature\n\t\tlookup.LookupFlag = 0\n\t\tlookup.SubTable = [liSubst]\n\t\tlookup.SubTableCount = len(lookup.SubTable)\n\n\t\tlookupList = LookupList()\n\t\tlookupList.Lookup = [lookup]\n\t\tlookupList.LookupCount = len(lookupList.Lookup)\n\n\t\tfea = Feature()\n\t\tfea.FeatureParams = None\n\t\tfea.LookupCount = 1\n\t\tfea.LookupListIndex = [0]\n\n\t\tfeaRecord = FeatureRecord()\n\t\tfeaRecord.FeatureTag = 'liga'\n\t\tfeaRecord.Feature = fea\n\n\t\tfeaList = FeatureList()\n\t\tfeaList.FeatureRecord = [feaRecord]\n\t\tfeaList.FeatureCount = len(feaList.FeatureRecord)\n\n\t\tlangSys = LangSys()\n\t\tlangSys.LookupOrder = None\n\t\tlangSys.ReqFeatureIndex = 0xFFFF\n\t\tlangSys.FeatureIndex = [0]\n\t\tlangSys.FeatureCount = len(langSys.FeatureIndex)\n\n\t\tsct = Script()\n\t\tsct.DefaultLangSys = langSys\n\t\tsct.LangSysRecord = []\n\t\tsct.LangSysCount = len(sct.LangSysRecord)\n\n\t\tsctRec = ScriptRecord()\n\t\tsctRec.ScriptTag = 'tag1'\n\t\tsctRec.Script = sct\n\n\t\tsctList = ScriptList()\n\t\tsctList.ScriptRecord = [sctRec]\n\t\tsctList.ScriptCount = len(sctList.ScriptRecord)\n\n\t\tgsub = GSUB()\n\t\tgsub.LookupList = lookupList\n\t\tgsub.FeatureList = feaList\n\t\tgsub.ScriptList = sctList\n\n\t\ttable = ttLib.newTable('GSUB')\n\t\ttable.table = gsub\n\t\treturn table", "def _populate_output(self):\n self._store_query_percentiles_table()", "def get_transformations_lookups(foreign_keys: list, source_db: str, df_name: str):\n #template = \"\"\"[{{\"name\": \"{lookup_name}\",\"script\": 
\"{source_table}, {lookup_table} lookup(user_id == {lookup_table}@id,\\n\\tbroadcast: 'none')~> {lookup_name}\"}},{{\"name\": \"DerivedColumn1\",\"script\": \"{lookup_name} derive(migration_date = currentTimestamp(),\\n\\t\\tid_old = {source_table}@id,\\n\\t\\tsource_db = '{source_db}') ~> DerivedColumn1\"}}]\"\"\".encode(\"unicode_escape\").decode('utf-8')\n\n script = \"{0}, {1} lookup({2} == {1}@id_old,\\n\\tbroadcast: 'none')~> {3}\"\n transformation_list = []\n\n source_table = \"Input\" +df_name.replace(\"_\", \"\")\n orginal_table = source_table\n for idx, foreign_key in enumerate(foreign_keys):\n\n lookup_table = \"Output\" + foreign_key['referenced_table_name'].replace(\"_\", \"\")\n lookup_col = foreign_key['column_name']\n lookup_name = 'LKP{0}'.format(lookup_table)\n\n print(lookup_name)\n print(source_table)\n print(lookup_table)\n print(source_db)\n\n script = script.format(source_table, lookup_table, lookup_col, lookup_name, orginal_table)\n transformation_dict = {\"name\": lookup_name, \"script\": script}\n transformation_list.append(transformation_dict)\n source_table = lookup_name\n\n transformation_list += get_transformations_no_lookup(source_table)\n return transformation_list", "def do_califa(outfile='NGC4047.pipe3d.hdf5', gallist=['NGC4047'], \n fitsdir='fits_natv_edge', comomdir=None, colabel='co.smo7',\n ext='', nsm=2, ortpar='edge_leda.csv', distpar='edge_califa.csv',\n distcol='caDistP3d', hexgrid=False, allpix=False, debug=False, \n prob=True, discard_cdmatrix=False, append=True, overwrite=True):\n if allpix:\n stride = [1,1,1]\n else:\n stride = [3,3,1]\n\n if len(gallist) == 0:\n raise RuntimeError('Error: gallist is empty!')\n\n # cuts for when to apply BD correction\n hacut = 0.06 # 1e-16 erg / (cm2 s) - no longer used\n hbcut = 0.04 # 1e-16 erg / (cm2 s) - no longer used\n ahalo = 0 # mag\n ahahi = 6 # mag\n\n # FITS keywords important for astrometry\n wcskeys = ['CTYPE1', 'CTYPE2', 'CRVAL1', 'CRVAL2', 'CRPIX1', 'CRPIX2', \n 'CDELT1', 'CDELT2']\n cdkeys = ['CD1_1', 'CD1_2', 'CD2_1', 'CD2_2', 'CD1_3', 'CD2_3',\n 'CD3_1', 'CD3_2', 'CD3_3']\n dimkeys = ['NAXIS1', 'NAXIS2']\n\n # Get the orientation parameters from LEDA\n orttbl = EdgeTable(ortpar)\n orttbl.add_index('Name') \n\n # Get the distance from the CALIFA table\n disttbl = EdgeTable(distpar)\n disttbl.add_index('Name')\n\n # Read the FITS data\n # The columns to save are defined in fitsextract.py\n prodtype = ['ELINES', 'SFH', 'SSP', 'indices', 'flux_elines']\n leadstr = ['', '', '', 'indices.CS.', 'flux_elines.']\n tailstr = ['.ELINES', '.SFH', '.SSP', '', '']\n tailstr = [s+'.cube.fits.gz' for s in tailstr]\n\n for i_prod, prod in enumerate(prodtype):\n zsel, labels, units, nsel = getlabels(prod)\n default_len = len(zsel)\n tlist = []\n\n if prod == 'SFH':\n # Required file for SFH lum to mass conversion\n models = SSPModels('gsd01_156.fits')\n print('Number of model steps:',models.n_models)\n nlumcols = models.n_models\n\n for i_gal, gal in enumerate(gallist):\n print('\\nWorking on galaxy {} product {} nsel={}'.format(\n gal, prod, nsel))\n\n # Read in Pipe3D output\n cafile = os.path.join(fitsdir,leadstr[i_prod]+gal+tailstr[i_prod])\n if not os.path.exists(cafile):\n print('####### Cannot find',cafile)\n continue \n hdu = fits.open(cafile, ignore_missing_end=True)[0]\n cahd = hdu.header.copy()\n # Blanking of CTYPE3 so that fitsextract treats cubes as pseudocubes\n cahd['CTYPE3'] = ''\n # Set CDELT3 to 1 since that will be its value in template\n for key in ['CDELT3', 'CD3_3']:\n if key in 
cahd.keys():\n cahd[key] = 1.\n\n # Read in CO template\n if comomdir is not None:\n cofile = os.path.join(comomdir,gal+'.'+colabel+'_dil.snrpk.fits.gz')\n if not os.path.exists(cofile):\n print('####### Cannot find',cofile)\n continue\n cohd = fits.getheader(cofile)\n # Copy the CALIFA header and replace wcskeys with CO values\n for key in dimkeys+wcskeys:\n if key in cohd.keys():\n cahd[key] = cohd[key]\n # Need to discard CD matrix which would override the new wcskeys\n if 'CDELT1' in cohd.keys() and 'CDELT2' in cohd.keys():\n for key in cdkeys:\n if key in cahd.keys():\n del cahd[key]\n # Optionally discard CD matrix in CALIFA files and fall back on CDELTs\n if discard_cdmatrix:\n for key in cdkeys:\n if key in hdu.header.keys():\n del hdu.header[key]\n if debug:\n print('\\nINPUT',WCS(hdu.header))\n print('\\nCO data',WCS(cohd))\n print('\\nOUTPUT',WCS(cahd))\n newim = reproject_interp(hdu, cahd, order=0, return_footprint=False)\n if debug:\n fits.writeto(cafile.replace('.fits','.rg.fits'), newim, cahd, \n overwrite=True)\n else:\n newim = hdu.data\n\n # Set up output table\n nz = newim.shape[0]\n if debug:\n print('nz=',nz)\n col_lbl = [s+ext for s in labels]\n\n # Add smoothed Ha and Hb columns for extinction estimates\n if prod == 'ELINES' or prod == 'flux_elines':\n kernel = Gaussian2DKernel(nsm)\n if prod == 'ELINES':\n hb_idx = 5\n ha_idx = 6\n col_lbl += ['Hbeta_sm'+str(nsm)+ext, 'Halpha_sm'+str(nsm)+ext]\n cahd['DESC_20'] = ' Hbeta after {} pix smooth'.format(str(nsm))\n cahd['DESC_21'] = ' Halpha after {} pix smooth'.format(str(nsm))\n else:\n hb_idx = 28\n ha_idx = 45\n col_lbl += ['flux_Hbeta_sm'+str(nsm)+ext, 'flux_Halpha_sm'+str(nsm)+ext]\n hb_conv = convolve(newim[hb_idx,:,:], kernel, preserve_nan=True)\n ha_conv = convolve(newim[ha_idx,:,:], kernel, preserve_nan=True)\n newim = np.concatenate((newim, hb_conv[np.newaxis], ha_conv[np.newaxis]))\n if len(zsel) == default_len:\n zsel = list(zsel) + [nz, nz+1]\n if len(units) == default_len:\n units += ['10^-16 erg cm^-2 s^-1', '10^-16 erg cm^-2 s^-1']\n\n if i_prod == 0:\n print(\"RA, DEC, PA, INC:\",orttbl.loc[gal]['ledaRA'],\n orttbl.loc[gal]['ledaDE'], orttbl.loc[gal]['ledaPA'],\n orttbl.loc[gal]['ledaAxIncl'])\n tab0 = fitsextract(newim, header=cahd, keepnan=True, stride=stride, \n bunit=units, col_lbl=col_lbl, zselect=zsel, \n ra_gc=15*orttbl.loc[gal]['ledaRA'],\n dec_gc=orttbl.loc[gal]['ledaDE'], \n pa=orttbl.loc[gal]['ledaPA'],\n inc=orttbl.loc[gal]['ledaAxIncl'], \n ortlabel='LEDA', first=True, use_hexgrid=hexgrid)\n gname = Column([np.string_(gal)]*len(tab0), name='Name', \n description='Galaxy Name')\n tab0.add_column(gname, index=0)\n \n # Add additional columns\n if prod == 'ELINES' or prod == 'flux_elines':\n if prod == 'ELINES':\n prfx = ''\n else:\n prfx = 'flux_'\n # Provide labels for flux_elines columns\n for linecol in labels:\n if linecol.startswith('e_'):\n linetype = linecol.split('_')[1]\n linename = linecol.split('_')[2]\n prelbl = 'error in '\n else:\n linetype = linecol.split('_')[0]\n linename = linecol.split('_')[1]\n prelbl = ''\n if linetype == 'flux':\n suffix = 'intensity'\n elif linetype == 'vel':\n suffix = 'velocity'\n elif linetype == 'disp':\n suffix = 'velocity dispersion'\n elif linetype == 'EW':\n suffix = 'equivalent width'\n tab0[linecol+ext].description=prelbl+linename+' '+suffix\n tab0['flux_Hbeta_sm'+str(nsm)+ext].description=\\\n 'Hbeta intensity after {} pix smooth'.format(str(nsm))\n tab0['flux_Halpha_sm'+str(nsm)+ext].description=\\\n 'Halpha intensity after {} pix 
smooth'.format(str(nsm))\n\n # sfr0 is SFR from Halpha without extinction correction\n sfr0 = sfr_ha(tab0[prfx+'Halpha'+ext], imf='salpeter', \n name=prfx+'sigsfr0'+ext)\n e_sfr0 = Column(sfr0 *\n abs(tab0['e_'+prfx+'Halpha'+ext]/tab0[prfx+'Halpha'+ext]), \n name='e_'+prfx+'sigsfr0'+ext, dtype='f4', unit=sfr0.unit,\n description='error of uncorrected SFR surface density')\n tab0.add_columns([sfr0, e_sfr0])\n\n # Balmer decrement corrected SFR\n sfr_cor, A_Ha, e_sfr_cor, e_A_Ha = sfr_ha(\n tab0[prfx+'Halpha'+ext], \n flux_hb=tab0[prfx+'Hbeta'+ext], \n e_flux_ha=tab0['e_'+prfx+'Halpha'+ext],\n e_flux_hb=tab0['e_'+prfx+'Hbeta'+ext], \n imf='salpeter', \n name=prfx+'sigsfr_corr'+ext)\n # For negative extinction we assume A=0\n sfr_cor[A_Ha < ahalo] = sfr0[A_Ha < ahalo]\n e_sfr_cor[A_Ha < ahalo] = e_sfr0[A_Ha < ahalo]\n # For high extinction we blank the value\n sfr_cor[A_Ha > ahahi] = np.nan\n e_sfr_cor[A_Ha > ahahi] = np.nan\n tab0.add_columns([sfr_cor, e_sfr_cor, A_Ha, e_A_Ha])\n\n # Halpha extinction and SFR after smoothing and clipping\n A_Ha_smo = Column(get_AHa(tab0[prfx+'Halpha_sm'+str(nsm)+ext], \n tab0[prfx+'Hbeta_sm'+str(nsm)+ext], np.log10), \n name=prfx+'AHa_smooth'+str(nsm)+ext, dtype='f4', unit='mag',\n description='Ha extinction after {} pix smooth'.format(str(nsm)))\n sfr_smo = Column(sfr0 * 10**(0.4*A_Ha_smo),\n name=prfx+'sigsfr_adopt'+ext, dtype='f4', unit=sfr0.unit,\n description='smooth+clip BD corrected SFR surface density')\n # For negative extinction we assume A=0\n sfr_smo[A_Ha_smo < ahalo] = sfr0[A_Ha_smo < ahalo]\n # For high extinction we blank the value\n sfr_smo[A_Ha_smo > ahahi] = np.nan\n tab0.add_columns([A_Ha_smo, sfr_smo])\n\n # BPT requires flux_elines since EW(Ha) is part of classification\n if prod == 'flux_elines':\n if prob:\n BPT0, BPT0sf, p_BPT0 = bpt_type(tab0, ext=ext, name='BPT'+ext, \n prob=prob)\n tab0.add_columns([BPT0, p_BPT0, BPT0sf])\n else:\n BPT0, BPT0sf = bpt_type(tab0, ext=ext, name='BPT'+ext, \n prob=prob)\n tab0.add_columns([BPT0, BPT0sf])\n #\n zoh0, zoherr0 = ZOH_M13(tab0, ext=ext, name='ZOH'+ext, err=True)\n tab0.add_columns([zoh0, zoherr0])\n\n elif prod == 'SFH':\n if i_gal == 0:\n f_young = []\n # For star formation history also calculate mass fractions\n # Multiply the luminosity fraction by M/L ratio and re-normalize\n lumcols = Table(tab0.columns[9:nlumcols+9])\n df_lum = lumcols.to_pandas()\n df_mass = df_lum.multiply(models.mass_to_light, axis='columns')\n df_norm = df_mass.divide(df_mass.sum(axis=1), axis='index')\n df_norm.columns = [x.replace('lum','mass') for x in list(df_norm.columns)]\n # Add aggregated mass fraction columns to table\n agecols = [s.split('_')[2] for s in df_norm.columns.values]\n metcols = [s.split('_')[4] for s in df_norm.columns.values]\n df_age = df_norm.groupby(agecols, sort=False, axis=1).sum(min_count=1)\n df_age = df_age.reindex(sorted(df_age.columns, key=float), axis=1)\n df_age.columns = ['massfrac_age_'+x+ext for x in list(df_age.columns)]\n # Total the mass fractions < 32 Myr for later SFR calculation\n f_young.append(np.array(df_age[df_age.columns[:12]].sum(axis=1, \n min_count=1).astype(np.float32)))\n df_met = df_norm.groupby(metcols, axis=1).sum(min_count=1)\n df_met.columns = ['massfrac_met_'+x+ext for x in list(df_met.columns)]\n naggcols = len(df_age.columns) + len(df_met.columns)\n print('Number of aggregated columns:', naggcols)\n t_mass_age = Table.from_pandas(df_age.astype(np.float32))\n t_mass_met = Table.from_pandas(df_met.astype(np.float32))\n indexcols = 
Table(tab0.columns[:9])\n lumaggcols = Table(tab0.columns[nlumcols+9:nlumcols+naggcols+9])\n erraggcols = Table(tab0.columns[2*nlumcols+naggcols+9:])\n tab0 = hstack([indexcols, lumaggcols, erraggcols,\n t_mass_age.filled(np.nan), \n t_mass_met.filled(np.nan)], join_type='exact')\n tab0.add_column(f_young[i_gal], name='f_young')\n tab0['f_young'].description='total mass fraction < 32 Myr'\n for i_col in range(naggcols):\n newname=lumaggcols.columns[i_col].name.replace('lum','mass')\n newdesc=lumaggcols.columns[i_col].description.replace('Luminosity','Mass')\n tab0[newname].description = newdesc\n tab0[newname].unit = 'fraction'\n\n elif prod == 'SSP':\n # For stellar surface density we need distance\n star0 = stmass_pc2(tab0['mass_ssp'+ext], dz=tab0['cont_dezon'+ext],\n dist=disttbl.loc[gal][distcol], name='sigstar'+ext)\n avstar0 = stmass_pc2(tab0['mass_Avcor_ssp'+ext], dz=tab0['cont_dezon'+ext],\n dist=disttbl.loc[gal][distcol], name='sigstar_Avcor'+ext)\n avstar0.description += ' dust corrected'\n ferr0 = Column(abs(tab0['e_medflx_ssp'+ext]/tab0['medflx_ssp'+ext]), \n name='fe_medflx'+ext, dtype='f4', unit='fraction',\n description='fractional error in continuum flux')\n tab0.add_columns([star0, avstar0, ferr0])\n # Add the SSP-based SFR if SFH was run\n try:\n ssp_sfr = Column(f_young[i_gal] * star0 / (0.032*u.Gyr),\n name='sigsfr_ssp'+ext, dtype='f4',\n description='Sigma_SFR from < 32 Myr SSP')\n avssp_sfr = Column(f_young[i_gal] * avstar0 / (0.032*u.Gyr),\n name='sigsfr_Avcor_ssp'+ext, dtype='f4',\n description='Sigma_SFR Av-corrected from < 32 Myr SSP')\n tab0.add_columns([ssp_sfr, avssp_sfr])\n except NameError:\n pass\n\n tlist.append(tab0)\n\n if len(tlist) > 0:\n t_merge = vstack(tlist)\n t_merge.meta['date'] = datetime.today().strftime('%Y-%m-%d')\n if debug:\n print(t_merge.colnames)\n print('There are',len(t_merge),'rows in merged table')\n\n if prod == prodtype[0]:\n t_merge.write(outfile, path=prod+ext, overwrite=overwrite, \n append=append, serialize_meta=True, compression=True)\n else:\n t_merge.write(outfile, path=prod+ext, overwrite=overwrite, \n append=True, serialize_meta=True, compression=True)\n return", "def personnel_search_table(selected_funding, selected_year, search_ids_personnel, search_ids_professors_keywords, personnel_name, search_keywords):\n if search_ids_personnel:\n print(\"personnel_ids are \", search_ids_personnel)\n print(type(search_ids_personnel))\n\n if search_ids_professors_keywords:\n print(\"search_ids are \", search_ids_professors_keywords)\n print(type(search_ids_professors_keywords))\n\n if search_ids_personnel:\n search_ids_personnel = json.loads(search_ids_personnel)\n # personnel_ids = np.array(personnel_ids)\n print(search_ids_personnel)\n\n filtered_data = funding_data[\n funding_data.start_year.isin(selected_year)\n # & funding_data['Submitting Institution Name:'].isin(selected_uni)\n # & funding_data['Project Status:'].isin(selected_award_status)\n & funding_data['Program Cycle:'].isin(selected_funding)]\n\n # Using the dataset only to get relevant names of the personnel who are involved in the project\n # from personnel_data\n\n personnel_data_filtered = personnel_data\n personnel_data_filtered = personnel_data_filtered[\n personnel_data_filtered[\"Proposal Number:\"].isin(filtered_data[\"Proposal Number:\"])]\n\n\n # Implementation of personnel-search-table table with ES search on personnel data\n\n personnel_names = []\n personnel_projects_list = []\n\n if search_keywords and len(search_keywords) > 0:\n if 
len(search_ids_professors_keywords) > 0:\n\n # USE NPRP LISTS\n\n search_ids_professors_keywords = json.loads(search_ids_professors_keywords)\n search_ids_professors_keywords = np.array(search_ids_professors_keywords)\n\n # Using the dataset only to get relevant names of the personnel who are involved in the project\n # from personnel_data\n filtered_data = filtered_data[filtered_data._id.isin(search_ids_professors_keywords[0:, 0])]\n list_of_projects = filtered_data[\"Proposal Number:\"]\n personnel_data_filtered = personnel_data_filtered[personnel_data_filtered[\"Proposal Number:\"].isin(list_of_projects)]\n\n personnel_projects_list = personnel_data_filtered[[\"investigator\", \"Proposal Number:\"]].groupby(\n 'investigator').agg({\"Proposal Number:\": lambda x: [].append(x)})\n\n print(\"Personnel Projects list is \", personnel_projects_list)\n personnel_names = personnel_data_filtered[\"investigator\"].unique()\n print(\"Personnel Who have worked in this field are \", personnel_names)\n\n # some_data = filtered_data.loc[(filtered_data._id.isin(search_ids_professors_keywords[0:, 0]))]\n # personnel_names = some_data[\"Lead Investigator:\"].unique()\n\n\n\n # Implementation of personnel-search-table table with ES search on funding data\n # personnel_names = []\n # if search_keywords and len(search_keywords) > 0:\n # if len(search_ids_professors_keywords) > 0:\n # search_ids_professors_keywords = json.loads(search_ids_professors_keywords)\n # search_ids_professors_keywords = np.array(search_ids_professors_keywords)\n # # Using the dataset only to get relevant names out\n # some_data = filtered_data.loc[(filtered_data._id.isin(search_ids_professors_keywords[0:, 0]))]\n # personnel_names = some_data[\"Lead Investigator:\"].unique()\n\n\n\n # Filtering out the required professors only\n # if len(personnel_names) > 0:\n # filtered_data = filtered_data.loc[filtered_data[\"Lead Investigator:\"].isin(personnel_names)]\n\n # common_dataframe = pd.read_json(common_dataframe, orient='split')\n\n # Filters data on the search id and professors if content present in search box\n if personnel_name and len(personnel_name) > 0:\n if len(search_ids_personnel) > 0:\n # Using the dataset only to get relevant names of the personnel who are involved in the project\n # from personnel_data\n\n personnel_data_filtered = personnel_data_filtered[personnel_data_filtered._id.isin(search_ids_personnel)]\n\n # Filtering the datasets based on search ids and filtered_data\n # Find a better way to do filters by join?\n\n filtered_data = filtered_data[\n filtered_data[\"Proposal Number:\"].isin(personnel_data_filtered[\"Proposal Number:\"])]\n\n personnel_projects_list = personnel_data_filtered[[\"investigator\", \"Proposal Number:\"]].groupby(\n 'investigator').agg({\"Proposal Number:\": lambda x: [].append(x)})\n\n print(\"Personnel Projects list is \", personnel_projects_list)\n personnel_names = personnel_data_filtered[\"investigator\"].unique()\n print(\"Personnel Who have worked in this field are \", personnel_names)\n\n # filtered_data = filtered_data.loc[filtered_data._id.isin(search_ids_personnel)]\n\n # prof_name = clickData['points'][0]['customdata']\n # print(\"selected professor for network graph is \", prof_name)\n\n # TODO: Handle no search_ids or Nonetype search ids DONE\n # print(\" Search IDS ************* 8********** ****** here are this\", search_ids_keywords[0:,0])\n\n # professor_data = funding_data[(funding_data['Lead Investigator:'] == prof_name)\n # & 
(funding_data.start_year.isin(selected_year))\n # # & (funding_data._id.isin(search_ids_keywords[0:,0]))\n # & (funding_data['Program Cycle:'].isin(selected_funding))]\n\n # if search_keywords and len(search_keywords) > 0:\n # search_ids_keywords = json.loads(search_ids_keywords)\n # search_ids_keywords = np.array(search_ids_keywords)\n # professor_data = professor_data[(professor_data._id.isin(search_ids_keywords[0:, 0]))]\n\n\n # collaborating_personnel_counts = collaborating_personnel.investigator.value_counts()\n\n # filtered_data = filtered_data[\"Lead Investigator:\"]\n\n # proposal_numbers = filtered_data[\"Proposal Number:\"]\n # some_data = personnel_data[personnel_data[\"proposal_numbers\"].isin(proposal_numbers)]\n # some_data = some_data[]\n\n personnel_data_filtered = personnel_data_filtered[\"investigator\"]\n personnel_data_filtered = personnel_data_filtered.value_counts()\n personnel_data_filtered = personnel_data_filtered.reset_index()\n personnel_data_filtered.rename({\"index\": \"Personnel\", \"investigator\": \"Total Projects\"}, inplace=True, axis=1)\n\n\n # filtered_data = filtered_data.value_counts()\n # filtered_data = filtered_data.reset_index()\n # filtered_data.rename({\"index\":\"Personnel\", \"Lead Investigator:\":\"Total Projects\"}, inplace=True, axis=1)\n # [\"Proposal Number:\", \"Submitting Institution Name:\", \"Project Status:\", \"Proposal Title:\"]]\n print(\"filtered_data is \", personnel_data_filtered)\n return personnel_data_filtered.to_dict('rows')", "def make_lookup_table(opt_func, Ms, degree):\n nu_s = np.arange(Ms // 2 + degree + 1, dtype=float) / Ms\n C = calc_C(opt_func.h, opt_func.x0, nu_s, opt_func.W)\n table = [C]\n for d in range(degree):\n C = np.diff(C, 1)\n table.append(C)\n return Lookup(opt_func.W, Ms, table, degree)", "def calc(df: pd.DataFrame, step_key: str, jaccard: bool) -> dict:\n\n # Set Output Dictionary\n result_dict = dict()\n # Iterate over DataFrame two rows at a time via index\n for (indx1),(indx2) in zip(df[:-1].index,df[1:].index):\n # Select pairs (a & b)\n if df.loc[indx1].pairing_label.endswith('a') and df.loc[indx2].pairing_label.endswith('b'):\n # save the label and the fulltext depending on pairing (a or b)\n patterna = df.loc[indx1].pairing_label\n patternb = df.loc[indx2].pairing_label\n try:\n # Select preprocessed full_texts \n input_full_a = df.loc[indx1].OneString\n input_full_b = df.loc[indx2].OneString\n except:\n logging.info('For a pair in Calculation, no full_texts could be extracted. 
Scores will remain empty.')\n result_dict.update({patterna : ''}) \n result_dict.update({patternb : ''})\n pass\n # CALCULATOR\n cosine = formulas.distributor(step_key, input_full_a, input_full_b, jaccard)\n # update dict with cosine for each label\n result_dict.update({patterna : cosine}) \n result_dict.update({patternb : cosine})\n else:\n continue\n return result_dict", "def prob_t_a_given_s(self, alignment_info):\n ...", "def get_analysis_table(folder, hyps=None):\n table = dict()\n if hyps is None:\n hyps = get_hyps(folder)\n for k,v in hyps.items():\n if \"state_dict\" not in k and \"model_hyps\" not in k:\n table[k] = [v]\n return table", "def generate_aliased_tables_for_labelling(properties):\n aliased_joins = []\n for prop_i, prop in enumerate(properties):\n if prop == 0: # recall we are faking sitelinks as property 0\n label_table = label_misc\n join_key = 'src'\n elif prop in [Properties.DATE_OF_BIRTH.value, Properties.DATE_OF_DEATH.value]:\n label_table = None # there is no join to be made\n join_key = None # there is no join to be made\n else:\n label_table = label\n join_key = 'qid'\n aliased_label = aliased(label_table, name=f\"label_{prop_i}\") if label_table else None\n join_data = {'label_table': aliased_label, 'join_key': join_key}\n aliased_joins.append(join_data)\n return aliased_joins", "def lookup_transform(self, data, grid=None, method=np.mean, lut=None,\n return_lut=False):\n\n # Input checks\n if grid is None:\n grid = check_crs(data) # xarray\n if not isinstance(grid, Grid):\n raise ValueError('grid should be a Grid instance')\n if hasattr(data, 'values'):\n data = data.values # xarray\n\n # dimensional check\n in_shape = data.shape\n ndims = len(in_shape)\n if (ndims < 2) or (ndims > 4):\n raise ValueError('data dimension not accepted')\n if (in_shape[-1] != grid.nx) or (in_shape[-2] != grid.ny):\n raise ValueError('data dimension not compatible')\n\n if lut is None:\n lut = self.grid_lookup(grid)\n\n # Prepare the output\n out_shape = list(in_shape)\n out_shape[-2:] = [self.ny, self.nx]\n\n if data.dtype.kind == 'i':\n out_data = np.zeros(out_shape, dtype=float) * np.NaN\n else:\n out_data = np.zeros(out_shape, dtype=data.dtype) * np.NaN\n\n def _2d_trafo(ind, outd):\n for ji, l in lut.items():\n outd[ji] = method(ind[l[:, 0], l[:, 1]])\n return outd\n\n if ndims == 2:\n _2d_trafo(data, out_data)\n if ndims == 3:\n for dimi, cdata in enumerate(data):\n out_data[dimi, ...] = _2d_trafo(cdata, out_data[dimi, ...])\n if ndims == 4:\n for dimj, cdata in enumerate(data):\n for dimi, ccdata in enumerate(cdata):\n tmp = _2d_trafo(ccdata, out_data[dimj, dimi, ...])\n out_data[dimj, dimi, ...] = tmp\n\n # prepare output\n if method is len:\n out_data[~np.isfinite(out_data)] = 0\n out_data = out_data.astype(int)\n else:\n out_data = np.ma.masked_invalid(out_data)\n\n if return_lut:\n return out_data, lut\n else:\n return out_data" ]
[ "0.5527577", "0.48766977", "0.48666936", "0.48461375", "0.48401406", "0.48253617", "0.48167393", "0.47914568", "0.4777298", "0.46711516", "0.46498317", "0.46371827", "0.46279138", "0.46278507", "0.4626222", "0.46053305", "0.45921257", "0.45880622", "0.4578638", "0.45648557", "0.45647752", "0.45577258", "0.45500246", "0.454028", "0.4537981", "0.45366195", "0.45360622", "0.4533559", "0.4513368", "0.45079282", "0.45065448", "0.45043352", "0.45029727", "0.45025477", "0.44894615", "0.4488913", "0.44883978", "0.44831356", "0.44754648", "0.44722104", "0.44665757", "0.44633782", "0.4461866", "0.44608688", "0.44592702", "0.4437944", "0.4430294", "0.44236055", "0.44132432", "0.44113842", "0.44086164", "0.4395823", "0.43935877", "0.439187", "0.43856773", "0.43736723", "0.43732682", "0.43657494", "0.43566352", "0.43543404", "0.43512234", "0.43451798", "0.43418014", "0.4332544", "0.4331168", "0.43289444", "0.43261278", "0.4323108", "0.43196774", "0.4314807", "0.43139738", "0.43006027", "0.42895383", "0.42839637", "0.42722926", "0.42721054", "0.42687857", "0.42665043", "0.42602038", "0.42570913", "0.42544705", "0.42523444", "0.4249928", "0.42495692", "0.42404273", "0.42389154", "0.42370015", "0.42366186", "0.4226316", "0.42257157", "0.42244846", "0.42214042", "0.42172143", "0.42166576", "0.42151737", "0.42134374", "0.42071834", "0.42033815", "0.42016068", "0.41979957" ]
0.63473904
0
Method creates ``self.param_dict`` regulating the strength of the correlation between sec_haloprop and galprop at each value of prim_galprop.
def _build_param_dict(self, **kwargs):

        if 'correlation_strength' in kwargs.keys():

            correlation_strength = kwargs['correlation_strength']
            if custom_len(correlation_strength) > 1:
                try:
                    self.correlation_strength_abcissa = kwargs['correlation_strength_abcissa']
                except KeyError:
                    msg = ("If correlation_strength keyword is passed to the constructor, \n" +
                           "you must also pass a correlation_strength_abcissa keyword argument " +
                           "storing an array of the same length as correlation_strength.")
                    raise(msg)
            else:
                self.correlation_strength_abcissa = [0]
                correlation_strength = [correlation_strength]

            self._param_dict_keys = ['correlation_param' + str(i+1) for i in range(len(correlation_strength))]
            self.param_dict = {key:value for key, value in zip(self._param_dict_keys, correlation_strength)}

        else:
            self.param_dict = {'correlation_param1': 1.0}

        self._set_correlation_strength()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_nav', 'g_kvhh', 'g_kva', 'g_kvsi', \n 'g_cav', 'g_kca', 'g_nap', 'g_kir']\n gX_log: np.ndarray = 4 * np.random.rand(9) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(9)) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n gR_name: List[str] = ['g_ampar', 'g_nmdar', 'g_gabar']\n gR_log: np.ndarray = 4 * np.random.rand(3) - 3 # from -3 to 1\n gR: np.ndarray = (10 * np.ones(3)) ** gR_log # 0.001 ~ 10\n gR_itr: Iterator = zip(gR_name, gR)\n\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n\n param_dict.update(gX_itr)\n param_dict.update(gR_itr)\n param_dict.update(tCa_dict)\n return param_dict", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_kvhh', 'g_cav', 'g_kca', 'g_nap']\n gX_log: np.ndarray = 4 * np.random.rand(5) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(5)) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n\n param_dict.update(gX_itr)\n param_dict.update(tCa_dict)\n return param_dict", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_nav', 'g_kvhh', 'g_kva', 'g_kvsi', \n 'g_cav', 'g_kca', 'g_nap', 'g_kir']\n gX_name: List[str] = list(itertools.compress(gX_name, list(self.channel_bool.values())[:9]))\n gX_log: np.ndarray = 4 * np.random.rand(len(gX_name)) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(len(gX_name))) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n gR_name: List[str] = ['g_ampar', 'g_nmdar', 'g_gabar']\n gR_name: List[str] = list(itertools.compress(gR_name, list(self.channel_bool.values())[9:12]))\n gR_log: np.ndarray = 4 * np.random.rand(len(gR_name)) - 3 # from -3 to 1\n gR: np.ndarray = (10 * np.ones(len(gR_name))) ** gR_log # 0.001 ~ 10\n gR_itr: Iterator = zip(gR_name, gR)\n\n param_dict.update(gX_itr)\n param_dict.update(gR_itr)\n\n if self.channel_bool['ca']:\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n param_dict.update(tCa_dict)\n\n return param_dict", "def _mc_galprop(self, seed=None, **kwargs):\n model_helpers.update_param_dict(self, **kwargs)\n self._set_correlation_strength()\n\n if ('galaxy_table' in kwargs.keys()) & ('halos' in kwargs.keys()):\n msg = (\"The mc_\"+self.galprop_key+\" method accepts either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\" + \n \" but never both.\")\n raise KeyError(msg)\n elif 'galaxy_table' in kwargs.keys():\n galaxy_table = kwargs['galaxy_table']\n operative_sec_haloprop_key = (\n model_defaults.host_haloprop_prefix + self.sec_haloprop_key)\n elif 'halos' in kwargs.keys():\n galaxy_table = kwargs['halos']\n operative_sec_haloprop_key = self.sec_haloprop_key\n else:\n msg = (\"The mc_\"+self.galprop_key+\" requires either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\")\n raise KeyError(msg)\n\n self.add_new_haloprops(galaxy_table)\n\n # All at once, draw all the randoms we will need\n np.random.seed(seed=seed)\n all_randoms = np.random.random(len(galaxy_table)*2)\n galprop_cumprob = all_randoms[0:len(galaxy_table)]\n galprop_scatter = all_randoms[len(galaxy_table):]\n\n # Initialize the output array\n output_galprop = 
np.zeros(len(galaxy_table))\n\n # Determine binning and loop range\n if 'galaxy_table_slice_array' not in kwargs.keys():\n binned_prim_galprop = np.digitize(\n galaxy_table[self.prim_galprop_key], \n self.prim_galprop_bins)\n prim_galprop_loop_range = set(binned_prim_galprop)\n else:\n prim_galprop_loop_range = range(len(self.one_point_lookup_table))\n\n for i in prim_galprop_loop_range:\n\n # Determine the slice corresponding to the i^th prim_galprop bin\n if 'galaxy_table_slice_array' not in kwargs.keys():\n idx_bini = np.where(binned_prim_galprop==i)[0]\n num_bini = len(idx_bini)\n else:\n idx_bini = kwargs['galaxy_table_slice_array'][i]\n num_bini = len(galaxy_table[idx_bini])\n\n if len(idx_bini) > 0:\n # Fetch the appropriate number of randoms\n # for the i^th prim_galprop bin\n galprop_cumprob_bini = galprop_cumprob[idx_bini]\n galprop_scatter_bini = galprop_scatter[idx_bini]\n\n # Fetch the halos in the i^th prim_galprop bin, \n # and determine how they are sorted\n haloprop_bini = galaxy_table[idx_bini][operative_sec_haloprop_key]\n idx_sorted_haloprop_bini = np.argsort(haloprop_bini)\n\n galprop_bini = self._condition_matched_galprop(\n haloprop_bini[idx_sorted_haloprop_bini], \n galprop_cumprob_bini, i, galprop_scatter_bini, self.tol)\n\n # Assign the final values to the \n # appropriately sorted subarray of output_galprop\n output_galprop[idx_bini[idx_sorted_haloprop_bini]] = galprop_bini\n\n return output_galprop", "def polarParams(pol, chord, cl_lin_method='leastsquare', DS_constants='OpenFAST', tau=None):\n # Return interpolant\n fPolar = pol.interpolant(variables=['cl','cd','cm','fs','cl_inv','cl_fs'], radians=True)\n\n p=dict()\n p['Polar'] = pol # backup\n p['fPolar'] = fPolar\n\n # Linear region\n linear_region = np.array([-5, 10])*np.pi/180\n Cl_slope, alpha_0 = pol.cl_linear_slope(window=linear_region, method=cl_lin_method, radians=True)\n #print('Cl_slope',Cl_slope, '[1/rad] - alpha_0', alpha_0*180/np.pi,'[deg]')\n\n p['alpha_0'] = alpha_0 # TODO HARMONIZATION WITH DS\n p['Cl_slope'] = Cl_slope # TODO HARMONIZATION WITH DS\n p['alpha_range'] = None\n p['alpha_range_lin'] = None\n\n # Dynamic stall\n p.update(dynstall_mhh_param_from_polar(pol, chord, constants=DS_constants))\n p.update(dynstall_oye_param_from_polar(pol, tau=tau)) # TODO\n return p", "def _build_param_dict(self):\n self._build_common_param_dict()\n\n self._param_dict.add(Parameter.NUM_AVG_SAMPLES,\n r'ScansToAverage>([\\d]+)</ScansToAverage>',\n lambda match: int(match.group(1)),\n str,\n type=ParameterDictType.INT,\n display_name=\"Scans to Average\",\n description=\"Number of samples to average (must be even)\",\n range=INT16,\n startup_param=True,\n direct_access=False,\n default_value=4,\n visibility=ParameterDictVisibility.READ_WRITE)\n self._param_dict.add(Parameter.MIN_COND_FREQ,\n r'MinimumCondFreq>([\\d]+)</MinimumCondFreq',\n lambda match: int(match.group(1)),\n str,\n type=ParameterDictType.INT,\n display_name=\"Minimum Conductivity Frequency\",\n range=INT16,\n description=\"Minimum conductivity frequency to enable pump turn-on.\",\n startup_param=True,\n direct_access=False,\n default_value=500,\n units=Units.HERTZ,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.PUMP_DELAY,\n r'PumpDelay>([\\d]+)</PumpDelay',\n lambda match: int(match.group(1)),\n str,\n type=ParameterDictType.INT,\n display_name=\"Pump Delay\",\n range=INT16,\n description=\"Time to wait after minimum conductivity frequency is reached before turning pump on.\",\n startup_param=True,\n 
direct_access=False,\n default_value=60,\n units=Units.SECOND,\n visibility=ParameterDictVisibility.READ_WRITE)\n self._param_dict.add(Parameter.AUTO_RUN,\n r'AutoRun>(.*)</AutoRun',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Auto Run\",\n description=\"Enable automatic logging when power is applied: (true | false).\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=False,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.IGNORE_SWITCH,\n r'IgnoreSwitch>(.*)</IgnoreSwitch',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Ignore Switch\",\n description=\"Disable magnetic switch position for starting or stopping logging: (true | false)\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=True,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.OPTODE,\n r'OPTODE>(.*)</OPTODE',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Optode Attached\",\n description=\"Enable optode: (true | false)\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=True,\n visibility=ParameterDictVisibility.IMMUTABLE)\n self._param_dict.add(Parameter.VOLT1,\n r'ExtVolt1>(.*)</ExtVolt1',\n lambda match: True if match.group(1) == 'yes' else False,\n self._true_false_to_string,\n type=ParameterDictType.BOOL,\n display_name=\"Volt 1\",\n description=\"Enable external voltage 1: (true | false)\",\n range={'True': True, 'False': False},\n startup_param=True,\n direct_access=True,\n default_value=True,\n visibility=ParameterDictVisibility.IMMUTABLE)\n\n self._build_ctd_specific_params()", "def initialize(self):\n params = {}\n for i in range(1, len(self.layer_dimensions)):\n params['b_' + str(i)] = np.ones((self.layer_dimensions[i], 1))\n if self.he_initialization:\n params['W_' + str(i)] = np.random.randn(self.layer_dimensions[i],\n self.layer_dimensions[i - 1]) * np.sqrt(\n 2 / self.layer_dimensions[i - 1])\n else:\n params['W_' + str(i)] = np.random.rand(self.layer_dimensions[i], self.layer_dimensions[i - 1]) - 0.5\n return params", "def _set_correlation_strength(self):\n\n if hasattr(self, 'correlation_strength_abcissa'):\n abcissa = self.correlation_strength_abcissa\n ordinates = [self.param_dict['correlation_param'+str(i+1)] for i in range(len(abcissa))]\n correlation_strength_spline = model_helpers.custom_spline(abcissa, ordinates, k=custom_len(abcissa)-1)\n self.correlation_strength = correlation_strength_spline(self.prim_galprop_bins)\n else:\n self.correlation_strength = np.repeat(self.param_dict['correlation_param1'], len(self.prim_galprop_bins))\n\n self.correlation_strength[self.correlation_strength > 1] = 1\n self.correlation_strength[self.correlation_strength <- 1] = -1\n\n self.correlation_strength = np.append(\n self.correlation_strength, self.correlation_strength[-1])", "def param_init(self, sig=0.01):\n self.rhos = np.ones(self.Ndim)\n self.a = np.random.rand(self.Ndim, self.Nhidden)\n self.c = np.random.rand(self.Nhidden)\n self.W = np.random.randn(self.Nhidden, self.Ndim) * sig\n self.alphas = np.zeros((self.Ndim, self.Ncomponents))\n self.mus = np.zeros((self.Ndim, self.Ncomponents))\n self.sigmas = np.zeros((self.Ndim, 
self.Ncomponents))\n self.optimize_params = [self.rhos, self.c, self.W]\n\n types = ['alpha', 'mu', 'sigma']\n self.bs = {}\n self.Vs = {}\n for t in types:\n self.bs[t] = np.random.randn(self.Ndim, self.Ncomponents) * sig\n self.Vs[t] = np.random.randn(self.Ndim, self.Nhidden,\n self.Ncomponents) * sig\n self.optimize_params.append(self.bs[t])\n self.optimize_params.append(self.Vs[t])", "def get_params(self, deep=False):\n sampling_params = {'n_dim': self.n_dim,\n 'simplex_sampling': self.simplex_sampling,\n 'within_simplex_sampling': self.within_simplex_sampling,\n 'gaussian_component': self.gaussian_component}\n return {'ss_params': sampling_params,\n **RandomStateMixin.get_params(self, deep)}", "def _set_leg_params(self):\n self.p = 0.01600\n self.q = 0.00000\n self.r = 0.02000\n self.c = 0.01811\n self.u = 0.00000\n self.v = 0.00000\n self.e = -0.06000\n self.h = -0.02820\n self.s = 0.02200\n self.d1 = 0.0\n self.d2 = 0.0\n self.d3 = 0.0\n self.stability = 0.0", "def sample_parameters_given_hyper(self, gen_seed=0):\n if type(gen_seed) is not int:\n raise TypeError(\"gen_seed should be an int\")\n\n rng = random.Random(gen_seed)\n\n hypers = self.get_hypers()\n s = hypers[b's']\n r = hypers[b'r']\n nu = hypers[b'nu']\n m = hypers[b'mu']\n\n rho = rng.gammavariate(nu/2.0, s)\n mu = rng.normalvariate(m, (r/rho)**.5)\n\n assert(rho > 0)\n\n params = {'mu': mu, 'rho': rho}\n\n return params", "def getGPEParams(self):\n outKeysScaleDouble = ['R', 'gamma_C', 'gamma_R', 'g_C', 'g_R', 'k',\n 'Pth']\n outKeysScaleSingle = outKeysScaleDouble + ['gamma_nl']\n outKeysScale = outKeysScaleSingle if self.singleComp else\\\n outKeysScaleDouble\n outKeys = ['charL', 'charT']\n out = {key: self.__dict__[key + '_scaled'] for key in outKeysScale}\n for key in outKeys:\n out[key] = self.__dict__[key]\n return out", "def _set_init_param_dict(self):\n\n self.param_dict = {}\n\n try:\n suppress_warning = self._suppress_repeated_param_warning\n except AttributeError:\n suppress_warning = False\n msg = (\"\\n\\nThe param_dict key %s appears in more than one component model.\\n\"\n \"This is permissible, but if you are seeing this message you should be sure you \"\n \"understand it.\\nIn particular, double-check that this parameter does not have \"\n \"conflicting meanings across components.\\n\"\n \"\\nIf you do not wish to see this message every time you instantiate, \\n\"\n \"simply attach a _suppress_repeated_param_warning attribute \\n\"\n \"to any of your component models and set this variable to ``True``.\\n\")\n\n for component_model in self.model_dictionary.values():\n\n if not hasattr(component_model, 'param_dict'):\n component_model.param_dict = {}\n intersection = set(self.param_dict) & set(component_model.param_dict)\n if intersection != set():\n for key in intersection:\n if suppress_warning is False:\n warn(msg % key)\n\n for key, value in component_model.param_dict.iteritems():\n self.param_dict[key] = value\n\n self._init_param_dict = copy(self.param_dict)", "def get_hyperparams_dict(self, lr, gamma):\n hyperparams_dict = Storage.BASE_HYPERPARAMS_DICT\n hyperparams_dict[\"learning_rate\"] = lr\n hyperparams_dict[\"gamma\"] = gamma\n return hyperparams_dict", "def init_params(self):\n self.params = Parameters()\n self.params.add('qoff', self.qoff, vary=0, min=-np.inf, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('yscale', self.yscale, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('int_bg', self.int_bg, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)\n 
self.params.add('Rc', self.Rc, vary=0, min=-np.inf, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('sur_den', self.sur_den, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('ion_depth', self.ion_depth, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)", "def get_params(self) -> Dict:\n params: Dict = {}\n params['g_leak'] = self.leak.get_g()\n params['g_kvhh'] = self.kvhh.get_g()\n params['g_cav'] = self.cav.get_g()\n params['g_kca'] = self.kca.get_g()\n params['g_nap'] = self.nap.get_g()\n params['t_ca'] = self.tau_ca\n return params", "def add_to_dict(param_dict):\n ### Sample - Int\n sample_s = param_dict['ml_args'].sample_s\n ### Sample - Mr\n sample_Mr = param_dict['ml_args'].sample_Mr\n ## Sample volume\n # Units (Mpc/h)**3\n volume_sample = { '18': 37820 / 0.01396,\n '19': 6046016.60311 ,\n '20': 2.40481e7 ,\n '21': 8.79151e7 }\n vol_mr = volume_sample[sample_s]\n ##\n ## Choice of Centrals and Satellites\n cens = int(1)\n sats = int(0)\n ## Other constants\n # Speed of light - In km/s\n speed_c = ac.c.to(u.km/u.s).value\n ## Number of CPU's to use\n cpu_number = int(cpu_count() * param_dict['cpu_frac'])\n ##\n ## Plotting constants\n plot_dict = { 'size_label':23,\n 'size_title':25,\n 'color_ham' :'red',\n 'color_dyn' :'blue'}\n ##\n ## Catalogue Prefix string\n catl_str_fig = param_dict['ml_args'].catl_alg_comp_fig_str()\n ##\n ## Saving to `param_dict`\n param_dict['sample_s' ] = sample_s\n param_dict['sample_Mr' ] = sample_Mr\n param_dict['vol_mr' ] = vol_mr\n param_dict['cens' ] = cens\n param_dict['sats' ] = sats\n param_dict['speed_c' ] = speed_c\n param_dict['cpu_number' ] = cpu_number\n param_dict['plot_dict' ] = plot_dict\n param_dict['catl_str_fig'] = catl_str_fig\n\n return param_dict", "def _get_prior_params(self):\n # relation transformation matrix\n M_mu = np.eye(self.n_polarities, dtype=\"float32\")\n M_mu[1, :] = [0., 0.3, 0.]\n M_mu = np.tile(M_mu, (self.n_rels, 1)).reshape(\n self.n_rels, self.n_polarities, self.n_polarities\n )\n # for rel, rel_idx in iteritems(self.rel2idx):\n # # swap axes for contrastive relations\n # if check_rel(rel, CONTRASTIVE_RELS):\n # mu_i = M_mu[rel_idx]\n # mu_i[[0, 2]] = mu_i[[2, 0]]\n M_mu = torch.tensor(M_mu)\n M_sigma = torch.tensor(\n np.ones((self.n_rels, self.n_polarities, self.n_polarities),\n dtype=\"float32\")\n )\n # beta\n beta_p = 5. * torch.tensor(np.ones((self.n_rels, self.n_polarities),\n dtype=\"float32\"))\n beta_q = 5. 
* torch.tensor(np.ones((self.n_rels, self.n_polarities),\n dtype=\"float32\"))\n # z_epsilon\n z_epsilon_p = torch.tensor(1.)\n z_epsilon_q = torch.tensor(15.)\n # scale factor\n scale_factor = torch.tensor(34.)\n return {\"M_mu\": M_mu, \"M_sigma\": M_sigma, \"beta_p\": beta_p,\n \"beta_q\": beta_q, \"z_epsilon_p\": z_epsilon_p,\n \"z_epsilon_q\": z_epsilon_q, \"scale_factor\": scale_factor}", "def evaluate_reco_param(self):\n evals = self.input_binning['true_energy'].weighted_centers.magnitude\n n_e = len(self.input_binning['true_energy'].weighted_centers.magnitude)\n n_cz = len(self.input_binning['true_coszen'].weighted_centers.magnitude)\n eval_dict = deepcopy(self.param_dict)\n for flavintgroup, dim_dict in eval_dict.items():\n for dim, dist_list in dim_dict.items():\n for dist_prop_dict in dist_list:\n for dist_prop in dist_prop_dict.keys():\n if dist_prop == 'dist':\n continue\n if callable(dist_prop_dict[dist_prop]):\n func = dist_prop_dict[dist_prop]\n vals = func(evals)\n dist_prop_dict[dist_prop] =\\\n np.repeat(vals,n_cz).reshape((n_e,n_cz))\n elif isinstance(dist_prop_dict[dist_prop], dict):\n assert dist_prop == 'kwargs'\n for kwarg in dist_prop_dict['kwargs'].keys():\n func = dist_prop_dict['kwargs'][kwarg]\n vals = func(evals)\n dist_prop_dict['kwargs'][kwarg] =\\\n np.repeat(vals,n_cz).reshape((n_e,n_cz))\n # Now check for consistency, to not have to loop over all dict\n # entries again at a later point in time\n self.check_reco_dist_consistency(dist_list)\n return eval_dict", "def init_params():\n p = {}\n \n # p['rootFolder'] = 'C:/Users/Umberto Gostoli/SPHSU/Social Care Model II'\n # p['rootFolder'] = 'N:/Social Care Model Paper III'\n \n p['noPolicySim'] = False\n p['multiprocessing'] = True\n p['numberProcessors'] = 9\n p['numRepeats'] = 3\n \n p['startYear'] = 1860\n p['endYear'] = 2040\n p['thePresent'] = 2012\n p['statsCollectFrom'] = 1990\n p['regressionCollectFrom'] = 1960 \n p['implementPoliciesFromYear'] = 2020\n p['yearOutcome'] = 2015\n \n p['favouriteSeed'] = 123\n p['loadFromFile'] = False\n p['verboseDebugging'] = False\n p['singleRunGraphs'] = False\n p['saveChecks'] = True\n p['getCheckVariablesAtYear'] = 2015\n # To change through command-line arguments\n\n p['numberPolicyParameters'] = 2\n p['valuesPerParam'] = 1\n p['numberScenarios'] = 3\n \n ############ Policy Parameters #######################\n p['incomeCareParam'] = 0.0005 #[0.00025 - 0.001]\n p['taxBreakRate'] = 0.0\n p['ageOfRetirement'] = 65\n p['socialSupportLevel'] = 5\n # p['educationCosts']\n #############################################################\n p['socialCareCreditShare'] = 0.0\n p['maxWtWChildAge'] = 5\n # The basics: starting population and year, etc.\n \n p['discountingFactor'] = 0.03\n \n \n p['initialPop'] = 600 \n \n p['minStartAge'] = 24\n p['maxStartAge'] = 45\n p['numberClasses'] = 5\n p['socialClasses'] = ['unskilled', 'skilled', 'lower', 'middle', 'upper']\n p['initialClassShares'] = [0.2, 0.25, 0.3, 0.2, 0.05]\n p['initialUnemployment'] = [0.25, 0.2, 0.15, 0.1, 0.1]\n p['unemploymentAgeBandParam'] = 0.3\n \n # doDeath function parameters\n p['mortalityBias'] = 0.85 # After 1950\n p['careNeedBias'] = 0.9\n p['unmetCareNeedBias'] = 0.5\n p['baseDieProb'] = 0.0001\n p['babyDieProb'] = 0.005\n p['maleAgeScaling'] = 14.0\n p['maleAgeDieProb'] = 0.00021\n p['femaleAgeScaling'] = 15.5\n p['femaleAgeDieProb'] = 0.00019\n \n p['orphansRelocationParam'] = 0.5\n \n # doBirths function parameters\n p['minPregnancyAge'] = 17\n p['maxPregnancyAge'] = 42\n p['growingPopBirthProb'] 
= 0.215\n p['fertilityCorrector'] = 1.0\n p['fertilityBias'] = 0.9\n \n # careTransitions function parameters\n p['zeroYearCare'] = 80.0\n p['childcareDecreaseRate'] = 0.25\n p['personCareProb'] = 0.0008\n p['maleAgeCareScaling'] = 18.0 # p['maleAgeCareProb'] = 0.0008\n p['femaleAgeCareScaling'] = 19.0 # p['femaleAgeCareProb'] = 0.0008\n p['baseCareProb'] = 0.0002\n p['careBias'] = 0.9\n p['careTransitionRate'] = 0.7\n\n p['unmetNeedExponent'] = 1.0 # 0.005 #[0.005 - 0.02]\n \n p['numCareLevels'] = 5\n p['careLevelNames'] = ['none','low','moderate','substantial','critical']\n p['careDemandInHours'] = [ 0.0, 8.0, 16.0, 32.0, 80.0 ]\n p['quantumCare'] = 4.0\n \n # careSupplies getCare and probSuppliers function parameters\n \n ######## Key parameter 1 ##############\n \n \n p['weeklyHours'] = 40.0\n \n \n p['priceChildCare'] = 0.76 # 6 \n p['schoolAge'] = 5\n p['maxFormalChildcareHours'] = 48\n p['schoolHours'] = 30\n p['freeChildcareHours'] = 15\n p['workingParentsFreeChildcareHours'] = 30\n p['minAgeStartChildCareSupport'] = 3\n p['minAgeStartChildCareSupportByIncome'] = 2\n p['maxHouseholdIncomeChildCareSupport'] = 40 # 320\n \n ######## Key parameter 2 ##############\n # 5: No public supply \n \n p['retiredHours'] = [48.0, 36.0, 20.0, 10.0] # 60.0\n p['studentHours'] = [24.0, 16.0, 8.0, 4.0]\n p['teenAgersHours'] = [16.0, 0.0, 0.0, 0.0]\n p['unemployedHours'] = [32.0, 24.0, 16.0, 8.0]\n p['employedHours'] = [28.0, 20.0, 12.0, 8.0]\n p['formalCareDiscountFactor'] = 0.5\n \n p['socialNetworkDistances'] = [0.0, 1.0, 2.0, 1.0, 2.0, 2.0, 3.0, 3.0]\n p['networkDistanceParam'] = 2.0\n p['socialCareWeightBias'] = 1.0\n p['unmetCareNeedDiscountParam'] = 0.5\n p['shareUnmetNeedDiscountParam'] = 0.5\n # p['pastShareUnmetNeedWeight'] = 0.5\n \n \n \n p['networkSizeParam'] = 10.0 # 1.0\n \n p['careSupplyBias'] = 0.5\n p['careIncomeParam'] = 0.001\n \n # Hospitalization Costs\n p['qalyBeta'] = 0.18\n p['qalyAlpha'] = 1.5\n p['qalyDiscountRate'] = 0.035\n p['qalyIndexes'] = [1.0, 0.8, 0.6, 0.4, 0.2]\n p['unmetCareHealthParam'] = 0.1\n p['hospitalizationParam'] = 0.5\n p['needLevelParam'] = 2.0\n p['unmetSocialCareParam'] = 2.0\n p['costHospitalizationPerDay'] = 400\n \n # ageTransitions, enterWorkForce and marketWage functions parameters\n p['ageTeenagers'] = 12\n p['minWorkingAge'] = 16\n \n ######## Key parameter 3 ##############\n \n p['careBankingSchemeOn'] = False\n p['socialCareBankingAge'] = 65\n \n p['absoluteCreditQuantity'] = False\n p['quantityYearlyIncrease'] = 0.0\n p['socialCareCreditQuantity'] = 0\n p['kinshipNetworkCarePropension'] = 0.5\n p['volunteersCarePropensionCoefficient'] = 0.01\n p['pensionContributionRate'] = 0.05\n \n p['hillHealthLevelThreshold'] = 3\n p['seriouslyHillSupportRate'] = 0.5\n \n ### Prices ####\n p['pricePublicSocialCare'] = 20.0 # [2.55] # 20\n p['priceSocialCare'] = 17.0 # [2.29] # 18\n p['taxBrackets'] = [663, 228, 0] # [28.16, 110.23] # [221, 865]\n p['taxBandsNumber'] = 3\n p['bandsTaxationRates'] = [0.4, 0.2, 0.0] # [0.0, 0.2, 0.4]\n # Tax Break Policy\n\n \n p['pensionWage'] = [5.0, 7.0, 10.0, 13.0, 18.0] # [0.64, 0.89, 1.27, 1.66, 2.29] # \n p['incomeInitialLevels'] = [5.0, 7.0, 9.0, 11.0, 14.0] #[0.64, 0.89, 1.15, 1.40, 1.78] # \n p['incomeFinalLevels'] = [10.0, 15.0, 22.0, 33.0, 50.0] #[1.27, 1.91, 2.80, 4.21, 6.37] # \n p['educationCosts'] = [0.0, 100.0, 150.0, 200.0] #[0.0, 12.74, 19.12, 25.49] # \n \n # Priced growth #####\n p['wageGrowthRate'] = 1.0 # 1.01338 # \n\n p['incomeGrowthRate'] = [0.4, 0.35, 0.35, 0.3, 0.25]\n \n # SES inter-generational 
mobility parameters\n p['leaveHomeStudentsProb'] = 0.5\n \n p['eduWageSensitivity'] = 0.2 # 0.5\n p['eduRankSensitivity'] = 3.0 # 5.0\n p['costantIncomeParam'] = 80.0 # 20.0\n p['costantEduParam'] = 10.0 # 10.0\n p['careEducationParam'] = 0.005 # 0.04\n \n \n \n # p['incEduExp'] = 0.25\n p['educationLevels'] = ['GCSE', 'A-Level', 'HND', 'Degree', 'Higher Degree']\n p['workingAge'] = [16, 18, 20, 22, 24]\n \n # doDivorce function parameters\n p['basicDivorceRate'] = 0.06\n p['variableDivorce'] = 0.06\n p['divorceModifierByDecade'] = [ 0.0, 1.0, 0.9, 0.5, 0.4, 0.2, 0.1, 0.03, 0.01, 0.001, 0.001, 0.001, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n p['divorceBias'] = 1.0\n \n # doMarriages function parameters\n p['deltageProb'] = [0.0, 0.1, 0.25, 0.4, 0.2, 0.05]\n p['incomeMarriageParam'] = 0.025\n p['studentFactorParam'] = 0.5\n ######## Key parameter 4 ##############\n p['betaGeoExp'] = 2.0 #[1.0 - 4.0]\n \n p['betaSocExp'] = 2.0\n p['rankGenderBias'] = 0.5\n p['basicMaleMarriageProb'] = 0.9\n p['maleMarriageModifierByDecade'] = [ 0.0, 0.16, 0.5, 1.0, 0.8, 0.7, 0.66, 0.5, 0.4, 0.2, 0.1, 0.05, 0.01, 0.0, 0.0, 0.0, 0.0 ]\n \n # jobMarket, updateWork and unemploymentRate functions parameters\n p['unemploymentClassBias'] = 0.75\n p['unemploymentAgeBias'] = [1.0, 0.55, 0.35, 0.25, 0.2, 0.2]\n p['numberAgeBands'] = 6\n p['jobMobilitySlope'] = 0.004\n p['jobMobilityIntercept'] = 0.05\n p['ageBiasParam'] = [7.0, 3.0, 1.0, 0.5, 0.35, 0.15]\n p['deltaIncomeExp'] = 0.05\n p['unemployedCareBurdernParam'] = 0.025\n # Potential key parameter\n p['relocationCareLossExp'] = 1.0 # 40.0 # \n p['incomeSocialCostRelativeWeight'] = 0.5\n \n p['firingParam'] = 0.2\n p['wageVar'] = 0.06\n p['workDiscountingTime'] = 0.75 # 0.8\n p['sizeWeightParam'] = 0.7\n p['minClassWeightParam'] = 1.0\n p['incomeDiscountingExponent'] = 4.0\n p['discountingMultiplier'] = 2.0\n #p['incomeDiscountingParam'] = 2.0\n \n # relocationPensioners function parameters\n p['agingParentsMoveInWithKids'] = 0.1\n p['variableMoveBack'] = 0.1\n p['retiredRelocationParam'] = 0.001 # 0.005\n \n # houseMap function parameters\n p['geoDistanceSensitivityParam'] = 2.0\n p['socDistanceSensitivityParam'] = 2.0\n p['classAffinityWeight'] = 4.0\n p['distanceSensitivityParam'] = 0.5\n \n # relocationProb function parameters\n p['baseRelocatingProb'] = 0.05\n p['relocationParameter'] = 1.0 \n p['apprenticesRelocationProb'] = 0.5\n #p['expReloc'] = 1.0\n \n # computeRelocationCost and relocation Propensity functions parameters\n p['yearsInTownSensitivityParam'] = 0.5\n \n ######## Key parameter 5 ##############\n p['relocationCostParam'] = 0.5 # 1.0 \n \n ######## Key parameter 6 ##############\n p['propensityRelocationParam'] = 2.0 # 2.0 \n p['denRelocationWeight'] = 0.5\n \n \n ## Description of the map, towns, and houses\n p['mapGridXDimension'] = 8\n p['mapGridYDimension'] = 12 \n p['townGridDimension'] = 70\n p['cdfHouseClasses'] = [ 0.6, 0.9, 5.0 ]\n p['ukMap'] = [[ 0.0, 0.1, 0.2, 0.1, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.1, 0.1, 0.2, 0.2, 0.3, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.2, 0.2, 0.3, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.2, 1.0, 0.5, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.4, 0.0, 0.2, 0.2, 0.4, 0.0, 0.0, 0.0 ],\n [ 0.6, 0.0, 0.0, 0.3, 0.8, 0.2, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.0, 0.6, 0.8, 0.4, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.2, 1.0, 0.8, 0.6, 0.1, 0.0 ],\n [ 0.0, 0.0, 0.1, 0.2, 1.0, 0.6, 0.3, 0.4 ],\n [ 0.0, 0.0, 0.5, 0.7, 0.5, 1.0, 1.0, 0.0 ],\n [ 0.0, 0.0, 0.2, 0.4, 0.6, 1.0, 1.0, 0.0 ],\n [ 0.0, 0.2, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0 ]]\n p['ukClassBias'] = [[ 0.0, -0.05, -0.05, -0.05, 0.0, 0.0, 0.0, 
0.0 ],\n [ -0.05, -0.05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, -0.05, -0.05, 0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ -0.05, 0.0, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ -0.05, 0.0, 0.0, -0.05, -0.05, -0.05, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.0, -0.05, -0.05, -0.05, 0.0, 0.0 ],\n [ 0.0, 0.0, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.0, -0.05, 0.0, -0.05, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.0, -0.05, 0.0, 0.2, 0.15, 0.0 ],\n [ 0.0, 0.0, 0.0, 0.0, 0.1, 0.2, 0.15, 0.0 ],\n [ 0.0, 0.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0 ] ]\n p['mapDensityModifier'] = 0.6\n # p['numHouseClasses'] = 3\n # p['houseClasses'] = ['small','medium','large']\n \n ## Graphical interface details\n p['interactiveGraphics'] = False #True\n p['delayTime'] = 0.0\n p['screenWidth'] = 1300\n p['screenHeight'] = 700\n p['bgColour'] = 'black'\n p['mainFont'] = 'Helvetica 18'\n p['fontColour'] = 'white'\n p['dateX'] = 70\n p['dateY'] = 20\n p['popX'] = 70\n p['popY'] = 50\n p['pixelsInPopPyramid'] = 2000\n p['num5YearAgeClasses'] = 28\n p['careLevelColour'] = ['blue','green','yellow','orange','red']\n p['houseSizeColour'] = ['brown','purple','yellow']\n p['pixelsPerTown'] = 56\n p['maxTextUpdateList'] = 22\n \n # p['eduEduSensitivity'] = 0.5\n # p['mortalityBias'] = [1.0, 0.92, 0.84, 0.76, 0.68]\n # p['fertilityBias'] = [1.0, 0.92, 0.84, 0.76, 0.68]\n # p['divorceBias'] = [2.0, 1.5, 1.0, 0.75, 0.5]\n\n ## Transitions to care statistics\n \n ## Availability of care statistics\n \n #p['childHours'] = 5.0\n # p['employedHours'] = 12.0\n #p['homeAdultHours'] = 30.0\n #p['workingAdultHours'] = 25.0\n #p['maxEmployedHours'] = 60.0\n \n #p['lowCareHandicap'] = 0.5\n #p['hourlyCostOfCare'] = 20.0\n \n ## Fertility statistics\n \n # p['steadyPopBirthProb'] = 0.13\n # p['transitionYear'] = 1965\n \n ## Class and employment statistics\n # p['numClasses'] = 5\n # p['occupationClasses'] = ['lower','intermediate','higher']\n # p['cdfOccupationClasses'] = [ 0.6, 0.9, 1.0 ]\n\n ## Age transition statistics\n # p['ageOfAdulthood'] = 17\n \n ## Marriage function parameters\n \n # p['basicFemaleMarriageProb'] = 0.25\n # p['femaleMarriageModifierByDecade'] = [ 0.0, 0.5, 1.0, 1.0, 1.0, 0.6, 0.5, 0.4, 0.1, 0.01, 0.01, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n # p['femaleMarriageProb'] = [0.01, 0.15, 0.3, 0.2, 0.1, 0.1, 0.06, 0.05, 0.02, 0.01, 0.01, 0.005]\n # p['maleMarriageProb'] = [0.005, 0.08, 0.25, 0.25, 0.15, 0.1, 0.07, 0.05, 0.03, 0.02, 0.01, 0.005]\n \n ## Leaving home and moving around statistics\n # p['probApartWillMoveTogether'] = 0.3\n # p['coupleMovesToExistingHousehold'] = 0.3\n # p['basicProbAdultMoveOut'] = 0.22\n # p['probAdultMoveOutModifierByDecade'] = [ 0.0, 0.2, 1.0, 0.6, 0.3, 0.15, 0.03, 0.03, 0.01, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n # p['basicProbSingleMove'] = 0.05\n # p['probSingleMoveModifierByDecade'] = [ 0.0, 1.0, 1.0, 0.8, 0.4, 0.06, 0.04, 0.02, 0.02, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n # p['basicProbFamilyMove'] = 0.03\n # p['probFamilyMoveModifierByDecade'] = [ 0.0, 0.5, 0.8, 0.5, 0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1 ]\n\n \n return p", "def init_parameters(obj, hyperparameters):\n # Initialize Global Configuration Parameter\n params = hyperparameters['global']\n setattr(obj, 'param', params)\n\n # Initialize Attributes (Pre-Checked Parameters)\n setattr(obj, 'learning_rate', params['learning_rate'])\n setattr(obj, 'loss', params['loss'])\n setattr(obj, 'max_iter', params['max_iter'])\n\n if params['loss'] == 'least_squares':\n setattr(obj, 'num_classes', 1)\n elif 
params['loss'] in ['binary_crossentropy', 'categorical_crossentropy', 'auto']:\n setattr(obj, 'num_classes', params['num_classes'])\n\n # Initialize Attributes (Optional Values - Based on Default Parameters)\n if 'l2_regularization' not in params or params['l2_regularization'] is None:\n setattr(obj, 'l2_regularization', 0)\n else:\n setattr(obj, 'l2_regularization', params['l2_regularization'])\n\n if 'max_bins' not in params:\n setattr(obj, 'max_bins', 255)\n else:\n setattr(obj, 'max_bins', params['max_bins'])\n\n if 'max_depth' not in params or params['max_depth'] is None:\n setattr(obj, 'max_depth', None)\n else:\n setattr(obj, 'max_depth', params['max_depth'])\n\n if 'max_leaf_nodes' not in params or params['max_leaf_nodes'] is None:\n setattr(obj, 'max_leaf_nodes', 31)\n else:\n setattr(obj, 'max_leaf_nodes', params['max_leaf_nodes'])\n\n if 'min_samples_leaf' not in params or params['min_samples_leaf'] is None:\n setattr(obj, 'min_samples_leaf', 20)\n else:\n setattr(obj, 'min_samples_leaf', params['min_samples_leaf'])\n\n if 'random_state' in params:\n setattr(obj, 'random_state', params['random_state'])\n else:\n setattr(obj, 'random_state', None)\n\n if 'scoring' in params:\n setattr(obj, 'scoring', params['scoring'])\n else:\n setattr(obj, 'scoring', None)\n\n if 'verbose' not in params or params['verbose'] is None:\n setattr(obj, 'verbose', False)\n else:\n setattr(obj, 'verbose', True)\n\n return obj", "def _build_param_dict(self):\n # Add parameter handlers to parameter dict. \n self._param_dict.add(SBE37Parameter.OUTPUTSAL,\n r'(do not )?output salinity with each sample',\n lambda match : False if match.group(1) else True,\n self._true_false_to_string)\n self._param_dict.add(SBE37Parameter.OUTPUTSV,\n r'(do not )?output sound velocity with each sample',\n lambda match : False if match.group(1) else True,\n self._true_false_to_string)\n self._param_dict.add(SBE37Parameter.NAVG,\n r'number of samples to average = (\\d+)',\n lambda match : int(match.group(1)),\n self._int_to_string)\n self._param_dict.add(SBE37Parameter.SAMPLENUM,\n r'samplenumber = (\\d+), free = \\d+',\n lambda match : int(match.group(1)),\n self._int_to_string)\n self._param_dict.add(SBE37Parameter.INTERVAL,\n r'sample interval = (\\d+) seconds',\n lambda match : int(match.group(1)),\n self._int_to_string)\n self._param_dict.add(SBE37Parameter.STORETIME,\n r'(do not )?store time with each sample',\n lambda match : False if match.group(1) else True,\n self._true_false_to_string)\n self._param_dict.add(SBE37Parameter.TXREALTIME,\n r'(do not )?transmit real-time data',\n lambda match : False if match.group(1) else True,\n self._true_false_to_string)\n self._param_dict.add(SBE37Parameter.SYNCMODE,\n r'serial sync mode (enabled|disabled)',\n lambda match : False if (match.group(1)=='disabled') else True,\n self._true_false_to_string)\n self._param_dict.add(SBE37Parameter.SYNCWAIT,\n r'wait time after serial sync sampling = (\\d+) seconds',\n lambda match : int(match.group(1)),\n self._int_to_string)\n self._param_dict.add(SBE37Parameter.TCALDATE,\n r'temperature: +((\\d+)-([a-zA-Z]+)-(\\d+))',\n lambda match : self._string_to_date(match.group(1), '%d-%b-%y'),\n self._date_to_string)\n self._param_dict.add(SBE37Parameter.TA0,\n r' +TA0 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.TA1,\n r' +TA1 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n 
self._param_dict.add(SBE37Parameter.TA2,\n r' +TA2 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.TA3,\n r' +TA3 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.CCALDATE,\n r'conductivity: +((\\d+)-([a-zA-Z]+)-(\\d+))',\n lambda match : self._string_to_date(match.group(1), '%d-%b-%y'),\n self._date_to_string)\n self._param_dict.add(SBE37Parameter.CG,\n r' +G = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.CH,\n r' +H = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.CI,\n r' +I = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.CJ,\n r' +J = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.WBOTC,\n r' +WBOTC = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.CTCOR,\n r' +CTCOR = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.CPCOR,\n r' +CPCOR = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PCALDATE,\n r'pressure .+ ((\\d+)-([a-zA-Z]+)-(\\d+))',\n lambda match : self._string_to_date(match.group(1), '%d-%b-%y'),\n self._date_to_string)\n self._param_dict.add(SBE37Parameter.PA0,\n r' +PA0 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PA1,\n r' +PA1 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PA2,\n r' +PA2 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PTCA0,\n r' +PTCA0 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PTCA1,\n r' +PTCA1 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PTCA2,\n r' +PTCA2 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PTCB0,\n r' +PTCSB0 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PTCB1,\n r' +PTCSB1 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.PTCB2,\n r' +PTCSB2 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.POFFSET,\n r' +POFFSET = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.RCALDATE,\n r'rtc: +((\\d+)-([a-zA-Z]+)-(\\d+))',\n lambda match : 
self._string_to_date(match.group(1), '%d-%b-%y'),\n self._date_to_string)\n self._param_dict.add(SBE37Parameter.RTCA0,\n r' +RTCA0 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.RTCA1,\n r' +RTCA1 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)\n self._param_dict.add(SBE37Parameter.RTCA2,\n r' +RTCA2 = (-?\\d.\\d\\d\\d\\d\\d\\de[-+]\\d\\d)',\n lambda match : float(match.group(1)),\n self._float_to_string)", "def prior_param(self, param_dict={}): \n self.param_obj = Params(param_dict) # parameter object \n self.param_names = param_dict.keys() \n self.n_params = len(param_dict.keys()) # number of parameters in theta ", "def setParams(self, p = 2):\n self.p = p\n self.l = p - 1\n self.id_ntot = {}\n self.id_y = {}\n self.id_W = {}\n self.id_X = {}\n for i in self.uniids:\n tracker = (self.data['id'] == i)\n self.id_ntot.update({i: np.sum(tracker)})\n self.id_y.update({i:\n self.data['weight'][tracker].reshape(np.sum(tracker), 1)})\n self.id_W.update({i: self._designMatrix_(p, tracker)})\n self.id_X.update({i:\n self._designMatrix_(self.l+1,tracker,is_X=True)})\n self.id_Z = self.id_W.copy()", "def set_rand_params(self) -> Dict:\n new_params: Dict = self.gen_params()\n self.set_params(new_params)\n return new_params", "def get_prob_params():\n prob = Namespace()\n prob.study_name = STUDY_NAME\n if IS_DEBUG:\n prob.num_trials = 3\n prob.max_capital = 10\n else:\n prob.num_trials = NUM_TRIALS\n prob.max_capital = MAX_CAPITAL\n # Common\n prob.time_distro = TIME_DISTRO\n prob.num_workers = NUM_WORKERS\n _study_params = {\n 'branin': ('synthetic/branin/config_mf.json',\n branin_mf, cost_branin_mf, 0.1, 0, 1),\n 'hartmann3_2': ('synthetic/hartmann3_2/config_mf.json',\n hartmann3_2_mf, cost_hartmann3_2_mf, 0.1, 0, 1),\n 'hartmann6_4': ('synthetic/hartmann6_4/config_mf.json',\n hartmann6_4_mf, cost_hartmann6_4_mf, 0.1, 0, 1),\n 'borehole_6': ('synthetic/borehole_6/config_mf.json',\n borehole_6_mf, cost_borehole_6_mf, 1, 0, 1),\n 'park2_4': ('synthetic/park2_4/config_mf.json',\n park2_4_mf, cost_park2_4_mf, 0.3, 0, 1),\n 'park2_3': ('synthetic/park2_3/config_mf.json',\n park2_3_mf, cost_park2_3_mf, 0.1, 0, 1),\n 'park1_3': ('synthetic/park1_3/config_mf.json',\n park1_3_mf, cost_park1_3_mf, 0.5, 0, 1),\n }\n (domain_config_file_suffix, raw_func, raw_fidel_cost_func, _fc_noise_scale,\n _initial_pool_size, _) = _study_params[prob.study_name]\n domain_config_file = os.path.join(DRAGONFLY_EXPERIMENTS_DIR, domain_config_file_suffix)\n # noisy\n prob.noisy_evals = NOISY_EVALS\n if NOISY_EVALS:\n noise_type = 'gauss'\n noise_scale = _fc_noise_scale\n else:\n noise_type = 'no_noise'\n noise_scale = None\n # Create domain, function_caller and worker_manager\n config = load_config_file(domain_config_file)\n func_caller = get_multifunction_caller_from_config(raw_func, config,\n raw_fidel_cost_func=raw_fidel_cost_func, noise_type=noise_type,\n noise_scale=noise_scale)\n # Set max_capital\n if hasattr(func_caller, 'fidel_cost_func'):\n prob.max_capital = prob.max_capital * \\\n func_caller.fidel_cost_func(func_caller.fidel_to_opt)\n else:\n prob.max_capital = prob.max_capital\n # Store everything in prob\n prob.func_caller = func_caller\n prob.worker_manager = SyntheticWorkerManager(prob.num_workers,\n time_distro='caller_eval_cost')\n prob.save_file_prefix = prob.study_name + ('-debug' if IS_DEBUG else '')\n prob.methods = METHODS\n prob.save_results_dir = 
SAVE_RESULTS_DIR\n prob.reporter = get_reporter('default')\n # evaluation options\n prob.evaluation_options = Namespace(prev_eval_points='none',\n initial_pool_size=_initial_pool_size)\n return prob", "def get_param_dict(self, theta):\r\n # create dictionary setting parameters in the feature map to a value corresponding to the random samples\r\n vf_param = list(self.circuit.parameters)\r\n zip_it = zip(vf_param, theta)\r\n param_dict = dict(zip_it)\r\n\r\n return param_dict", "def create_param_grid ( param_grid: Dict ):\n \n return (\n dict ( zip ( param_grid.keys(), instance ) )\n for instance in product ( * param_grid.values() )\n ) # End create_param_grid", "def create_param_grid ( param_grid: Dict ):\n \n return (\n dict ( zip ( param_grid.keys(), instance ) )\n for instance in product ( * param_grid.values() )\n ) # End create_param_grid", "def __init__(self, input_size, hidden_size, output_size, std=1e-4):\n self.params = {}\n self.params['W1'] = std * np.random.randn(input_size, hidden_size)\n self.params['b1'] = np.zeros(hidden_size)\n self.params['W2'] = std * np.random.randn(hidden_size, output_size)\n self.params['b2'] = np.zeros(output_size)", "def glcmProps(P, prop='contrast'):\n\n (num_level, num_level2, num_dist, num_angle) = P.shape\n assert num_level == num_level2\n assert num_dist > 0\n assert num_angle > 0\n\n # create weights for specified property\n I, J = np.ogrid[0:num_level, 0:num_level]\n if prop == 'contrast':\n weights = (I - J) ** 2\n elif prop in ['ASM', 'energy', 'correlation']:\n pass\n elif prop == 'mean':\n weights, _ = np.mgrid[0:num_level, 0:num_level]\n elif prop == 'dissimilarity':\n weights = np.abs(I - J)\n elif prop == 'homogeneity':\n weights = 1. / (1. + (I - J) ** 2)\n else:\n raise ValueError('%s is an invalid property' % (prop))\n\n # compute property for each GLCM\n if prop == 'energy':\n asm = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]\n results = np.sqrt(asm)\n elif prop == 'ASM':\n results = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]\n elif prop == 'correlation':\n results = np.zeros((num_dist, num_angle), dtype=np.float64)\n I = np.array(range(num_level)).reshape((num_level, 1, 1, 1))\n J = np.array(range(num_level)).reshape((1, num_level, 1, 1))\n diff_i = I - np.apply_over_axes(np.sum, (I * P), axes=(0, 1))[0, 0]\n diff_j = J - np.apply_over_axes(np.sum, (J * P), axes=(0, 1))[0, 0]\n\n std_i = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_i) ** 2),\n axes=(0, 1))[0, 0])\n std_j = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_j) ** 2),\n axes=(0, 1))[0, 0])\n cov = np.apply_over_axes(np.sum, (P * (diff_i * diff_j)),\n axes=(0, 1))[0, 0]\n\n # handle the special case of standard deviations near zero\n mask_0 = std_i < 1e-15\n mask_0[std_j < 1e-15] = True\n results[mask_0] = 1\n\n # handle the standard case\n mask_1 = mask_0 == False\n results[mask_1] = cov[mask_1] / (std_i[mask_1] * std_j[mask_1])\n elif prop in ['contrast', 'dissimilarity', 'homogeneity', 'mean']:\n weights = weights.reshape((num_level, num_level, 1, 1))\n results = np.apply_over_axes(np.sum, (P * weights), axes=(0, 1))[0, 0]\n\n return results", "def init_P_PHM_GIVEN_PHI():\n global P_PHM_GIVEN_PHI\n for i in INTERFACE_LEVEL_ACTIONS: # ui\n P_PHM_GIVEN_PHI[i] = collections.OrderedDict()\n for j in INTERFACE_LEVEL_ACTIONS: # um\n if i == j:\n # try to weight the true command more for realistic purposes. 
Can be offset by using a high UM_GIVEN_UI_NOISE\n P_PHM_GIVEN_PHI[i][j] = 1.0\n else:\n # P_PHM_GIVEN_PHI[i][j] = np.random.random()*UM_GIVEN_UI_NOISE#IF UM_GIVEN_UI_NOISE is 0, then the p(um|ui) is a deterministic mapping\n P_PHM_GIVEN_PHI[i][j] = 0.0\n\n delta_dist = np.array(P_PHM_GIVEN_PHI[i].values())\n uniform_dist = (1.0 / len(INTERFACE_LEVEL_ACTIONS)) * np.ones(len(INTERFACE_LEVEL_ACTIONS))\n blended_dist = (1 - PHM_GIVEN_PHI_NOISE) * delta_dist + PHM_GIVEN_PHI_NOISE * uniform_dist # np.array\n for index, j in enumerate(INTERFACE_LEVEL_ACTIONS):\n P_PHM_GIVEN_PHI[i][j] = blended_dist[index]", "def get_parameters(self):\n # Get the parameters from the parent class\n params = super(LPEvalSplit, self).get_parameters()\n\n # Add the LP specific parameters\n params.update({\"owa\": self._owa, \"fe_ratio\": self._fe_ratio})\n return params", "def get_params(self) -> Dict:\n params: Dict = {}\n params['g_leak']: float = self.leak.get_g()\n params['g_nav']: float = self.nav.get_g()\n params['g_kvhh']: float = self.kvhh.get_g()\n params['g_kva']: float = self.kva.get_g()\n params['g_kvsi']: float = self.kvsi.get_g()\n params['g_cav']: float = self.cav.get_g()\n params['g_kca']: float = self.kca.get_g()\n params['g_nap']: float = self.nap.get_g()\n params['g_kir']: float = self.kir.get_g()\n params['g_ampar']: float = self.ampar.get_g()\n params['g.nmdar']: float = self.nmdar.get_g()\n params['g_gabar']: float = self.gabar.get_g()\n params['t_Ca']: float = self.tau_ca\n return params", "def update_parameters(self):\n self.alignment_factor = rospy.get_param('/dyn_reconf/alignment_factor')\n self.cohesion_factor = rospy.get_param('/dyn_reconf/cohesion_factor')\n self.separation_factor = rospy.get_param('/dyn_reconf/separation_factor')\n self.avoid_factor = rospy.get_param('/dyn_reconf/avoid_factor')\n self.max_speed = rospy.get_param('/dyn_reconf/max_speed')\n self.max_force = rospy.get_param('/dyn_reconf/max_force')\n self.friction = rospy.get_param('/dyn_reconf/friction')\n self.crowd_radius = rospy.get_param('/dyn_reconf/crowd_radius')\n self.search_radius = rospy.get_param('/dyn_reconf/search_radius')\n\n rospy.loginfo(rospy.get_caller_id() + \" -> Parameters updated\")\n if DEBUG:\n print('alignment_factor: ', self.alignment_factor)\n print('cohesion_factor: ', self.cohesion_factor)\n print('separation_factor: ', self.separation_factor)\n print('avoid_factor: ', self.avoid_factor)\n print('max_speed: ', self.max_speed)\n print('max_force: ', self.max_force)\n print('friction: ', self.friction)\n print('crowd_radius: ', self.crowd_radius)\n print('search_radius: ', self.search_radius)", "def random_parameters():\n res = dict()\n res[\"population_size\"] = random.randrange(2, 21)\n res[\"mutation_prob\"] = random.choice([0.02, 0.05, 0.10, 0.20, 0.30, 0.40, 0.50])\n res[\"crossover\"] = random.choice([True, False])\n res[\"selection\"] = random.choice([True, False])\n res[\"sigma\"] = random.choice([0.1, 0.25, 0.5, 1])\n res[\"crossover_method\"] = random.choice([\"single_swap\", \"uniform_swap\", \"arithmetic\"])\n res[\"selection_method\"] = random.choice([\"truncated\", \"fitness_based\", \"rank_based\"])\n res[\"best_rate\"] = random.choice([0.2, 0.3, 0.5])\n res[\"n_parents\"] = random.choice([2, 3, 4])\n res[\"elitism\"] = random.choice([True, False])\n return res", "def compute_derived_parameters(cls, fdict):\n cgg = fdict['cgd'] + fdict['cgs']\n return dict(\n cgg=cgg,\n cdd=fdict['cgd'] + fdict['cds'],\n vstar=2.0 * (fdict['ids'] / fdict['gm']),\n gain=fdict['gm'] / fdict['gds'],\n 
ft=fdict['gm'] / (2.0 * np.pi * cgg),\n )", "def set_params(self):\n \n lo, hi = self.R.get((self.h, self.w, self.m), (0.0, 0.0))\n params.update({\n 'gamma' : 1.0, # minesweeper is a finite horizon game\n 'epsilon': 0.0,\n 'K': 16,\n 'R_lo': lo,\n 'R_hi': hi,\n 'max_depth': self.h * self.w / 2,\n 'c':hi-lo\n })", "def genParametersOldFormat(self, **kwargs):\n\n # if, for some reason, you don't want to be changign the new dict\n newDict = dict(kwargs)\n # One big change was only saving statistical information of the FEL pulses\n # etc. Caclulate that information and update the dict.\n if isinstance(kwargs.get(\"fieldStrength\", {}), list):\n stats = [\"kurtosis\", \"mean\", \"skew\", \"std\"]\n sets = [\"fieldStrength\", \"fieldInt\", \"cdRatios\", \"fpTime\", \"pyroVoltage\"]\n newDict[\"fel_pulses\"] = sum(kwargs[\"fel_pulses\"])\n\n newDict.update(\n {set: {stat: np.mean(\n [x.get(stat, '-1') for x in kwargs[set]] )\n for stat in stats}\n for set in sets}\n )\n\n return self.genParameters(**newDict)", "def getInitParams(self):\n paramDict = super().getInitParams()\n paramDict['method'] = self.method\n paramDict['dimension'] = self.dimension\n paramDict['rank'] = self.rank\n paramDict['mu'] = self.mu\n paramDict['covariance'] = self.covariance\n return paramDict", "def compute_derived_parameters(cls, fdict):\n cgg = fdict['cgd'] + fdict['cgs'] + fdict['cgb']\n return dict(\n cgg=cgg,\n cdd=fdict['cgd'] + fdict['cds'] + fdict['cdb'],\n css=fdict['cgs'] + fdict['cds'] + fdict['csb'],\n cbb=fdict['cgb'] + fdict['cdb'] + fdict['csb'],\n vstar=2.0 * (fdict['ids'] / fdict['gm']),\n gain=fdict['gm'] / fdict['gds'],\n ft=fdict['gm'] / (2.0 * np.pi * cgg),\n )", "def set_params_data(self):\n for key in self.params:\n self.params_data[key] = {}\n self.params_data[key]['x'] = [i[0] for i in self.rand_points]\n self.params_data[key]['y'] = [i[1] for i in self.rand_points]\n self.params_data[key]['z'] = self.generate_random_data(\n min_=self.params[key]['min'],\n max_=self.params[key]['max'],\n len_=len(self.rand_points)\n )\n return self.params_data", "def resampleParams(self, caliStep, iterNO=-1):\n names = self.getNames()\n smcSamples = self.smcSamples[iterNO]\n numSamples = self.numSamples\n numThreads = self.threads if self.threads else cpu_count()\n # posterior probability at caliStep is used as the proposal distribution\n proposal = self.posterior[:, caliStep]\n newSmcSamples, newparamsFile, gmm, maxNumComponents = \\\n resampledParamsTable(keys=names, smcSamples=smcSamples, proposal=proposal, num=numSamples,\n threads=numThreads,\n maxNumComponents=self.__maxNumComponents, priorWeight=self.__priorWeight,\n covType=self.__covType,\n tableName='smcTable%i.txt' % (iterNO + 1))\n self.smcSamples.append(newSmcSamples)\n self.paramsFiles.append(newparamsFile)\n return gmm, maxNumComponents", "def __iter__(self):\n for p in self.param_grid:\n # Always sort the keys of a dictionary, for reproducibility\n modstr = '%s__' % self.modality\n items = sorted([(k.replace('clf__'+modstr, ''), v) for k, v in p.items() if modstr in k])\n if not items:\n yield {}\n else:\n keys, values = zip(*items)\n for v in product(*values):\n params = dict(zip(keys, v))\n yield params", "def construct_param_dict(params,K_RC,K_CP,m_P):\n ###scaling constants\n w=params['w']\n pd=params['pd'] # in 3D and 0.21 in 2D\n pv=params['pv']\n Er=params['Er'] ;Ek=params['Ek']\n ER=params['ER'];EC=params['EC'];EP=params['EP'];\n Eq1=params['Eq1'];Eq2=params['Eq2']\n\n\n #capture success function\n a = params['a']\n b = params['b']\n c 
= params['c']\n formC = params['formC']\n formPC = params['formPC']\n formPR = params['formPR']\n \n ###variables\n TR= params['TR'] ;TC= params['TC'];TP=params['TP'];D_R= params['D_R']; D_C= params['D_C']\n K_RP=K_RC*K_CP\n fmC=params['fmC'];thermyR=params['thermyR']\n thermyC=params['thermyC'];thermyP=params['thermyP']\n fmPR=params['fmPR']\n fmPC=params['fmPC']\n m_C = K_CP*m_P;m_R = K_RP*m_P\n ###normalization constants and boltzmann constant\n r0 = params['r0']\n k0 = params['k0'] # will depend on the productivity of the habitat\n a01 = a02 = params['a012'] # will depedend on the dimension of the habitat \n a03 = params['a03']\n d0= params['d0']\n q10 = params['q10'];q20 = params['q20'];\n v0R = params['v0R'];v0C =params['v0C'];v0P =params['v0P'];k = b_k\n hC0 = params['hC0'];hP0 = params['hP0'] \n \n #intrapopulation parameters\n q1=set_q1(q10,m_C,w,Eq1,TR,k)\n q2=set_q2(q20,m_P,w,Eq2,TC,k)\n K=set_K(k0,m_R,w,Ek,TR,k)\n r=set_r(r0,m_R,w,Er,TR,k)\n\n #interpopulation parameters\n a1=set_alfa(m_C,a01,K_RC,pv,pd,TR,TC,ER,EC,D_R,v0R,v0C,g,alfa,fmC,thermyR,thermyC,k,a,b,c,formC)\n a2=set_alfa(m_P,a02,K_RP,pv,pd,TR,TP,ER,EP,D_R,v0R,v0P,g,alfa,fmPR,thermyR,thermyP,k,a,b,c,formPR)\n a3=set_alfa(m_P,a03,K_CP,pv,pd,TC,TP,EC,EP,D_C,v0C,v0P,g,alfa,fmPC,thermyC,thermyP,k,a,b,c,formPC)\n\n t_hp = set_th(hP0,m_P,w,EP,k,TP)\n t_hc = set_th(hC0,m_C,w,EC,k,TC)\n param_dict={'q1':q1,'q2':q2,'K':K,'r':r,'a1':a1,'a2':a2,'a3':a3,'t_hp':t_hp,'t_hc':t_hc}\n \n return param_dict", "def _build_space(self, param_grid):\n if self.verbose>9:\n 'Building param space...'\n \n _warnings.filterwarnings('ignore')\n \n param_grid = param_grid.copy()\n space = {}\n for key in param_grid.keys():\n params = param_grid[key]\n \n if self.verbose>9:\n print('\\tinput:',key, params)\n \n type_str = str(type(params[0]))\n\n if 'float' in type_str or 'int' in type_str:\n \n min_ = min(params)\n max_ = max(params)\n log10_min_ = _np.log10(min_)\n log10_max_ = _np.log10(max_)\n\n if round(log10_max_)-round(log10_min_)>1 and round(log10_max_)-round(log10_min_)!=_np.inf: # use uniform distribution on log spacing \n \n space['log10.'+key] = _hyperopt.hp.uniform(key, log10_min_, log10_max_)\n \n if self.verbose>9:\n print('\\toutput:','log10.'+key, 'uniform', log10_min_, log10_max_)\n \n else:\n if 'int' in type_str:\n space[key] = _hyperopt.hp.quniform(key, min_, max_, 1)\n \n if self.verbose>9:\n print('\\toutput:',key, 'quniform', min_, max_)\n \n elif 'float' in type_str:\n space[key] = _hyperopt.hp.uniform(key, min_, max_)\n \n if self.verbose>9:\n print('\\toutput:',key, 'uniform', min_, max_)\n \n \n elif 'str' in type_str:\n space[key] = _hyperopt.hp.choice(key, [i for i in range(len(params))])\n \n if self.verbose>9:\n print('\\toutput:',key, 'choice', [i for i in range(len(params))])\n\n else:\n raise Exception('type(params[0]) is '+type_str+'. 
This type of hyperparameter is not yet supported.')\n\n assert(len(space.keys())==len(param_grid.keys())), 'len(space.keys())='+str(len(space.keys()))+', which is not equal to len(param_grid.keys())='+str(len(param_grid.keys()))\n \n if self.verbose>9:\n print('...finished building space')\n \n _warnings.filterwarnings('default')\n\n return space", "def _get_guide_priors(self):\n if not self._alpha_guide_prior_params:\n # create initial parameters\n params = self._get_prior_params()\n # register all parameters in pyro\n for p, v in iteritems(params):\n pyro.param(p, v)\n self._alpha_guide_prior_params = dict(\n self._param_store.named_parameters()\n )\n else:\n # register all parameters in pyro\n for p, v in iteritems(self._alpha_guide_prior_params):\n pyro.param(p, v)\n return self._params2probs(self._alpha_guide_prior_params)", "def get_params(self):\n return {'classifier': self.classifier,\n 'grid_param': self.grid_param,\n 'n_param_comb': self.n_param_comb,\n 'top_bagging': self.bagging,\n 'bagging_param': self.bagging_param,\n 'comb_seed': self.comb_seed}", "def _sample_hyperparameters(self):\n\t\tconfig = {}\n\t\tfor attr, option in self._config_options.items():\n\t\t\tprint('Sampling', attr)\n\t\t\tconfig[attr] = option.sample()\n\t\treturn config", "def __init__(self, parameter_dictionary):\n super().__init__(parameter_dictionary)\n\n self.model_string = \"gauss\"\n model_dictionary = self._get_model_dict(__class__.default_parameters)\n\n # wake expansion parameters\n self.ka = model_dictionary[\"ka\"]\n self.kb = model_dictionary[\"kb\"]\n\n # near wake / far wake boundary parameters\n self.alpha = model_dictionary[\"alpha\"]\n self.beta = model_dictionary[\"beta\"]\n\n # GCH Parameters\n self.calculate_VW_velocities = model_dictionary[\"calculate_VW_velocities\"]\n self.use_yaw_added_recovery = model_dictionary[\"use_yaw_added_recovery\"]\n self.eps_gain = model_dictionary[\"eps_gain\"]", "def setup_parameters(self):\n structure = self.ctx.structure_initial_primitive\n ecutwfc = []\n ecutrho = []\n\n for kind in structure.get_kind_names():\n try:\n dual = self.ctx.protocol['pseudo_data'][kind]['dual']\n cutoff = self.ctx.protocol['pseudo_data'][kind]['cutoff']\n cutrho = dual * cutoff\n ecutwfc.append(cutoff)\n ecutrho.append(cutrho)\n except KeyError as exception:\n self.abort_nowait('failed to retrieve the cutoff or dual factor for {}'.format(kind))\n\n natoms = len(structure.sites)\n conv_thr = self.ctx.protocol['convergence_threshold'] * natoms\n\n self.ctx.inputs['parameters'] = {\n 'CONTROL': {\n 'restart_mode': 'from_scratch',\n 'tstress': self.ctx.protocol['tstress'],\n },\n 'SYSTEM': {\n 'ecutwfc': max(ecutwfc),\n 'ecutrho': max(ecutrho),\n 'smearing': self.ctx.protocol['smearing'],\n 'degauss': self.ctx.protocol['degauss'],\n 'occupations': self.ctx.protocol['occupations'],\n },\n 'ELECTRONS': {\n 'conv_thr': conv_thr,\n }\n }", "def _compute(self, global_step, params, batch_loss):\n individual_losses = get_individual_losses(global_step)\n individual_gradients = autograd_individual_gradients(individual_losses, params)\n layerwise = [\n self._compute_histogram(p, igrad)\n for p, igrad in zip(params, individual_gradients)\n ]\n\n hist = sum(out[0] for out in layerwise)\n edges = layerwise[0][1]\n\n result = {\"hist\": hist, \"edges\": edges}\n\n if self._keep_individual:\n result[\"param_groups\"] = len(params)\n\n for idx, (hist, edges) in enumerate(layerwise):\n result[f\"param_{idx}\"] = {\"hist\": hist, \"edges\": edges}\n\n return result", "def getInitParams(self):\n 
paramDict = BoostDistribution.getInitParams(self)\n paramDict['p'] = self.p\n return paramDict", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['p'] = self.p\n return paramDict", "def load_hyperparams():\n #Load halo data (encoding='latin1' for Python3)\n with open('../Data/halo_data.pkl', 'rb') as halo_input:\n halo_data = pickle.load(halo_input, encoding='latin1')\n\n #Load interpolator\n with open('../Data/interpolator.pkl', 'rb') as interp:\n vpeak_Mr_interp = pickle.load(interp, encoding='latin1')\n\n #Cosmological params\n cosmo_params = {}\n cosmo_params['omega_b'] = 0.0 \n cosmo_params['omega_m'] = 0.286\n cosmo_params['h'] = 0.7\n\n #hyperparameters\n hparams = {}\n hparams['mpeak_cut'] = 10**7\n hparams['vpeak_cut'] = 10.\n hparams['vmax_cut'] = 9.\n hparams['orphan_radii_cut'] = 300.\n hparams['chi'] = 1.\n hparams['R0'] = 10.0\n hparams['gamma_r'] = 0.0\n hparams['beta'] = 0.\n hparams['O'] = 1.\n hparams['n_realizations'] = 5\n\n #prior hyperparameters\n prior_hparams = {}\n prior_hparams['alpha'] = np.array([-2.,-1.1])\n prior_hparams['sigma_M'] = np.array([0.,2.])\n prior_hparams['M50'] = np.array([7.35,10.85])\n prior_hparams['sigma_mpeak'] = np.array([1e-5,1.])\n prior_hparams['B'] = np.array([1e-5,3.])\n prior_hparams['A'] = np.array([10.,500.])\n prior_hparams['sigma_r'] = np.array([1e-5,2.])\n prior_hparams['n'] = np.array([0.,2.])\n prior_hparams['Mhm'] = np.array([5.,9.])\n\n #Orphan hyperparameters\n orphan_params = {}\n orphan_params['eps'] = 0.01 \n orphan_params['df'] = 1\n\n #Simulation and LMC indices\n sim_indices = {}\n sim_indices['host'] = [0,1]\n sim_indices['LMC'] = [0,0]\n\n return hparams, prior_hparams, cosmo_params, orphan_params, halo_data, sim_indices, vpeak_Mr_interp", "def build_param_and_data_dict(self, s_gen, xr, yr, r):\n # Note it is important to create a new dictionary here so that\n # we reset the data dict after generating new data\n self.data = {\n 'DT': self.dt,\n 'motion_prior': self.motion_prior,\n 'motion_gen': self.motion_gen,\n 'ds': self.ds,\n 'de': self.de,\n 'L0': self.l0,\n 'L1': self.l1,\n 'GAMMA': self.gamma,\n 'lamb': self.lamb,\n 'fista_c': self.fista_c,\n 'D': self.tc.t_D.get_value(),\n 'N_L': self.n_l,\n 'N_T': self.n_t,\n 'L_I': self.l_i,\n 'L_N': self.l_n,\n 'N_g_itr': self.n_g_itr,\n 'N_itr': self.n_itr,\n 'N_P': self.n_p,\n 'XS': self.tc.t_XS.get_value(),\n 'YS': self.tc.t_YS.get_value(),\n 'XE': self.tc.t_XE.get_value(),\n 'YE': self.tc.t_YE.get_value(),\n 'Var': self.tc.t_Var.get_value(),\n 'G': self.tc.t_G.get_value(),\n 'tau': self.tau,\n 'XR': xr, 'YR': yr,\n 'IE': self.tc.t_IE.get_value(),\n 'S_gen': s_gen,\n 'S_gen_name': self.s_gen_name,\n 'R': r,\n 'Ips': self.Ips,\n 'FP': self.FP,\n 'quad_reg': self.quad_reg,\n 'quad_reg_mean': self.quad_reg_mean,\n 'drop_prob': self.drop_prob,\n 's_range': self.s_range,\n }", "def _setup_from_parameters(self,params):\n\n # SHOULD WE CHECK HERE THAT INPUT PARAMETERS HAVE SAME KP / Z_STAR ?\n\n # copy input dictionary\n self.linP_params=params.copy()\n\n # will add polynomial describing the log power, around kp_kms\n linP_kms_2=0.5*params['alpha_star']\n linP_kms_1=params['n_star']\n A_star=(2*np.pi**2)*params['Delta2_star']/self.kp_kms**3\n linP_kms_0=np.log(A_star)\n linP_kms = np.poly1d([linP_kms_2,linP_kms_1,linP_kms_0])\n # why are we storing this poly1d object? 
When do we actually use it?\n self.linP_params['linP_kms']=linP_kms", "def sample(self):\n # Initialize nested dictionary of kwargs\n kwargs = Dict()\n\n # Realize samples\n for comp, param_name in self.params_to_realize:\n hyperparams = getattr(self, comp)[param_name].copy()\n kwargs[comp][param_name] = self.sample_param(hyperparams)\n\n # Convert any q, phi into e1, e2 as required by lenstronomy\n for comp in self.comps_qphi_to_e1e2: # e.g. 'lens_mass'\n q = kwargs[comp].pop('q')\n phi = kwargs[comp].pop('phi')\n e1, e2 = param_util.phi_q2_ellipticity(phi, q)\n kwargs[comp]['e1'] = e1\n kwargs[comp]['e2'] = e2\n\n # Source pos is defined wrt the lens pos\n kwargs['src_light']['center_x'] += kwargs['lens_mass']['center_x']\n kwargs['src_light']['center_y'] += kwargs['lens_mass']['center_y']\n\n # Ext shear is defined wrt the lens center\n kwargs['external_shear']['ra_0'] = kwargs['lens_mass']['center_x']\n kwargs['external_shear']['dec_0'] = kwargs['lens_mass']['center_y']\n \n if 'lens_light' in self.components:\n # Lens light shares center with lens mass\n kwargs['lens_light']['center_x'] = kwargs['lens_mass']['center_x']\n kwargs['lens_light']['center_y'] = kwargs['lens_mass']['center_y']\n return kwargs", "def _set_primary_behaviors(self):\n\n for component_model in self.model_dictionary.values():\n gal_type = component_model.gal_type\n feature_name = component_model.feature_name\n\n try:\n component_model_galprop_dtype = component_model._galprop_dtypes_to_allocate\n except AttributeError:\n component_model_galprop_dtype = np.dtype([])\n\n methods_to_inherit = list(set(\n component_model._methods_to_inherit))\n\n for methodname in methods_to_inherit:\n new_method_name = methodname + '_' + gal_type\n new_method_behavior = self._update_param_dict_decorator(\n component_model, methodname)\n setattr(self, new_method_name, new_method_behavior)\n setattr(getattr(self, new_method_name), \n '_galprop_dtypes_to_allocate', component_model_galprop_dtype)\n setattr(getattr(self, new_method_name), 'gal_type', gal_type)\n setattr(getattr(self, new_method_name), 'feature_name', feature_name)\n\n attrs_to_inherit = list(set(\n component_model._attrs_to_inherit))\n for attrname in attrs_to_inherit:\n new_attr_name = attrname + '_' + gal_type\n attr = getattr(component_model, attrname)\n setattr(self, new_attr_name, attr)\n\n # Repeatedly overwrite self.threshold \n # This is harmless provided that all gal_types are ensured to have the same threshold, \n # which is guaranteed by the _test_dictionary_consistency method\n if hasattr(component_model, 'threshold'):\n setattr(self, 'threshold_' + gal_type, component_model.threshold)\n self.threshold = getattr(self, 'threshold_' + gal_type)", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['apex' ] = self.apex\n paramDict['min' ] = self.min\n paramDict['max' ] = self.max\n return paramDict", "def getInitParams(self):\n paramDict = super().getInitParams()\n paramDict['p'] = self.p\n return paramDict", "def paramDetails(cls):\n return {\n 'dim': (10, 20, 2, 20),\n 'nIter': (1, 10, 2, 5),\n 'lamb': (.1, 1., .1, .05),\n 'alph': (30, 50, 5, 40)\n }", "def __init__(self, input_size, hidden_size, output_size, weight_init_std=0.01):\n\n self.params = {}\n self.params['W1'] = weight_init_std * \\\n np.random.randn(input_size, hidden_size)\n self.params['b1'] = np.zeros(hidden_size)\n self.params['W2'] = weight_init_std * \\\n np.random.randn(hidden_size, output_size)\n self.params['b2'] = np.zeros(output_size)", "def 
enrich_params(self):\n\n self.params['nmaps'] = len(self.params['probes']) + np.sum(self.params['spins'] == 2)\n\n pass", "def get_parameters(self):\n # Get the parameters from the parent class\n params = super(NREvalSplit, self).get_parameters()\n\n # Add the LP specific parameters\n params.update({\"samp_frac\": self._samp_frac})\n return params", "def _set_model_parameters(self, verbose=False):\n from scipy.special import gamma\n\n z0 = self.z0\n\n # set parameters that are constants\n p_v, d_v, cs0, sigma, vout0 = (1, 2, 6.7, 0.1, 25.0)\n p_vB, d_vB, Mach0, p_M, d_M = (4, 2, 0.5, 1, 3)\n\n # calculate amplitudes that make the pdf integrate to 1\n A_v = np.log(10)*p_v/gamma(d_v/p_v)\n A_cs = np.log(10)/np.sqrt(2*np.pi)/sigma\n A_vB = np.log(10)*p_vB/gamma(d_vB/p_vB)\n A_M = np.log(10)*p_M/gamma(d_M/p_M)\n\n # store them in dictionaries\n self.cool_params = dict(A_v=A_v, p_v=p_v, d_v=d_v,\n A_cs=A_cs, cs0=cs0, sigma=sigma, vout0=vout0)\n self.hot_params = dict(A_vB=A_vB, p_vB=p_vB, d_vB=d_vB,\n A_M=A_M, Mach0=Mach0,p_M=p_M,d_M=d_M)\n # SN related parameters that set the reference values for loading factors\n self.params = dict(Esn=1.e51*au.erg, mstar=95.5*au.M_sun, vcool=200*au.km/au.s,\n Mej=10.*au.M_sun, ZSN=0.2, ZISM0=0.02)\n self.params['vej'] = np.sqrt(2.0*self.params['Esn']/self.params['Mej']).to('km/s')\n self.ref_params = dict(Mref=self.params['mstar'],\n pref=self.params['Esn']/(2*self.params['vcool']),\n Eref=self.params['Esn'],\n Zref=self.params['Mej']*self.params['ZSN'])\n\n # coefficients used in conversion from mass to other PDFs\n self.vp = (self.ref_params['pref']/self.params['mstar']).to('km/s').value\n self.vE = np.sqrt(self.ref_params['Eref']/self.params['mstar']).to('km/s').value\n self.Ze = (self.ref_params['Zref']/self.params['mstar']).cgs.value\n\n # parameters for scaling relations from Paper~I\n a = np.array(fit_alpha[z0])\n b = np.array(fit_beta[z0])\n\n self.scaling_params = dict(a=a, b=b)\n if z0 == '2H':\n self.cool_params['vout0'] = 45\n self.cool_params['cs0'] = 7.5\n elif z0 == '500':\n self.cool_params['vout0'] = 45\n self.cool_params['cs0'] = 8.5\n elif z0 == '1000':\n self.cool_params['vout0'] = 60\n self.cool_params['cs0'] = 10.0\n self.scaling_params['A'] = np.round(10.**(np.array(self.scaling_params['a'])),2)\n self.scaling_params['p'] = 1.+np.array(self.scaling_params['b'])\n self.enum=dict(M_cool=0, M_int=1, M_hot=2, M_total=3,\n p_cool=4, p_int=5, p_hot=6, p_total=7,\n E_cool=8, E_int=9, E_hot=10, E_total=11,\n Z_cool=12, Z_int=13, Z_hot=14, Z_total=15)\n\n # print parameters\n if verbose:\n self.show_parameters()", "def _build_param_dict(self):\n # The parameter dictionary.\n self._param_dict = ProtocolParameterDict()\n\n # Add parameter handlers to parameter dictionary for instrument configuration parameters.\n self._param_dict.add(Parameter.SAMPLE_INTERVAL,\n '', # this is a driver only parameter\n None,\n int,\n type=ParameterDictType.INT,\n startup_param=True,\n display_name='D1000 Sample Periodicity',\n range=(1, 3600),\n description='Periodicity of D1000 temperature sample in autosample mode: (1-3600)',\n default_value=DEFAULT_SAMPLE_RATE,\n units=Units.SECOND,\n visibility=ParameterDictVisibility.READ_WRITE)\n self._add_setup_param(Parameter.CHANNEL_ADDRESS,\n int,\n type=ParameterDictType.INT,\n display_name='Base Channel Address',\n description='Hex value of ASCII character to ID unit, e.g. 
31 is the ASCII code for 1:'\n ' (30-31, 41-5A, 61-7A)',\n range=(0x30, 0x7A),\n default_value=0x31)\n self._add_setup_param(Parameter.LINEFEED,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Line Feed Flag',\n range={'True': True, 'False': False},\n description='Enable D1000 to generate a linefeed before and after each response:'\n ' (true | false)',\n default_value=False)\n self._add_setup_param(Parameter.PARITY_TYPE,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Parity Type',\n range={'Odd': True, 'Even': False},\n description='Sets the parity: (true:odd | false:even)',\n default_value=False)\n self._add_setup_param(Parameter.PARITY_ENABLE,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Parity Flag',\n range={'True': True, 'False': False},\n description='Enable use of parity bit, a parity error will be issued if detected:'\n ' (true | false)',\n default_value=False)\n self._add_setup_param(Parameter.EXTENDED_ADDRESSING,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Extended Addressing',\n range={'True': True, 'False': False},\n description='Enable extended addressing: (true | false)',\n default_value=False)\n self._add_setup_param(Parameter.BAUD_RATE,\n int,\n type=ParameterDictType.INT,\n display_name='Baud Rate',\n range={'38400': 0, '19200': 1, '9600': 2, '4800': 3, '2400': 4, '1200': 5, '600': 6,\n '300': 7, '57600': 8},\n description='Using ethernet interface in deployed configuration: (300, 600, '\n '1200, 2400, 4800, 9600, 19200, 38400, 57600)',\n default_value=9600,\n units=Units.BAUD)\n self._add_setup_param(Parameter.ALARM_ENABLE,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Enable Alarms',\n range={'True': True, 'False': False},\n description='Enable alarms to be controlled by the Digital Output (DO) command:'\n ' (true | false)',\n default_value=False)\n self._add_setup_param(Parameter.LOW_ALARM_LATCH,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Low Alarm Latching',\n range={'True': True, 'False': False},\n description='Enable changing the alarm to latching mode: (true | false)',\n default_value=False)\n self._add_setup_param(Parameter.HIGH_ALARM_LATCH,\n bool,\n type=ParameterDictType.BOOL,\n display_name='High Alarm Latching',\n range={'True': True, 'False': False},\n description='Enable changing the alarm to latching mode: (true | false)',\n default_value=False)\n self._add_setup_param(Parameter.RTD_4_WIRE,\n bool,\n type=ParameterDictType.BOOL,\n display_name='4 Wire RTD Flag',\n range={'True': True, 'False': False},\n description='Represents a physical configuration of the instrument, '\n 'disabling may cause data to be misaligned: (true | false)',\n default_value=True)\n self._add_setup_param(Parameter.TEMP_UNITS,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Fahrenheit Flag',\n range={'Fahrenheit': True, 'Celsius': False},\n description='Flag to control the temperature format: (true:Fahrenheit | false:Celsius)',\n default_value=False)\n self._add_setup_param(Parameter.ECHO,\n bool,\n type=ParameterDictType.BOOL,\n display_name='Daisy Chain',\n range={'True': True, 'False': False},\n description='If not set, only 1 out of 3 D1000s will process commands: (true | false)',\n default_value=True)\n self._add_setup_param(Parameter.COMMUNICATION_DELAY,\n int,\n type=ParameterDictType.INT,\n display_name='Communication Delay',\n range=(0, 3),\n description='The number of delays to add when processing commands: (0-3)',\n default_value=0)\n self._add_setup_param(Parameter.PRECISION,\n int,\n 
type=ParameterDictType.INT,\n display_name='Precision',\n range={'4 digits': 0, '5 digits': 1, '6 digits': 2, '7 digits': 3},\n description='Number of digits the instrument should output for temperature query: '\n '(0=4-3=7)',\n default_value=6)\n self._add_setup_param(Parameter.LARGE_SIGNAL_FILTER_C,\n float,\n type=ParameterDictType.FLOAT,\n display_name='Large Signal Filter Constant',\n range={'0': 0, '.25': 1, '.5': 2, '1': 3, '2': 4, '4': 5, '8': 6, '16': 7},\n description='Time to reach 63% of its final value: '\n '(0 = 0.0, 1 = 0.25, 2 = 0.5, 3 = 1.0, 4 = 2.0, 5 = 4.0, 6 = 8.0, 7 = 16.0)',\n default_value=0.0,\n units=Units.SECOND)\n self._add_setup_param(Parameter.SMALL_SIGNAL_FILTER_C,\n float,\n type=ParameterDictType.FLOAT,\n display_name='Small Signal Filter Constant',\n range={'0': 0, '.25': 1, '.5': 2, '1': 3, '2': 4, '4': 5, '8': 6, '16': 7},\n description='Smaller filter constant, should be larger than large filter constant: '\n '(0 = 0.0, 1 = 0.25, 2 = 0.5, 3 = 1.0, 4 = 2.0, 5 = 4.0, 6 = 8.0, 7 = 16.0)',\n default_value=0.50,\n units=Units.SECOND)\n\n for key in self._param_dict.get_keys():\n self._param_dict.set_default(key)", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['lambda'] = self.lambdaVar # rate parameter\n paramDict['low' ] = self.low # lower domain boundary\n return paramDict", "def lenstronomy_params(self):\n if not hasattr(self, '_lenstronomy_args'):\n\n (concentration, gamma, x_core_halo) = self.profile_args\n rhos, rs, _ = self._lens_cosmo.NFW_params_physical(self.mass, concentration, self.z)\n kpc_per_arcsec = self._lens_cosmo.cosmo.kpc_proper_per_asec(self.z)\n\n if 'x_match' in self._args.keys():\n x_match = self._args['x_match']\n else:\n # r_vmax = 2.16 * rs\n x_match = 2.16\n\n r_match_arcsec = x_match * rs / kpc_per_arcsec\n fx = np.log(1+x_match) - x_match/(1 + x_match)\n m = 4 * np.pi * rs ** 3 * rhos * fx\n r_core_arcsec = x_core_halo * r_match_arcsec / x_match\n\n sigma_crit_mpc = self._lens_cosmo.get_sigma_crit_lensing(self.z, self._lens_cosmo.z_source)\n sigma_crit_arcsec = sigma_crit_mpc * (0.001 * kpc_per_arcsec) ** 2\n\n rho0 = m/self._prof.mass_3d(r_match_arcsec, sigma_crit_arcsec, r_core_arcsec, gamma)\n sigma0 = rho0 * r_core_arcsec\n\n self._lenstronomy_args = [{'sigma0': sigma0, 'gamma': gamma, 'center_x': self.x, 'center_y': self.y,\n 'r_core': r_core_arcsec}]\n\n return self._lenstronomy_args, None", "def _init_hyperparam(self, **p_par):\r\n \r\n try:\r\n p_input_size = self._input_space.get_num_dim()\r\n p_output_size = self._output_space.get_num_dim()\r\n except:\r\n raise ParamError('Input size and/or output size of the network are not defined.')\r\n \r\n if 'p_update_rate' not in p_par:\r\n p_par['p_update_rate'] = 1\r\n elif p_par.get('p_update_rate') < 1:\r\n raise ParamError(\"p_update_rate must be equal or higher than 1.\")\r\n \r\n if 'p_num_hidden_layers' not in p_par:\r\n raise ParamError(\"p_num_hidden_layers is not defined.\")\r\n \r\n if 'p_output_activation_fct' not in p_par:\r\n p_par['p_output_activation_fct'] = None\r\n \r\n if 'p_optimizer' not in p_par:\r\n raise ParamError(\"p_optimizer is not defined.\")\r\n \r\n if 'p_loss_fct' not in p_par:\r\n raise ParamError(\"p_loss_fct is not defined.\")\r\n\r\n if 'p_test_data' not in p_par:\r\n p_par['p_test_data'] = 0.3\r\n\r\n if 'p_batch_size' not in p_par:\r\n p_par['p_batch_size'] = 100\r\n\r\n if 'p_seed_buffer' not in p_par:\r\n p_par['p_seed_buffer'] = 1\r\n\r\n if 'p_learning_rate' not in p_par:\r\n 
p_par['p_learning_rate'] = 3e-4\r\n \r\n if 'p_hidden_size' not in p_par:\r\n raise ParamError(\"p_hidden_size is not defined.\")\r\n try:\r\n if len(p_par['p_hidden_size']) != p_par['p_num_hidden_layers']:\r\n raise ParamError(\"length of p_hidden_size list must be equal to p_num_hidden_layers or an integer.\")\r\n except:\r\n p_par['p_hidden_size'] = [int(p_par['p_hidden_size'])] * int(p_par['p_num_hidden_layers'])\r\n \r\n if 'p_activation_fct' not in p_par:\r\n raise ParamError(\"p_activation_fct is not defined.\")\r\n try:\r\n if len(p_par['p_activation_fct']) != p_par['p_num_hidden_layers']:\r\n raise ParamError(\"length of p_activation_fct list must be equal to p_num_hidden_layers or a single activation function.\")\r\n except:\r\n if isinstance(p_par['p_activation_fct'], list):\r\n raise ParamError(\"length of p_activation_fct list must be equal to p_num_hidden_layers or a single activation function.\")\r\n else:\r\n p_par['p_activation_fct'] = [p_par['p_activation_fct']] * int(p_par['p_num_hidden_layers'])\r\n \r\n if 'p_weight_bias_init' not in p_par:\r\n p_par['p_weight_bias_init'] = True\r\n \r\n if p_par['p_weight_bias_init']:\r\n if 'p_weight_init' not in p_par:\r\n p_par['p_weight_init'] = torch.nn.init.orthogonal_\r\n \r\n if 'p_bias_init' not in p_par:\r\n p_par['p_bias_init'] = lambda x: torch.nn.init.constant_(x, 0)\r\n \r\n if 'p_gain_init' not in p_par:\r\n p_par['p_gain_init'] = np.sqrt(2)\r\n \r\n self._hyperparam_space.add_dim(HyperParam('p_input_size','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_output_size','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_update_rate','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_num_hidden_layers','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_hidden_size','Z'))\r\n self._hyperparam_space.add_dim(HyperParam('p_activation_fct'))\r\n self._hyperparam_space.add_dim(HyperParam('p_output_activation_fct'))\r\n self._hyperparam_space.add_dim(HyperParam('p_optimizer'))\r\n self._hyperparam_space.add_dim(HyperParam('p_loss_fct'))\r\n self._hyperparam_space.add_dim(HyperParam('p_test_data'))\r\n self._hyperparam_space.add_dim(HyperParam('p_batch_size'))\r\n self._hyperparam_space.add_dim(HyperParam('p_seed_buffer'))\r\n self._hyperparam_space.add_dim(HyperParam('p_learning_rate'))\r\n self._hyperparam_space.add_dim(HyperParam('p_weight_bias_init'))\r\n self._hyperparam_space.add_dim(HyperParam('p_weight_init'))\r\n self._hyperparam_space.add_dim(HyperParam('p_bias_init'))\r\n self._hyperparam_space.add_dim(HyperParam('p_gain_init'))\r\n self._hyperparam_tuple = HyperParamTuple(self._hyperparam_space)\r\n \r\n ids_ = self.get_hyperparam().get_dim_ids()\r\n self.get_hyperparam().set_value(ids_[0], p_input_size)\r\n self.get_hyperparam().set_value(ids_[1], p_output_size)\r\n self.get_hyperparam().set_value(ids_[2], p_par['p_update_rate'])\r\n self.get_hyperparam().set_value(ids_[3], p_par['p_num_hidden_layers'])\r\n self.get_hyperparam().set_value(ids_[4], p_par['p_hidden_size'])\r\n self.get_hyperparam().set_value(ids_[5], p_par['p_activation_fct'])\r\n self.get_hyperparam().set_value(ids_[6], p_par['p_output_activation_fct'])\r\n self.get_hyperparam().set_value(ids_[7], p_par['p_optimizer'])\r\n self.get_hyperparam().set_value(ids_[8], p_par['p_loss_fct'])\r\n self.get_hyperparam().set_value(ids_[9], p_par['p_test_data'])\r\n self.get_hyperparam().set_value(ids_[10], p_par['p_batch_size'])\r\n self.get_hyperparam().set_value(ids_[11], p_par['p_seed_buffer'])\r\n self.get_hyperparam().set_value(ids_[12], 
p_par['p_learning_rate'])\r\n self.get_hyperparam().set_value(ids_[13], p_par['p_weight_bias_init'])\r\n self.get_hyperparam().set_value(ids_[14], p_par['p_weight_init'])\r\n self.get_hyperparam().set_value(ids_[15], p_par['p_bias_init'])\r\n self.get_hyperparam().set_value(ids_[16], p_par['p_gain_init'])", "def add_params(self, params, module, prefix=''):\n # get param-wise options\n bias_lr_mult = self.paramwise_cfg.get('bias_lr_mult', 1.)\n bias_decay_mult = self.paramwise_cfg.get('bias_decay_mult', 1.)\n norm_decay_mult = self.paramwise_cfg.get('norm_decay_mult', 1.)\n dwconv_decay_mult = self.paramwise_cfg.get('dwconv_decay_mult', 1.)\n bypass_duplicate = self.paramwise_cfg.get('bypass_duplicate', False)\n\n # special rules for norm layers and depth-wise conv layers\n is_norm = isinstance(module,\n (_BatchNorm, _InstanceNorm, GroupNorm, LayerNorm))\n is_dwconv = (\n isinstance(module, torch.nn.Conv2d)\n and module.in_channels == module.groups)\n\n for name, param in module.named_parameters(recurse=False):\n param_group = {'params': [param]}\n if not param.requires_grad:\n params.append(param_group)\n continue\n if bypass_duplicate and self._is_in(param_group, params):\n warnings.warn(f'{prefix} is duplicate. It is skipped since '\n f'bypass_duplicate={bypass_duplicate}')\n continue\n # bias_lr_mult affects all bias parameters except for norm.bias\n if name == 'bias' and not is_norm:\n param_group['lr'] = self.base_lr * bias_lr_mult\n # apply weight decay policies\n if self.base_wd is not None:\n # norm decay\n if is_norm:\n param_group[\n 'weight_decay'] = self.base_wd * norm_decay_mult\n # depth-wise conv\n elif is_dwconv:\n param_group[\n 'weight_decay'] = self.base_wd * dwconv_decay_mult\n # bias lr and decay\n elif name == 'bias':\n param_group[\n 'weight_decay'] = self.base_wd * bias_decay_mult\n params.append(param_group)\n\n for child_name, child_mod in module.named_children():\n child_prefix = f'{prefix}.{child_name}' if prefix else child_name\n self.add_params(params, child_mod, prefix=child_prefix)", "def update_params(self, learning_rate):\n\t\t#######################################################################\n\t\t# ** START OF YOUR CODE **\n\t\t#######################################################################\n\t\tself._W = self._W - learning_rate * self._grad_W_current\n\t\tself._b = self._b - learning_rate * self._grad_b_current\n\t\t#######################################################################\n\t\t# ** END OF YOUR CODE **\n\t\t#######################################################################", "def set_params(self, config):\n params = {'n_bins', 'edges', 'classes', 'chi', 'n_params'}\n self.__dict__.update((param, np.array(value)) for param, value in config.items() if param in params)", "def Params(cls):\n p = hyperparams.InstantiableParams(cls)\n\n p.Define('task_dict', None, 'dataset_name -> task params')\n p.Define('task_name', None, 'High level task name')\n p.Define('logdir', None, 'Log directory')\n p.Define('train_program', None, 'Train program params')\n p.Define('train_executions_per_eval', 1, '')\n p.Define('dataset_names', [], 'List of all dataset names.')\n p.Define('num_splits_per_client', None, '')\n\n p.Define('ml_perf', hyperparams.Params(), 'MlPerf configuration.')\n\n mlp = p.ml_perf\n mlp.Define('benchmark_name', None, 'Benchmark name for compliance log.')\n mlp.Define('decoder_metric_name', None,\n 'Name of the decoder metric to report for compliance log.')\n mlp.Define('decoder_metric_success_threshold', None,\n 
'Benchmark run must exceed this value to succeed.')\n mlp.Define('max_steps_to_train', None,\n 'Maximum number of steps to reach target accuracy')\n mlp.Define('steps_per_epoch', None, 'Number of training steps per epoch.')\n mlp.Define('global_batch_size', None, 'Global batch size.')\n mlp.Define('max_sequence_length', None, 'Maximum sequence length.')\n mlp.Define('optimizer_name', None, 'Optimizer used.')\n mlp.Define('base_learning_rate', None, 'Base learning rate.')\n mlp.Define('warmup_steps', None, 'Number of warm-up steps.')\n\n return p", "def init_params():\r\n\r\n p = OrderedDict()\r\n p['startYear'] = 1855\r\n p['num5YearAgeClasses'] = 25\r\n p['numCareLevels'] = 5\r\n p['pixelsInPopPyramid'] = 2000\r\n p['pixelsPerTown'] = 16 # 56\r\n p['mapGridXDimension'] = 20\r\n p['mapGridYDimension'] = 25\r\n p['careLevelColour'] = ['deepskyblue','green','yellow','orange','red']\r\n p['careDemandInHours'] = [ 0.0, 12.0, 24.0, 48.0, 96.0 ]\r\n p['unmetNeedColor'] = ['deepskyblue','green','yellow','orange','red', 'mediumorchid']\r\n p['houseSizeColour'] = ['deepskyblue','green','yellow','orange','red', 'mediumorchid']\r\n p['mainFont'] = 'Helvetica 18'\r\n p['fontColour'] = 'white'\r\n p['dateX'] = 70\r\n p['dateY'] = 20\r\n p['popX'] = 70\r\n p['popY'] = 50\r\n p['delayTime'] = 0.0\r\n p['maxTextUpdateList'] = 12\r\n \r\n return p", "def construct_parameters(self, method= \"random\", W = np.zeros(1), b = np.zeros(1), initialization=True):\n #W = np.asarray(W, dtype=object)\n #b = np.asarray(b, dtype=object)\n for i in reversed(range(1,len(self.architecture))):\n \n if initialization==True:\n if self.activations[i-1] in {'relu' , 'leakyrelu' , 'ealu'}:\n variance = np.sqrt(2/(self.architecture[i-1])) #He initialization\n elif self.activations[i-1] == 'tanh':\n variance = np.sqrt(6/(self.architecture[i-1] + self.architecture[i])) #Xavier initialization\n elif self.activations[i-1] in ('swish' , 'sigmoid'):\n variance = np.sqrt(1/(self.architecture[i-1]))\n else:\n variance = 1\n \n elif initialization == False:\n variance = 1\n \n if method == 'random':\n self.weights_and_biases[f'W{i}'] = np.random.rand(self.architecture[i-1], self.architecture[i])*variance #randomised initialisation \n self.weights_and_biases[f'b{i}'] = np.zeros(self.architecture[i])*variance\n \n elif method == 'manual': #manual initialisation using given weights and biases\n self.weights_and_biases[f'W{i}'] = W[i-1]\n self.weights_and_biases[f'b{i}'] = b[i-1] \n return self.weights_and_biases", "def get_params(self):\n return {\n \"nspecies\": self.nspecies,\n \"lmax\": self.lmax,\n \"nmax\": self.nmax,\n \"rcut\": self.rcut,\n \"sigma\": self.sigma,\n \"trans_width\": self.trans_width\n }", "def _core_init_params(self) :\n\t\ta_list,b_list = [],[]\n\t\tg_list,h_list = [],[]\n\t\t\n\t\t\n\t\tfor eqnid,eqn in enumerate(self.equations) : \n\t\t\treg_p = self.regressors[eqnid]['prod']\n\t\t\treg_d = self.regressors[eqnid]['degrad']\n\t\t\th_eqn = self.initsol['h'][eqn-1]\n\t\t\tg_eqn = self.initsol['g'][eqn-1]\n\n\n\t\t\ta_list.append(self.initsol['alpha'][eqn-1])\n\t\t\tb_list.append(self.initsol['beta'][eqn-1])\n\t\t\t\n\t\t\tg_eqn = np.array([g_eqn[reg-1] for reg in reg_p])\n\t\t\th_eqn = np.array([h_eqn[reg-1] for reg in reg_d])\n\t\t\th_list.append(h_eqn)\n\t\t\tg_list.append(g_eqn)\n\t\n\t\treturn (a_list,b_list,g_list,h_list)", "def addPppParams(model):\n \n ### GAPDP Parameters ####\n model.addParameter('GAPDP','KmSub2',0.385) # nadp\n model.addParameter('GAPDP','KmProd2',0.202) # nadph\n 
model.addParameter('GAPDP','kcatF',2.8)\n model.addParameter('GAPDP','kcatR',0)\n\n ### FMETTRS Parameters ###\n model.addParameter('FMETTRS','kcatF',0.45)\n\n ### MTHFC Parameters ###\n model.addParameter('MTHFC','kcatF',185)\n\n #### GHMT2 Paramters ####\n model.addParameter('GHMT2','kcatF',0.0)\n model.addParameter('GHMT2','kcatR',0.0)\n \n #### TKT1 Parameters ####\n model.addParameter('TKT1',rxnFormKey='kcatF',value=20.58)\n model.addParameter('TKT1',rxnFormKey='kcatR',value=0.8)\n \n model.addParameter('TKT1',rxnFormKey='KmSub1',value=0.743) #g3p\n model.addParameter('TKT1',rxnFormKey='KmSub2',value=3.7298) #s7p\n model.addParameter('TKT1',rxnFormKey='KmProd1',value=0.4717) #r5p\n model.addParameter('TKT1',rxnFormKey='KmProd2',value=0.134) #xu5p\n \n #### TKT2 Parameters ####\n model.addParameter('TKT2',rxnFormKey='kcatF',value=26.87)\n model.addParameter('TKT2',rxnFormKey='kcatR',value=1.4)\n \n model.addParameter('TKT2',rxnFormKey='KmSub1',value=0.25) #f6p\n model.addParameter('TKT2',rxnFormKey='KmSub2',value=0.743) #g3p\n model.addParameter('TKT2',rxnFormKey='KmProd1',value=0.0227) #e4p\n model.addParameter('TKT2',rxnFormKey='KmProd2',value=0.134) #xu5p\n \n #### TALA Parameters ####\n model.addParameter('TALA',rxnFormKey='kcatF',value=22.3)\n model.addParameter('TALA',rxnFormKey='kcatR',value=0.54)\n \n model.addParameter('TALA',rxnFormKey='KmSub1',value=0.0401) #e4p\n model.addParameter('TALA',rxnFormKey='KmSub2',value=0.6688) #f6p\n model.addParameter('TALA',rxnFormKey='KmProd1',value=1.9) #g3p\n model.addParameter('TALA',rxnFormKey='KmProd2',value=0.285) #s7p\n\n \n #### Speed up DGSN Pathway ####\n model.addParameter('DGSNK',rxnFormKey='kcatF',value=2.25)\n\n #### Speed up DADN pathway ####\n model.addParameter('PUNP2',rxnFormKey='kcatF',value=13.3)\n\n #### Speed up FBA rxn ####\n #model.addParameter('FBA',rxnFormKey='kcatF',value=64.5)\n\n model.addParameter('RNDR2',rxnFormKey='KmSub1',value=0.24)\n\n \n# #### RPI Parameters ####\n model.addParameter('RPI',rxnFormKey='kcatF',value=10.0)\n model.addParameter('RPI',rxnFormKey='kcatR',value=1.0)\n \n #model.addParameter('RPI',rxnFormKey='KmSub1',value=1.0)\n #model.addParameter('RPI',rxnFormKey='KmProd1',value=1.0)\n \n model.addParameter('FBA',rxnFormKey='KmSub1',value=0.12)\n model.addParameter('FBA',rxnFormKey='KmProd2',value=0.05)\n \n \n model.addParameter('GAPD',rxnFormKey='kcatF',value=442.0) \n model.addParameter('GAPD',rxnFormKey='kcatR',value=73.6) \n \n\n model.addParameter('FBA',rxnFormKey='kcatR',value=12.6)\n \n\n model.addParameter('TPI',rxnFormKey='kcatR',value=67)\n \n model.addParameter('TPI',rxnFormKey='KmSub1',value=0.077)\n model.addParameter('TPI',rxnFormKey='KmProd1',value=0.084) \n \n\n model.addParameter('FBA',rxnFormKey='kcatF',value=21.0)\n \n \n model.addParameter('PGK',rxnFormKey='kcatR',value=3.4)\n \n model.addParameter('PGM',rxnFormKey='KmSub1',value=3.6)\n model.addParameter('PGM',rxnFormKey='KmProd1',value=0.2)\n \n \n model.addParameter('PGK',rxnFormKey='KmSub1',value=0.01)\n model.addParameter('PGK',rxnFormKey='KmProd1',value=0.1)\n \n \n model.addParameter('GAPD',rxnFormKey='KmProd1',value=0.47)\n model.addParameter('GAPD',rxnFormKey='KmProd2',value=0.061)\n \n \n model.addParameter('DRPA',rxnFormKey='kcatR',value=34.0)\n \n model.addParameter('DRPA',rxnFormKey='KmProd1',value=0.267)\n model.addParameter('DRPA',rxnFormKey='KmProd2',value=0.2)\n\n \n model.addParameter('PPM2',rxnFormKey='kcatF',value=173)\n \n model.addParameter('PPM2',rxnFormKey='KmSub1',value=0.013)\n 
model.addParameter('PPM2',rxnFormKey='KmProd1',value=1.2)\n\n\n\n# print('Updated PPP Parameters')\n\n return", "def derive_sample_params(self, global_state):\n return self._numerator.derive_sample_params(global_state.sum_state)", "def random():\n gauss_scale = 10**np.random.uniform(1, 3)\n lorentz_scale = 10**np.random.uniform(1, 3)\n cor_length_static = 10**np.random.uniform(0, 3)\n cor_length_dynamic = 10**np.random.uniform(0, 3)\n pars = dict(\n #background=0,\n scale=1,\n gauss_scale=gauss_scale,\n lorentz_scale=lorentz_scale,\n cor_length_static=cor_length_static,\n cor_length_dynamic=cor_length_dynamic,\n )\n return pars", "def set_params(self, params: Dict) -> None:\n self.leak.set_g(params[\"g_leak\"])\n self.kvhh.set_g(params[\"g_kvhh\"])\n self.cav.set_g(params[\"g_cav\"])\n self.kca.set_g(params[\"g_kca\"])\n self.nap.set_g(params[\"g_nap\"])\n self.tau_ca = params[\"t_ca\"]", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['location'] = self.location\n paramDict['scale' ] = self.scale\n return paramDict", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['location'] = self.location\n paramDict['scale' ] = self.scale\n return paramDict", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['n' ] = self.n\n paramDict['p' ] = self.p\n return paramDict", "def set_priors(parnames, limits, linenames, vsyst, nssps=1):\n priors = {}\n for parname in parnames:\n name = parname.split(\"_\")[0]\n if name in limits: #all the CvD ssp parameters\n vmin, vmax = limits[name]\n# print(parname,vmin,vmax)\n delta = vmax - vmin\n priors[parname] = stats.uniform(loc=vmin, scale=delta)\n elif parname in vsyst:\n priors[parname] = stats.norm(loc=vsyst[parname], scale=500)\n elif parname == \"eta\": #what does eta do?\n priors[\"eta\"] = stats.uniform(loc=1., scale=19)#uniform distribution in range [1,19]\n elif parname == \"nu\": #what does nu do?\n priors[\"nu\"] = stats.uniform(loc=2, scale=20)#uniform distribution in range [2,20]\n elif parname == \"sigma\":\n priors[\"sigma\"] = stats.uniform(loc=50, scale=300)#obtains the uniform distribution on [loc, loc + scale]. i.e. uniform in range [50,300]\n elif parname == \"sigma_gas\":\n priors[parname] = stats.uniform(loc=50, scale=100)#uniform between [50,100]km/s\n elif name == \"w\":\n priors[parname] = stats.uniform(loc=0, scale=1)#weights uniform between 0 and 1\n elif name in linenames:\n# priors[parname] = stats.expon(loc=0, scale=0.5)#favors low values>~0; make even stronger by decreasing scale. \n priors[parname] = stats.expon(loc=0, scale=0.2)#favors low values>~0; make even stronger by decreasing scale. 
\n elif name in [\"pred\", \"pblue\"]:\n porder = int(parname.split(\"_\")[1])\n if porder == 0:\n mu, sd = 1 / nssps, 1\n a, b = (0 - mu) / sd, (np.infty - mu) / sd\n priors[parname] = stats.truncnorm(a, b, mu, sd)\n else:\n priors[parname] = stats.norm(0, 0.05)\n else:\n print(f\"parameter without prior: {parname}\")\n return priors", "def _build_param_dict(self):\n # Add parameter handlers to parameter dict.\n self._param_dict = ProtocolParameterDict()\n \n self._param_dict.add(Parameter.CYCLE_TIME,\n r'(\\d+)\\s+= Cycle Time \\(.*\\)\\r\\n(0|1)\\s+= Minutes or Seconds Cycle Time',\n lambda match : self._to_seconds(int(match.group(1)),\n int(match.group(2))),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_WRITE,\n startup_param=True,\n direct_access=False,\n default_value=20,\n menu_path_read=SubMenu.SHOW_PARAM,\n submenu_read=[],\n menu_path_write=SubMenu.CHANGE_PARAM,\n submenu_write=[[\"1\", Prompt.CYCLE_TIME_PROMPT]])\n \n self._param_dict.add(Parameter.VERBOSE,\n r'', # Write-only, so does it really matter?\n lambda match : None,\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=True,\n init_value=1,\n menu_path_write=SubMenu.CHANGE_PARAM,\n submenu_write=[[\"2\", Prompt.VERBOSE_PROMPT]])\n \n self._param_dict.add(Parameter.METADATA_POWERUP,\n r'(0|1)\\s+= Metadata Print Status on Power up',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=True,\n init_value=0,\n menu_path_write=SubMenu.CHANGE_PARAM,\n submenu_write=[[\"3\", Prompt.METADATA_PROMPT]])\n\n self._param_dict.add(Parameter.METADATA_RESTART,\n r'(0|1)\\s+= Metadata Print Status on Restart Data Collection',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=True,\n init_value=0,\n menu_path_write=SubMenu.CHANGE_PARAM,\n submenu_write=[[\"4\", Prompt.METADATA_PROMPT]])\n \n self._param_dict.add(Parameter.RES_SENSOR_POWER,\n r'(0|1)\\s+= Res Power Status',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=False,\n init_value=1,\n menu_path_read=SubMenu.SHOW_PARAM,\n submenu_read=[],\n menu_path_write=SubMenu.SENSOR_POWER,\n submenu_write=[[\"1\"]])\n\n self._param_dict.add(Parameter.INST_AMP_POWER,\n r'(0|1)\\s+= Thermocouple & Hydrogen Amp Power Status',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=False,\n init_value=1,\n menu_path_read=SubMenu.SHOW_PARAM,\n submenu_read=[],\n menu_path_write=SubMenu.SENSOR_POWER,\n submenu_write=[[\"2\"]])\n\n self._param_dict.add(Parameter.EH_ISOLATION_AMP_POWER,\n r'(0|1)\\s+= eh Amp Power Status',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=False,\n init_value=1,\n menu_path_read=SubMenu.SHOW_PARAM,\n submenu_read=[],\n menu_path_write=SubMenu.SENSOR_POWER,\n submenu_write=[[\"3\"]])\n \n self._param_dict.add(Parameter.HYDROGEN_POWER,\n r'(0|1)\\s+= Hydrogen Sensor Power Status',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=False,\n init_value=1,\n menu_path_read=SubMenu.SHOW_PARAM,\n submenu_read=[],\n 
menu_path_write=SubMenu.SENSOR_POWER,\n submenu_write=[[\"4\"]])\n \n self._param_dict.add(Parameter.REFERENCE_TEMP_POWER,\n r'(0|1)\\s+= Reference Temperature Power Status',\n lambda match : int(match.group(1)),\n self._int_to_string,\n visibility=ParameterDictVisibility.READ_ONLY,\n startup_param=True,\n direct_access=False,\n init_value=1,\n menu_path_read=SubMenu.SHOW_PARAM,\n submenu_read=[],\n menu_path_write=SubMenu.SENSOR_POWER,\n submenu_write=[[\"5\"]])", "def generate_parameters(self):\n self.parameters = np.zeros(self.D)\n for l in range(self.D):\n if self.p_l[l] >= np.random.uniform(0,1):\n self.parameters[l] = 1", "def set_hyperparams(self, params):", "def ComputeGeometricParameters(self):\n # extracting inner orientation params\n a0 = self.innerOrientationParameters[0]\n b0 = self.innerOrientationParameters[1]\n a1 = self.innerOrientationParameters[2]\n a2 = self.innerOrientationParameters[3]\n b1 = self.innerOrientationParameters[4]\n b2 = self.innerOrientationParameters[5]\n\n # computing algebric params\n tx = a0;\n ty = b0\n theta = np.arctan(b1 / b2)\n gamma = np.arctan((a1 * np.sin(theta) + a2 * np.cos(theta)) / (b1 * np.sin(theta) + b2 * np.cos(theta)))\n sx = a1 * np.cos(theta) - a2 * np.sin(theta)\n sy = (a1 * np.sin(theta) + a2 * np.cos(theta)) / np.sin(gamma)\n\n return {\"translationX\": tx, \"translationY\": ty, \"rotationAngle\": np.rad2deg(theta), \"scaleFactorX\": sx,\n \"scaleFactorY\": sy, \"shearAngle\": np.rad2deg(gamma)}", "def set_params(self, dic):\n if dic is not None:\n for key, val in zip(dic.keys(), dic.values()):\n if key in self.__dict__.keys():\n self.__dict__[key] = val\n\n if 'scale_params' in self.__dict__.keys():\n self.scale_params.set_params(dic)\n if 'atmospheric_params' in self.__dict__.keys():\n if self.atmospheric_params is not None:\n self.atmospheric_params.set_params(dic)\n\n if 'atemperature_params' in self.__dict__.keys():\n if self.atemperature_params is not None:\n self.atemperature_params.set_params(dic)\n\n if 'oceanic_params' in self.__dict__.keys():\n if self.oceanic_params is not None:\n self.oceanic_params.set_params(dic)\n\n if 'ground_params' in self.__dict__.keys():\n if self.ground_params is not None:\n self.ground_params.set_params(dic)\n\n if 'otemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n self.gotemperature_params.set_params(dic)\n\n if 'gtemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n self.gotemperature_params.set_params(dic)", "def get_hyperparams(self):", "def _generate_params(self):\n return {\n 'lis_outcome_service_url': self.lis_outcome_service_url,\n 'lis_result_sourcedid': self.lis_result_sourcedid,\n 'oauth_consumer_key': self.key\n }", "def get_params(self, G=6.67430e-8, R_k=8.314462618e7):\n self.mu = 4/(5*self.X - self.Z + 3)\n self.xi_s, self.dtheta_s = self.poly[0, -1], self.poly[2, -1]\n self.rho_0 = - self.M * self.xi_s / (4*np.pi * self.R**3 * self.dtheta_s)\n self.P_0 = G * self.M**2 / (self.R**4 * 4 * np.pi * (self.n+1)*self.dtheta_s**2)\n self.T_0 = - self.mu * G * self.M / (R_k * self.R * (self.n+1) * self.xi_s * self.dtheta_s)\n self.rho = self.rho_0 * self.poly[1]**self.n\n self.P = self.P_0 * self.poly[1]**(self.n + 1)\n self.T = self.T_0 * self.poly[1]\n self.m = (self.poly[0]/self.xi_s)**2 * self.poly[2]/self.dtheta_s", "def updateRNGParam(self, dictParam):\n for key in dictParam:\n if key == 'tolerance':\n self.RNGtolerance = dictParam['tolerance']\n elif key == 'initialGridDisc':\n self.RNGInitDisc = 
dictParam['initialGridDisc']\n self._distribution.updateRNGparameter(self.RNGtolerance,self.RNGInitDisc)", "def _update_params(self):\n with self.sphere.sphere_lock:\n self._args_to_params(self.sphere.bai_1d_args, self.bai_1d_pars)\n self._args_to_params(self.sphere.bai_2d_args, self.bai_2d_pars)\n #self._args_to_params(self.sphere.mg_args, self.mg_pars)", "def getInitParams(self):\n paramDict = BoostDistribution.getInitParams(self)\n paramDict['lambda'] = self.lambdaVar\n paramDict['k' ] = self.k\n paramDict['low' ] = self.low\n return paramDict", "def init_paramters(self):\r\n carb_bg_ratio = 5.0\r\n time_to_breakdown = 45.0\r\n insulin_bg_ratio = 50.0\r\n time_to_peak = 45.0\r\n basal_rate = 0.0\r\n digestion_speed = 1.0\r\n activation_speed = 1.0\r\n\r\n # set state to initial\r\n self.S = [self.carb_bg_ratio, self.time_to_breakdown,\r\n self.insulin_bg_ratio, self.time_to_peak,\r\n self.basal_rate, self.digestion_speed,\r\n self.activation_speed]", "def param_computation(self, param):\n result = {}\n\n result[\"V_t_V\"] = param_computation_V_t_V(param)\n\n if compute_gammas:\n result[\"V_t_g_n\"] = param_computation_V_t_g_n(param)\n if compute_lambdas:\n result[\"V_n_t_V\"] = param_computation_V_n_t_V(param)\n\n param_computation_memory_cleanup(param)\n\n return result" ]
[ "0.6686416", "0.6469411", "0.64443755", "0.6407086", "0.5964358", "0.5906427", "0.5815626", "0.57940143", "0.5791521", "0.5775666", "0.5771342", "0.5738936", "0.57148474", "0.57035136", "0.56996524", "0.5679744", "0.56791604", "0.56763065", "0.56688666", "0.56636876", "0.56462115", "0.5642801", "0.56396246", "0.56326836", "0.562053", "0.55855", "0.5557907", "0.5498345", "0.5497285", "0.5497285", "0.54958254", "0.5492347", "0.5483671", "0.5479683", "0.54749185", "0.54647094", "0.54643524", "0.54570943", "0.5443914", "0.5436654", "0.5436382", "0.54333144", "0.54209405", "0.54201293", "0.5414569", "0.541384", "0.5406703", "0.5402773", "0.5397412", "0.53962785", "0.5393573", "0.5392612", "0.5386863", "0.5384426", "0.5384426", "0.53749245", "0.5374781", "0.5372806", "0.53708607", "0.5368216", "0.53666973", "0.53579384", "0.5342457", "0.53403777", "0.5339616", "0.5327551", "0.5320168", "0.5307338", "0.5303594", "0.5299782", "0.5283269", "0.52831686", "0.5265419", "0.52613086", "0.5250882", "0.52474064", "0.52305365", "0.5229453", "0.522661", "0.52246284", "0.5223445", "0.52195644", "0.52171904", "0.521261", "0.521261", "0.5211957", "0.52092606", "0.5208785", "0.519445", "0.51909196", "0.5184222", "0.5183615", "0.5183299", "0.51826495", "0.5181828", "0.5181584", "0.5169326", "0.5168273", "0.51664084", "0.5161326" ]
0.70243114
0
Method uses the current values in the param_dict to update the strength of the correlation between sec_haloprop and galprop at each value of prim_galprop.
def _set_correlation_strength(self):

        if hasattr(self, 'correlation_strength_abcissa'):
            abcissa = self.correlation_strength_abcissa
            ordinates = [self.param_dict['correlation_param'+str(i+1)] for i in range(len(abcissa))]
            correlation_strength_spline = model_helpers.custom_spline(abcissa, ordinates, k=custom_len(abcissa)-1)
            self.correlation_strength = correlation_strength_spline(self.prim_galprop_bins)
        else:
            self.correlation_strength = np.repeat(self.param_dict['correlation_param1'], len(self.prim_galprop_bins))

        self.correlation_strength[self.correlation_strength > 1] = 1
        self.correlation_strength[self.correlation_strength < -1] = -1

        self.correlation_strength = np.append(
            self.correlation_strength, self.correlation_strength[-1])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _mc_galprop(self, seed=None, **kwargs):\n model_helpers.update_param_dict(self, **kwargs)\n self._set_correlation_strength()\n\n if ('galaxy_table' in kwargs.keys()) & ('halos' in kwargs.keys()):\n msg = (\"The mc_\"+self.galprop_key+\" method accepts either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\" + \n \" but never both.\")\n raise KeyError(msg)\n elif 'galaxy_table' in kwargs.keys():\n galaxy_table = kwargs['galaxy_table']\n operative_sec_haloprop_key = (\n model_defaults.host_haloprop_prefix + self.sec_haloprop_key)\n elif 'halos' in kwargs.keys():\n galaxy_table = kwargs['halos']\n operative_sec_haloprop_key = self.sec_haloprop_key\n else:\n msg = (\"The mc_\"+self.galprop_key+\" requires either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\")\n raise KeyError(msg)\n\n self.add_new_haloprops(galaxy_table)\n\n # All at once, draw all the randoms we will need\n np.random.seed(seed=seed)\n all_randoms = np.random.random(len(galaxy_table)*2)\n galprop_cumprob = all_randoms[0:len(galaxy_table)]\n galprop_scatter = all_randoms[len(galaxy_table):]\n\n # Initialize the output array\n output_galprop = np.zeros(len(galaxy_table))\n\n # Determine binning and loop range\n if 'galaxy_table_slice_array' not in kwargs.keys():\n binned_prim_galprop = np.digitize(\n galaxy_table[self.prim_galprop_key], \n self.prim_galprop_bins)\n prim_galprop_loop_range = set(binned_prim_galprop)\n else:\n prim_galprop_loop_range = range(len(self.one_point_lookup_table))\n\n for i in prim_galprop_loop_range:\n\n # Determine the slice corresponding to the i^th prim_galprop bin\n if 'galaxy_table_slice_array' not in kwargs.keys():\n idx_bini = np.where(binned_prim_galprop==i)[0]\n num_bini = len(idx_bini)\n else:\n idx_bini = kwargs['galaxy_table_slice_array'][i]\n num_bini = len(galaxy_table[idx_bini])\n\n if len(idx_bini) > 0:\n # Fetch the appropriate number of randoms\n # for the i^th prim_galprop bin\n galprop_cumprob_bini = galprop_cumprob[idx_bini]\n galprop_scatter_bini = galprop_scatter[idx_bini]\n\n # Fetch the halos in the i^th prim_galprop bin, \n # and determine how they are sorted\n haloprop_bini = galaxy_table[idx_bini][operative_sec_haloprop_key]\n idx_sorted_haloprop_bini = np.argsort(haloprop_bini)\n\n galprop_bini = self._condition_matched_galprop(\n haloprop_bini[idx_sorted_haloprop_bini], \n galprop_cumprob_bini, i, galprop_scatter_bini, self.tol)\n\n # Assign the final values to the \n # appropriately sorted subarray of output_galprop\n output_galprop[idx_bini[idx_sorted_haloprop_bini]] = galprop_bini\n\n return output_galprop", "def update_params(self, learning_rate):\n\t\t#######################################################################\n\t\t# ** START OF YOUR CODE **\n\t\t#######################################################################\n\t\tself._W = self._W - learning_rate * self._grad_W_current\n\t\tself._b = self._b - learning_rate * self._grad_b_current\n\t\t#######################################################################\n\t\t# ** END OF YOUR CODE **\n\t\t#######################################################################", "def update_parameters(self):\n self.alignment_factor = rospy.get_param('/dyn_reconf/alignment_factor')\n self.cohesion_factor = rospy.get_param('/dyn_reconf/cohesion_factor')\n self.separation_factor = rospy.get_param('/dyn_reconf/separation_factor')\n self.avoid_factor = rospy.get_param('/dyn_reconf/avoid_factor')\n self.max_speed = rospy.get_param('/dyn_reconf/max_speed')\n 
self.max_force = rospy.get_param('/dyn_reconf/max_force')\n self.friction = rospy.get_param('/dyn_reconf/friction')\n self.crowd_radius = rospy.get_param('/dyn_reconf/crowd_radius')\n self.search_radius = rospy.get_param('/dyn_reconf/search_radius')\n\n rospy.loginfo(rospy.get_caller_id() + \" -> Parameters updated\")\n if DEBUG:\n print('alignment_factor: ', self.alignment_factor)\n print('cohesion_factor: ', self.cohesion_factor)\n print('separation_factor: ', self.separation_factor)\n print('avoid_factor: ', self.avoid_factor)\n print('max_speed: ', self.max_speed)\n print('max_force: ', self.max_force)\n print('friction: ', self.friction)\n print('crowd_radius: ', self.crowd_radius)\n print('search_radius: ', self.search_radius)", "def polarParams(pol, chord, cl_lin_method='leastsquare', DS_constants='OpenFAST', tau=None):\n # Return interpolant\n fPolar = pol.interpolant(variables=['cl','cd','cm','fs','cl_inv','cl_fs'], radians=True)\n\n p=dict()\n p['Polar'] = pol # backup\n p['fPolar'] = fPolar\n\n # Linear region\n linear_region = np.array([-5, 10])*np.pi/180\n Cl_slope, alpha_0 = pol.cl_linear_slope(window=linear_region, method=cl_lin_method, radians=True)\n #print('Cl_slope',Cl_slope, '[1/rad] - alpha_0', alpha_0*180/np.pi,'[deg]')\n\n p['alpha_0'] = alpha_0 # TODO HARMONIZATION WITH DS\n p['Cl_slope'] = Cl_slope # TODO HARMONIZATION WITH DS\n p['alpha_range'] = None\n p['alpha_range_lin'] = None\n\n # Dynamic stall\n p.update(dynstall_mhh_param_from_polar(pol, chord, constants=DS_constants))\n p.update(dynstall_oye_param_from_polar(pol, tau=tau)) # TODO\n return p", "def update_parameters(self):\n # We update gamma, gamma0, lambda and nu in turn (Bottolo et al, 2011)\n self.update_gamma()\n self.update_gamma0()\n self.update_lambda()\n self.update_nu()\n if self.sample_xi:\n self.update_xi()", "def updateRNGParam(self, dictParam):\n for key in dictParam:\n if key == 'tolerance':\n self.RNGtolerance = dictParam['tolerance']\n elif key == 'initialGridDisc':\n self.RNGInitDisc = dictParam['initialGridDisc']\n self._distribution.updateRNGparameter(self.RNGtolerance,self.RNGInitDisc)", "def update_param(param, param_dict, alg=\"IID_LINEAR\", prefix=\"\"):\n default_len = len(param.defaults)\n if param.defaults:\n for index, value in enumerate(reversed(param.args)):\n if value not in [\"self\", \"W\", \"method\", \"causal_matrix\", \"topology_matrix\"]:\n if index < default_len:\n p_value = list(reversed(param.defaults))[index]\n else:\n p_value = None\n if value is \"sem_type\":\n p_value = sem_type_set(\"sem_type\", alg)[0]\n param_dict.update({prefix + value: p_value})", "def _update_parameter(self, dWxh, dbh, dWhy, dby):\n # Add code to update all the weights and biases here", "def _set_leg_params(self):\n self.p = 0.01600\n self.q = 0.00000\n self.r = 0.02000\n self.c = 0.01811\n self.u = 0.00000\n self.v = 0.00000\n self.e = -0.06000\n self.h = -0.02820\n self.s = 0.02200\n self.d1 = 0.0\n self.d2 = 0.0\n self.d3 = 0.0\n self.stability = 0.0", "def updateParams(self,gradients):\n for i in xrange(len(self.params)):\n self.params[i].set_value(self.params[i].get_value()-gradients[i]/(1/self.learning_rate+self.iterations))", "def _build_param_dict(self, **kwargs):\n \n if 'correlation_strength' in kwargs.keys():\n\n correlation_strength = kwargs['correlation_strength']\n if custom_len(correlation_strength) > 1:\n try:\n self.correlation_strength_abcissa = kwargs['correlation_strength_abcissa']\n except KeyError:\n msg = (\"If correlation_strength keyword is passed to the 
constructor, \\n\" + \n \"you must also pass a correlation_strength_abcissa keyword argument \" + \n \"storing an array of the same length as correlation_strength.\")\n raise(msg)\n else:\n self.correlation_strength_abcissa = [0]\n correlation_strength = [correlation_strength]\n\n self._param_dict_keys = ['correlation_param' + str(i+1) for i in range(len(correlation_strength))]\n self.param_dict = {key:value for key, value in zip(self._param_dict_keys, correlation_strength)}\n else:\n self.param_dict = {'correlation_param1': 1.0}\n self._set_correlation_strength()", "def update_parameters(parameters, grads, learning_rate):\n pass", "def _update_params(self, gradients: dict, learning_rate: float):\n L = len(self.activations)\n\n for l in range(L):\n self.params[\"W_\" + str(l + 1)] = self.params[\"W_\" + str(l + 1)] - learning_rate * gradients[\n \"dW\" + str(l + 1)]\n\n self.params[\"b_\" + str(l + 1)] = self.params[\"b_\" + str(l + 1)] - learning_rate * gradients[\n \"db\" + str(l + 1)]", "def evaluate_reco_param(self):\n evals = self.input_binning['true_energy'].weighted_centers.magnitude\n n_e = len(self.input_binning['true_energy'].weighted_centers.magnitude)\n n_cz = len(self.input_binning['true_coszen'].weighted_centers.magnitude)\n eval_dict = deepcopy(self.param_dict)\n for flavintgroup, dim_dict in eval_dict.items():\n for dim, dist_list in dim_dict.items():\n for dist_prop_dict in dist_list:\n for dist_prop in dist_prop_dict.keys():\n if dist_prop == 'dist':\n continue\n if callable(dist_prop_dict[dist_prop]):\n func = dist_prop_dict[dist_prop]\n vals = func(evals)\n dist_prop_dict[dist_prop] =\\\n np.repeat(vals,n_cz).reshape((n_e,n_cz))\n elif isinstance(dist_prop_dict[dist_prop], dict):\n assert dist_prop == 'kwargs'\n for kwarg in dist_prop_dict['kwargs'].keys():\n func = dist_prop_dict['kwargs'][kwarg]\n vals = func(evals)\n dist_prop_dict['kwargs'][kwarg] =\\\n np.repeat(vals,n_cz).reshape((n_e,n_cz))\n # Now check for consistency, to not have to loop over all dict\n # entries again at a later point in time\n self.check_reco_dist_consistency(dist_list)\n return eval_dict", "def glcmProps(P, prop='contrast'):\n\n (num_level, num_level2, num_dist, num_angle) = P.shape\n assert num_level == num_level2\n assert num_dist > 0\n assert num_angle > 0\n\n # create weights for specified property\n I, J = np.ogrid[0:num_level, 0:num_level]\n if prop == 'contrast':\n weights = (I - J) ** 2\n elif prop in ['ASM', 'energy', 'correlation']:\n pass\n elif prop == 'mean':\n weights, _ = np.mgrid[0:num_level, 0:num_level]\n elif prop == 'dissimilarity':\n weights = np.abs(I - J)\n elif prop == 'homogeneity':\n weights = 1. / (1. 
+ (I - J) ** 2)\n else:\n raise ValueError('%s is an invalid property' % (prop))\n\n # compute property for each GLCM\n if prop == 'energy':\n asm = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]\n results = np.sqrt(asm)\n elif prop == 'ASM':\n results = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]\n elif prop == 'correlation':\n results = np.zeros((num_dist, num_angle), dtype=np.float64)\n I = np.array(range(num_level)).reshape((num_level, 1, 1, 1))\n J = np.array(range(num_level)).reshape((1, num_level, 1, 1))\n diff_i = I - np.apply_over_axes(np.sum, (I * P), axes=(0, 1))[0, 0]\n diff_j = J - np.apply_over_axes(np.sum, (J * P), axes=(0, 1))[0, 0]\n\n std_i = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_i) ** 2),\n axes=(0, 1))[0, 0])\n std_j = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_j) ** 2),\n axes=(0, 1))[0, 0])\n cov = np.apply_over_axes(np.sum, (P * (diff_i * diff_j)),\n axes=(0, 1))[0, 0]\n\n # handle the special case of standard deviations near zero\n mask_0 = std_i < 1e-15\n mask_0[std_j < 1e-15] = True\n results[mask_0] = 1\n\n # handle the standard case\n mask_1 = mask_0 == False\n results[mask_1] = cov[mask_1] / (std_i[mask_1] * std_j[mask_1])\n elif prop in ['contrast', 'dissimilarity', 'homogeneity', 'mean']:\n weights = weights.reshape((num_level, num_level, 1, 1))\n results = np.apply_over_axes(np.sum, (P * weights), axes=(0, 1))[0, 0]\n\n return results", "def add_to_dict(param_dict):\n ### Sample - Int\n sample_s = param_dict['ml_args'].sample_s\n ### Sample - Mr\n sample_Mr = param_dict['ml_args'].sample_Mr\n ## Sample volume\n # Units (Mpc/h)**3\n volume_sample = { '18': 37820 / 0.01396,\n '19': 6046016.60311 ,\n '20': 2.40481e7 ,\n '21': 8.79151e7 }\n vol_mr = volume_sample[sample_s]\n ##\n ## Choice of Centrals and Satellites\n cens = int(1)\n sats = int(0)\n ## Other constants\n # Speed of light - In km/s\n speed_c = ac.c.to(u.km/u.s).value\n ## Number of CPU's to use\n cpu_number = int(cpu_count() * param_dict['cpu_frac'])\n ##\n ## Plotting constants\n plot_dict = { 'size_label':23,\n 'size_title':25,\n 'color_ham' :'red',\n 'color_dyn' :'blue'}\n ##\n ## Catalogue Prefix string\n catl_str_fig = param_dict['ml_args'].catl_alg_comp_fig_str()\n ##\n ## Saving to `param_dict`\n param_dict['sample_s' ] = sample_s\n param_dict['sample_Mr' ] = sample_Mr\n param_dict['vol_mr' ] = vol_mr\n param_dict['cens' ] = cens\n param_dict['sats' ] = sats\n param_dict['speed_c' ] = speed_c\n param_dict['cpu_number' ] = cpu_number\n param_dict['plot_dict' ] = plot_dict\n param_dict['catl_str_fig'] = catl_str_fig\n\n return param_dict", "def set_params(self, params: Dict) -> None:\n self.leak.set_g(params[\"g_leak\"])\n self.kvhh.set_g(params[\"g_kvhh\"])\n self.cav.set_g(params[\"g_cav\"])\n self.kca.set_g(params[\"g_kca\"])\n self.nap.set_g(params[\"g_nap\"])\n self.tau_ca = params[\"t_ca\"]", "def _update_params(self):\n with self.sphere.sphere_lock:\n self._args_to_params(self.sphere.bai_1d_args, self.bai_1d_pars)\n self._args_to_params(self.sphere.bai_2d_args, self.bai_2d_pars)\n #self._args_to_params(self.sphere.mg_args, self.mg_pars)", "def update_parameters(parameters, grads, learning_rate=0.01):\n # Retrieve each parameter from the dictionary \"parameters\"\n ### START CODE HERE ### (≈ 4 lines of code)\n W1 = parameters[\"W1\"]\n b1 = parameters[\"b1\"]\n W2 = parameters[\"W2\"]\n b2 = parameters[\"b2\"]\n W3 = parameters[\"W3\"]\n b3 = parameters[\"b3\"]\n ### END CODE HERE ###\n\n # Retrieve each gradient from the dictionary \"grads\"\n ### START 
CODE HERE ### (≈ 4 lines of code)\n dW1 = grads[\"dW1\"]\n db1 = grads[\"db1\"]\n dW2 = grads[\"dW2\"]\n db2 = grads[\"db2\"]\n dW3 = grads[\"dW3\"]\n db3 = grads[\"db3\"]\n ## END CODE HERE ###\n\n # Update rule for each parameter\n ### START CODE HERE ### (≈ 4 lines of code)\n W1 = W1 - (learning_rate * dW1)\n b1 = b1 - (learning_rate * db1)\n W2 = W2 - (learning_rate * dW2)\n b2 = b2 - (learning_rate * db2)\n W3 = W3 - (learning_rate * dW3)\n b3 = b3 - (learning_rate * db3)\n ### END CODE HERE ###\n\n parameters = {\"W1\": W1,\n \"b1\": b1,\n \"W2\": W2,\n \"b2\": b2,\n \"W3\": W3,\n \"b3\": b3}\n\n return parameters", "def update_distribution(self, gp_conn=None):\n results = self.results_from_db(gp_conn)\n\n for i in self.active_arms:\n try:\n self.arms_dict_params[i]['success'].append(results[i]['success'])\n self.arms_dict_params[i]['trials'].append(results[i]['trials'])\n self.arms_dict_params[i]['current_alpha'] += self.arms_dict_params[i]['success'][-1]\n self.arms_dict_params[i]['current_beta'] += self.arms_dict_params[i]['trials'][-1] - \\\n self.arms_dict_params[i]['success'][-1]\n except:\n # вот тут надо рэйзить ошибку\n self.arms_dict_params[i]['success'].append(0)\n self.arms_dict_params[i]['trials'].append(0)\n\n return self.arms_dict_params", "def update_param(self, lr):\n\n\n self.W=self.W-lr*self.W_grad\n self.b = self.b - lr*self.b_grad", "def updateParameters(self, paramDict):\n\n params = ['taux', 'mu', 'G', 'alpha_0', 'delta', 'p', 'I0', 'kparam']\n\n # Now set the parameters\n for k in paramDict.keys():\n mycode = 'self.' + k + \"=paramDict[\\'\" + k + \"\\']\"\n exec(mycode)", "def HC_update(self, GS_HC, hc_ro):\n\n # Backward inference using GS - HC connectivity (generative model).\n # Weighted mean of GS - HC connectivity, with HC estimate as weights.\n hc_fb = np.average(GS_HC, 0, hc_ro)\n hc_fb = hc_fb / hc_fb.sum()\n\n self.P = hc_fb * self.P\n self.pass_through_lateral_conn()", "def _update_parameters(self, curr_state, reward, next_state):\n phi = self._features.vector(curr_state)\n phi_dash = self._features.vector(next_state)\n\n self._A += np.outer(phi, (phi - self._gamma * phi_dash))\n self._b += reward * phi", "def update_parameters(parameters, grads, learning_rate):\n L = len(parameters) // 2\n\n for i in range(L):\n parameters[\"W\"+str(i+1)] = parameters[\"W\"+str(i+1)] - learning_rate * grads[\"dW\"+str(i+1)]\n parameters[\"b\"+str(i+1)] = parameters[\"b\"+str(i+1)] - learning_rate * grads[\"db\"+str(i+1)]\n\n return parameters", "def update(self, newparams):\n for k, v in list(newparams.items()):\n if k in self.basis_params:\n # Make sure parameter is in dict, and check if it changed\n if k not in self.params:\n self.basis_dirty = True\n self.params[k] = v\n if np.any(v != self.params.get(k)):\n self.basis_dirty = True\n else:\n try:\n # here the sps.params.dirtiness should increase to 2 if\n # there was a change\n self.ssp.params[k] = v[0]\n except KeyError:\n pass\n # now update params\n self.params[k] = np.copy(np.atleast_1d(v))\n # if we changed only csp_params but are relying on COMPSP, make\n # sure we remake the basis\n if self.safe and (self.ssp.params.dirtiness == 1):\n self.basis_dirty = True\n # if we changed only csp_params propagate them through but don't\n # force basis remake (unless basis_dirty)\n if self.ssp.params.dirtiness == 1:\n self.ssp._update_params()\n\n if self.basis_dirty | (self.ssp.params.dirtiness == 2):\n self.build_basis()", "def update_parameters(parameters, grads, learning_rate = 1.2):\n\t# Retrieve each parameter 
from the dictionary \"parameters\"\n\tW1 = parameters['W1']\n\tb1 = parameters['b1']\n\tW2 = parameters['W2']\n\tb2 = parameters['b2']\n\n\t# Retrieve each gradient from the dictionary \"grads\"\n\tdW1 = grads['dW1']\n\tdb1 = grads['db1']\n\tdW2 = grads['dW2']\n\tdb2 = grads['db2']\n\n\t# Update rule for each parameter\n\tW1 = W1 - learning_rate*dW1\n\tb1 = b1 - learning_rate*db1\n\tW2 = W2 - learning_rate*dW2\n\tb2 = b2 - learning_rate*db2\n\n\tparameters = {\"W1\": W1,\n\t\t\t\t\t\"b1\": b1,\n\t\t\t\t\t\"W2\": W2,\n\t\t\t\t\t\"b2\": b2}\n\n\treturn parameters", "def update_parameters_with_gd(parameters, grads, learning_rate):\n\n L = len(parameters) // 2 # number of layers in the neural networks\n\n # Update rule for each parameter\n for l in range(L):\n ### START CODE HERE ### (approx. 2 lines)\n parameters[\"W\" + str(l+1)] = parameters[\"W\" + str(l+1)]-learning_rate* grads[\"dW\" + str(l+1)]\n parameters[\"b\" + str(l+1)] = parameters[\"b\" + str(l+1)]-learning_rate* grads[\"db\" + str(l+1)]\n ### END CODE HERE ###\n \n return parameters", "def update_arm_parameters(self, arm_intuition, arm_selection, success):\n if success:\n self.alpha_params[arm_intuition, arm_selection] += 1\n else:\n self.beta_params[arm_intuition, arm_selection] += 1", "def set_params(self, params: Dict) -> None:\n self.leak.set_g(params['g_leak'])\n self.nav.set_g(params['g_nav'])\n self.kvhh.set_g(params['g_kvhh'])\n self.kva.set_g(params['g_kva'])\n self.kvsi.set_g(params['g_kvsi'])\n self.cav.set_g(params['g_cav'])\n self.kca.set_g(params['g_kca'])\n self.nap.set_g(params['g_nap'])\n self.kir.set_g(params['g_kir'])\n self.ampar.set_g(params['g_ampar'])\n self.nmdar.set_g(params['g_nmdar'])\n self.gabar.set_g(params['g_gabar'])\n self.tau_ca = params['t_ca']", "def update_distr_and_return_proba(self, gp_conn=None): \n\n results = self.results_from_db(gp_conn)\n self.arms_probability = dict.fromkeys(self.active_arms)\n\n for i in self.active_arms:\n try:\n self.arms_dict_params[i]['success'].append(results[i]['success'])\n self.arms_dict_params[i]['trials'].append(results[i]['trials'])\n self.arms_dict_params[i]['current_alpha'] += self.arms_dict_params[i]['success'][-1]\n self.arms_dict_params[i]['current_beta'] += self.arms_dict_params[i]['trials'][-1] - \\\n self.arms_dict_params[i]['success'][-1]\n except:\n # вот тут надо рэйзить ошибку\n self.arms_dict_params[i]['success'].append(0)\n self.arms_dict_params[i]['trials'].append(0)\n\n self.arms_probability[i] = random.betavariate(self.arms_dict_params[i]['current_alpha'],\n self.arms_dict_params[i]['current_beta'])\n self.arms_dict_params[i]['probability'].append(self.arms_probability[i])\n\n return self.arms_dict_params, self.arms_probability", "def update_params(self, optim, lr):\n\n for module in self.modules:\n if isinstance(module, Layer):\n module._update_params(optim, lr)", "def set_params(self, dic):\n if dic is not None:\n for key, val in zip(dic.keys(), dic.values()):\n if key in self.__dict__.keys():\n self.__dict__[key] = val\n\n if 'scale_params' in self.__dict__.keys():\n self.scale_params.set_params(dic)\n if 'atmospheric_params' in self.__dict__.keys():\n if self.atmospheric_params is not None:\n self.atmospheric_params.set_params(dic)\n\n if 'atemperature_params' in self.__dict__.keys():\n if self.atemperature_params is not None:\n self.atemperature_params.set_params(dic)\n\n if 'oceanic_params' in self.__dict__.keys():\n if self.oceanic_params is not None:\n self.oceanic_params.set_params(dic)\n\n if 'ground_params' in 
self.__dict__.keys():\n if self.ground_params is not None:\n self.ground_params.set_params(dic)\n\n if 'otemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n self.gotemperature_params.set_params(dic)\n\n if 'gtemperature_params' in self.__dict__.keys():\n if self.gotemperature_params is not None:\n self.gotemperature_params.set_params(dic)", "def update_parameters(self, learning_rate):\n for i in range(self.L - 1):\n self.W[i] -= learning_rate * self.dW[i]\n self.b[i] -= learning_rate * self.db[i]", "def _update_module_kl_coeff(\n self, module_id: ModuleID, hps: AppoLearnerHyperparameters, sampled_kl: float\n ) -> Mapping[str, Any]:", "def update_params(self, learning_rate):\n\t\t#######################################################################\n\t\t# ** START OF YOUR CODE **\n\t\t#######################################################################\n\n\t\tfor layer in self._layers:\n\t\t\tlayer.update_params(learning_rate)\n\n\t\t#######################################################################\n\t\t# ** END OF YOUR CODE **\n\t\t#######################################################################", "def update(self, parameters):\n self.set_frequencies(parameters) # f_i\n self.set_coupling_weights(parameters) # w_ij\n self.set_phase_bias(parameters) # theta_i\n self.set_amplitudes_rate(parameters) # a_i\n self.set_nominal_amplitudes(parameters) # R_i", "def prior_param(self, param_dict={}): \n self.param_obj = Params(param_dict) # parameter object \n self.param_names = param_dict.keys() \n self.n_params = len(param_dict.keys()) # number of parameters in theta ", "def update_parameters(\n model_param: Dict[str, Union[float, List[float]]]\n ) -> Dict[str, float]:\n\n updated_param = {}\n\n for i, _ in enumerate(model_param[\"teff\"]):\n updated_param[f\"teff_{i}\"] = model_param[\"teff\"][i]\n updated_param[f\"radius_{i}\"] = model_param[\"radius\"][i]\n\n if \"parallax\" in model_param:\n updated_param[\"parallax\"] = model_param[\"parallax\"]\n elif \"distance\" in model_param:\n updated_param[\"distance\"] = model_param[\"distance\"]\n\n return updated_param", "def update_params(self, learning_rate=0.1):\n\n self.params['W'] = self.params['W'] - learning_rate * self.dW # update weights\n self.params['b'] = self.params['b'] - learning_rate * self.db # update bias(es)", "def update_parameters(parameters: Dict,\n grads: Dict, learning_rate: float) -> Dict:\n L = len(parameters)//2 # number of layers\n\n for l in range(1, L+1):\n parameters['W'+str(l)] -= learning_rate * grads['dW'+str(l)]\n parameters['b'+str(l)] -= learning_rate * grads['db'+str(l)]\n\n return parameters", "def change_params(self, dict, node=None, comp=None):\n\n if comp is None:\n for param, val in dict.items():\n self.change_general_param(param, val)\n else:\n for param, val in dict.items():\n self.change_param(node, comp, param, val)", "def update_probabilities(self):\n self.probabilities = self.pheromones**self.EXP_PH * self.mcv**self.EXP_MCV", "def updateParameters(self, parameters):\r\n\t\tin_wikiplace_IRI = parameters[0]\r\n\t\tin_relation_degree = parameters[1]\r\n\t\tin_first_property_dir = parameters[2]\r\n\t\tin_first_property = parameters[3]\r\n\t\tin_second_property_dir = parameters[4]\r\n\t\tin_second_property = parameters[5]\r\n\t\tin_third_property_dir = parameters[6]\r\n\t\tin_third_property = parameters[7]\r\n\t\tin_fourth_property_dir = parameters[8]\r\n\t\tin_fourth_property = parameters[9]\r\n\t\tout_location = parameters[10]\r\n\t\tout_table_name = 
parameters[11]\r\n\t\tout_points_name = parameters[12]\r\n\r\n\t\t\r\n\r\n\t\tif in_relation_degree.altered:\r\n\t\t\trelationDegree = int(in_relation_degree.valueAsText)\r\n\t\t\tif relationDegree == 1:\r\n\t\t\t\tin_first_property.enabled = True\r\n\t\t\t\tin_first_property_dir.enabled = True\r\n\t\t\t\tin_second_property.enabled = False\r\n\t\t\t\tin_second_property_dir.enabled = False\r\n\t\t\t\tin_third_property.enabled = False\r\n\t\t\t\tin_third_property_dir.enabled = False\r\n\t\t\t\tin_fourth_property.enabled = False\r\n\t\t\t\tin_fourth_property_dir.enabled = False\r\n\t\t\telif relationDegree == 2:\r\n\t\t\t\tin_first_property.enabled = True\r\n\t\t\t\tin_first_property_dir.enabled = True\r\n\t\t\t\tin_second_property.enabled = True\r\n\t\t\t\tin_second_property_dir.enabled = True\r\n\t\t\t\tin_third_property.enabled = False\r\n\t\t\t\tin_third_property_dir.enabled = False\r\n\t\t\t\tin_fourth_property.enabled = False\r\n\t\t\t\tin_fourth_property_dir.enabled = False\r\n\t\t\telif relationDegree == 3:\r\n\t\t\t\tin_first_property.enabled = True\r\n\t\t\t\tin_first_property_dir.enabled = True\r\n\t\t\t\tin_second_property.enabled = True\r\n\t\t\t\tin_second_property_dir.enabled = True\r\n\t\t\t\tin_third_property.enabled = True\r\n\t\t\t\tin_third_property_dir.enabled = True\r\n\t\t\t\tin_fourth_property.enabled = False\r\n\t\t\t\tin_fourth_property_dir.enabled = False\r\n\t\t\telif relationDegree == 4:\r\n\t\t\t\tin_first_property.enabled = True\r\n\t\t\t\tin_first_property_dir.enabled = True\r\n\t\t\t\tin_second_property.enabled = True\r\n\t\t\t\tin_second_property_dir.enabled = True\r\n\t\t\t\tin_third_property.enabled = True\r\n\t\t\t\tin_third_property_dir.enabled = True\r\n\t\t\t\tin_fourth_property.enabled = True\r\n\t\t\t\tin_fourth_property_dir.enabled = True\r\n\t\t\r\n\t\t\tif in_wikiplace_IRI.value:\r\n\t\t\t\tinputFeatureClassName = in_wikiplace_IRI.valueAsText\r\n\t\t\t\tlastIndexOFGDB = inputFeatureClassName.rfind(\"\\\\\")\r\n\t\t\t\tfeatureClassName = inputFeatureClassName[(lastIndexOFGDB+1):]\r\n\t\t\t\tcurrentWorkspace = inputFeatureClassName[:lastIndexOFGDB]\r\n\r\n\t\t\t\tarcpy.env.workspace = currentWorkspace\r\n\t\t\t\tout_location.value = currentWorkspace\r\n\r\n\t\t\t\tout_table_name.value = featureClassName + \"PathQueryTripleStore\"\r\n\r\n\t\t\t\tout_points_name.value = featureClassName + \"PathQueryLocation\"\r\n\r\n\r\n\t\t\t\toutLocation = out_location.valueAsText\r\n\t\t\t\toutTableName = out_table_name.valueAsText\r\n\t\t\t\toutputTableName = os.path.join(outLocation,outTableName)\r\n\t\t\t\tif arcpy.Exists(outputTableName):\r\n\t\t\t\t\tarcpy.AddError(\"The output table already exists in current workspace!\")\r\n\t\t\t\t\traise arcpy.ExecuteError\r\n\r\n\t\t\t\toutFeatureClassName = out_points_name.valueAsText\r\n\t\t\t\toutputFeatureClassName = os.path.join(outLocation,outFeatureClassName)\r\n\t\t\t\tif arcpy.Exists(outputFeatureClassName):\r\n\t\t\t\t\tarcpy.AddError(\"The output Feature Class already exists in current workspace!\")\r\n\t\t\t\t\traise arcpy.ExecuteError\r\n\r\n\r\n\t\t\t\t# get all the IRI from input point feature class of wikidata places\r\n\t\t\t\tinplaceIRIList = []\r\n\t\t\t\tcursor = arcpy.SearchCursor(inputFeatureClassName)\r\n\t\t\t\tfor row in cursor:\r\n\t\t\t\t\tinplaceIRIList.append(row.getValue(\"URL\"))\r\n\r\n\r\n\r\n\t\t\t\t# get the first property URL list and label list\r\n\t\t\t\tif in_first_property_dir.value:\r\n\t\t\t\t\tfristDirection = in_first_property_dir.valueAsText\r\n\t\t\t\t\t# get the first 
property URL list\r\n\t\t\t\t\tfirstPropertyURLListJsonBindingObject = SPARQLQuery.relFinderCommonPropertyQuery(inplaceIRIList, relationDegree, [fristDirection], [\"\", \"\", \"\"])\r\n\t\t\t\t\tfirstPropertyURLList = []\r\n\t\t\t\t\tfor jsonItem in firstPropertyURLListJsonBindingObject:\r\n\t\t\t\t\t\tfirstPropertyURLList.append(jsonItem[\"p1\"][\"value\"])\r\n\r\n\t\t\t\t\tfirstPropertyLabelJSON = SPARQLQuery.locationCommonPropertyLabelQuery(firstPropertyURLList)\r\n\t\t\t\t\t# firstPropertyLabelJSON = firstPropertyLabelJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t# get the first property label list\r\n\t\t\t\t\tfirstPropertyURLList = []\r\n\t\t\t\t\tfirstPropertyLabelList = []\r\n\t\t\t\t\tfor jsonItem in firstPropertyLabelJSON:\r\n\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\tfirstPropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\tpropertyName = jsonItem[\"propertyLabel\"][\"value\"]\r\n\t\t\t\t\t\tfirstPropertyLabelList.append(propertyName)\r\n\r\n\t\t\t\t\tRelFinder.firstPropertyLabelURLDict = dict(zip(firstPropertyLabelList, firstPropertyURLList))\r\n\r\n\t\t\t\t\tin_first_property.filter.list = firstPropertyLabelList\r\n\r\n\t\t\t\t\t# get the second property URL list and label list\r\n\t\t\t\t\tif in_second_property_dir.value:\r\n\t\t\t\t\t\tfristDirection = in_first_property_dir.valueAsText\r\n\t\t\t\t\t\tfirstProperty = in_first_property.valueAsText\r\n\r\n\t\t\t\t\t\tif firstProperty == None:\r\n\t\t\t\t\t\t\tfirstProperty = \"\"\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\tfirstProperty = RelFinder.firstPropertyLabelURLDict[firstProperty]\r\n\r\n\t\t\t\t\t\tsecondDirection = in_second_property_dir.valueAsText\r\n\t\t\t\t\t\t\r\n\t\t\t\t\t\t# get the second property URL list\r\n\t\t\t\t\t\tsecondPropertyURLListJsonBindingObject = SPARQLQuery.relFinderCommonPropertyQuery(inplaceIRIList, relationDegree, [fristDirection, secondDirection], [firstProperty, \"\", \"\"])\r\n\t\t\t\t\t\tsecondPropertyURLList = []\r\n\t\t\t\t\t\tfor jsonItem in secondPropertyURLListJsonBindingObject:\r\n\t\t\t\t\t\t\tsecondPropertyURLList.append(jsonItem[\"p2\"][\"value\"])\r\n\r\n\t\t\t\t\t\tsecondPropertyLabelJSON = SPARQLQuery.locationCommonPropertyLabelQuery(secondPropertyURLList)\r\n\t\t\t\t\t\t# secondPropertyLabelJSON = secondPropertyLabelJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\t# get the second property label list\r\n\t\t\t\t\t\tsecondPropertyURLList = []\r\n\t\t\t\t\t\tsecondPropertyLabelList = []\r\n\t\t\t\t\t\tfor jsonItem in secondPropertyLabelJSON:\r\n\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\tsecondPropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\tpropertyName = jsonItem[\"propertyLabel\"][\"value\"]\r\n\t\t\t\t\t\t\tsecondPropertyLabelList.append(propertyName)\r\n\r\n\t\t\t\t\t\tRelFinder.secondPropertyLabelURLDict = dict(zip(secondPropertyLabelList, secondPropertyURLList))\r\n\r\n\t\t\t\t\t\tin_second_property.filter.list = secondPropertyLabelList\r\n\r\n\t\t\t\t\t\t# get the third property URL list and label list\r\n\t\t\t\t\t\tif in_third_property_dir.value:\r\n\t\t\t\t\t\t\tfristDirection = in_first_property_dir.valueAsText\r\n\t\t\t\t\t\t\tfirstProperty = in_first_property.valueAsText\r\n\r\n\t\t\t\t\t\t\tsecondDirection = in_second_property_dir.valueAsText\r\n\t\t\t\t\t\t\tsecondProperty = in_second_property.valueAsText\r\n\r\n\t\t\t\t\t\t\tif firstProperty == None:\r\n\t\t\t\t\t\t\t\tfirstProperty = \"\"\r\n\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\tfirstProperty = 
RelFinder.firstPropertyLabelURLDict[firstProperty]\r\n\t\t\t\t\t\t\tif secondProperty == None:\r\n\t\t\t\t\t\t\t\tsecondProperty = \"\"\r\n\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\tsecondProperty = RelFinder.secondPropertyLabelURLDict[secondProperty]\r\n\r\n\t\t\t\t\t\t\tthirdDirection = in_third_property_dir.valueAsText\r\n\t\t\t\t\t\t\t\r\n\t\t\t\t\t\t\t# get the third property URL list\r\n\t\t\t\t\t\t\tthirdPropertyURLListJsonBindingObject = SPARQLQuery.relFinderCommonPropertyQuery(inplaceIRIList, relationDegree, [fristDirection, secondDirection, thirdDirection], [firstProperty, secondProperty, \"\"])\r\n\t\t\t\t\t\t\tthirdPropertyURLList = []\r\n\t\t\t\t\t\t\tfor jsonItem in thirdPropertyURLListJsonBindingObject:\r\n\t\t\t\t\t\t\t\tthirdPropertyURLList.append(jsonItem[\"p3\"][\"value\"])\r\n\r\n\t\t\t\t\t\t\tthirdPropertyLabelJSON = SPARQLQuery.locationCommonPropertyLabelQuery(thirdPropertyURLList)\r\n\t\t\t\t\t\t\t# thirdPropertyLabelJSON = thirdPropertyLabelJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\t\t# get the third property label list\r\n\t\t\t\t\t\t\tthirdPropertyURLList = []\r\n\t\t\t\t\t\t\tthirdPropertyLabelList = []\r\n\t\t\t\t\t\t\tfor jsonItem in thirdPropertyLabelJSON:\r\n\t\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\t\tthirdPropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\t\tpropertyName = jsonItem[\"propertyLabel\"][\"value\"]\r\n\t\t\t\t\t\t\t\tthirdPropertyLabelList.append(propertyName)\r\n\r\n\t\t\t\t\t\t\tRelFinder.thirdPropertyLabelURLDict = dict(zip(thirdPropertyLabelList, thirdPropertyURLList))\r\n\r\n\t\t\t\t\t\t\tin_third_property.filter.list = thirdPropertyLabelList\r\n\r\n\t\t\t\t\t\t\t# get the fourth property URL list and label list\r\n\t\t\t\t\t\t\tif in_fourth_property_dir.value:\r\n\t\t\t\t\t\t\t\tfristDirection = in_first_property_dir.valueAsText\r\n\t\t\t\t\t\t\t\tfirstProperty = in_first_property.valueAsText\r\n\r\n\t\t\t\t\t\t\t\tsecondDirection = in_second_property_dir.valueAsText\r\n\t\t\t\t\t\t\t\tsecondProperty = in_second_property.valueAsText\r\n\r\n\t\t\t\t\t\t\t\tthirdDirection = in_third_property_dir.valueAsText\r\n\t\t\t\t\t\t\t\tthirdProperty = in_third_property.valueAsText\r\n\r\n\t\t\t\t\t\t\t\tif firstProperty == None:\r\n\t\t\t\t\t\t\t\t\tfirstProperty = \"\"\r\n\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\tfirstProperty = RelFinder.firstPropertyLabelURLDict[firstProperty]\r\n\t\t\t\t\t\t\t\tif secondProperty == None:\r\n\t\t\t\t\t\t\t\t\tsecondProperty = \"\"\r\n\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\tsecondProperty = RelFinder.secondPropertyLabelURLDict[secondProperty]\r\n\t\t\t\t\t\t\t\tif thirdProperty == None:\r\n\t\t\t\t\t\t\t\t\tthirdProperty = \"\"\r\n\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\tthirdProperty = RelFinder.thirdPropertyLabelURLDict[thirdProperty]\r\n\r\n\t\t\t\t\t\t\t\tfourthDirection = in_fourth_property_dir.valueAsText\r\n\t\t\t\t\t\t\t\t\r\n\t\t\t\t\t\t\t\t# get the fourth property URL list\r\n\t\t\t\t\t\t\t\tfourthPropertyURLListJsonBindingObject = SPARQLQuery.relFinderCommonPropertyQuery(inplaceIRIList, relationDegree, [fristDirection, secondDirection, thirdDirection, fourthDirection], [firstProperty, secondProperty, thirdProperty])\r\n\t\t\t\t\t\t\t\tfourthPropertyURLList = []\r\n\t\t\t\t\t\t\t\tfor jsonItem in fourthPropertyURLListJsonBindingObject:\r\n\t\t\t\t\t\t\t\t\tfourthPropertyURLList.append(jsonItem[\"p4\"][\"value\"])\r\n\r\n\t\t\t\t\t\t\t\tfourthPropertyLabelJSON = SPARQLQuery.locationCommonPropertyLabelQuery(fourthPropertyURLList)\r\n\t\t\t\t\t\t\t\t# 
fourthPropertyLabelJSON = fourthPropertyLabelJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\t\t\t# get the fourth property label list\r\n\t\t\t\t\t\t\t\tfourthPropertyURLList = []\r\n\t\t\t\t\t\t\t\tfourthPropertyLabelList = []\r\n\t\t\t\t\t\t\t\tfor jsonItem in fourthPropertyLabelJSON:\r\n\t\t\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\t\t\tfourthPropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\t\t\tpropertyName = jsonItem[\"propertyLabel\"][\"value\"]\r\n\t\t\t\t\t\t\t\t\tfourthPropertyLabelList.append(propertyName)\r\n\r\n\t\t\t\t\t\t\t\tRelFinder.fourthPropertyLabelURLDict = dict(zip(fourthPropertyLabelList, fourthPropertyURLList))\r\n\r\n\t\t\t\t\t\t\t\tin_fourth_property.filter.list = fourthPropertyLabelList\r\n\r\n\r\n\t\treturn", "def _update_params(self, perf_params, loop_info):\n for vartype in list(perf_params.keys()):\n for var in perf_params[vartype]:\n self.tspec_params['performance_params'][var] = \\\n self.indent + 'param %s[] = %s;\\t#%s\\n' % (var, repr(default_perf_params[vartype]), vartype)\n\n #loop_info.vars: set of input vars", "def _update(self):\n self.all_params = {}\n self._update_experiment_params()\n self._update_preprocessing_params()\n self._update_model_params()", "def init_P_PHM_GIVEN_PHI():\n global P_PHM_GIVEN_PHI\n for i in INTERFACE_LEVEL_ACTIONS: # ui\n P_PHM_GIVEN_PHI[i] = collections.OrderedDict()\n for j in INTERFACE_LEVEL_ACTIONS: # um\n if i == j:\n # try to weight the true command more for realistic purposes. Can be offset by using a high UM_GIVEN_UI_NOISE\n P_PHM_GIVEN_PHI[i][j] = 1.0\n else:\n # P_PHM_GIVEN_PHI[i][j] = np.random.random()*UM_GIVEN_UI_NOISE#IF UM_GIVEN_UI_NOISE is 0, then the p(um|ui) is a deterministic mapping\n P_PHM_GIVEN_PHI[i][j] = 0.0\n\n delta_dist = np.array(P_PHM_GIVEN_PHI[i].values())\n uniform_dist = (1.0 / len(INTERFACE_LEVEL_ACTIONS)) * np.ones(len(INTERFACE_LEVEL_ACTIONS))\n blended_dist = (1 - PHM_GIVEN_PHI_NOISE) * delta_dist + PHM_GIVEN_PHI_NOISE * uniform_dist # np.array\n for index, j in enumerate(INTERFACE_LEVEL_ACTIONS):\n P_PHM_GIVEN_PHI[i][j] = blended_dist[index]", "def update_parameters(self, ob_no, hidden, ac_na, fixed_log_probs, q_n, adv_n):\n self.update_critic(ob_no, hidden, q_n)\n self.update_policy(ob_no, hidden, ac_na, fixed_log_probs, adv_n)", "def updateConfBlendWeights(percent):\n global confWeight\n confWeight = float(percent)/100.0", "def _set_primary_behaviors(self):\n\n for component_model in self.model_dictionary.values():\n gal_type = component_model.gal_type\n feature_name = component_model.feature_name\n\n try:\n component_model_galprop_dtype = component_model._galprop_dtypes_to_allocate\n except AttributeError:\n component_model_galprop_dtype = np.dtype([])\n\n methods_to_inherit = list(set(\n component_model._methods_to_inherit))\n\n for methodname in methods_to_inherit:\n new_method_name = methodname + '_' + gal_type\n new_method_behavior = self._update_param_dict_decorator(\n component_model, methodname)\n setattr(self, new_method_name, new_method_behavior)\n setattr(getattr(self, new_method_name), \n '_galprop_dtypes_to_allocate', component_model_galprop_dtype)\n setattr(getattr(self, new_method_name), 'gal_type', gal_type)\n setattr(getattr(self, new_method_name), 'feature_name', feature_name)\n\n attrs_to_inherit = list(set(\n component_model._attrs_to_inherit))\n for attrname in attrs_to_inherit:\n new_attr_name = attrname + '_' + gal_type\n attr = getattr(component_model, attrname)\n setattr(self, new_attr_name, attr)\n\n # Repeatedly 
overwrite self.threshold \n # This is harmless provided that all gal_types are ensured to have the same threshold, \n # which is guaranteed by the _test_dictionary_consistency method\n if hasattr(component_model, 'threshold'):\n setattr(self, 'threshold_' + gal_type, component_model.threshold)\n self.threshold = getattr(self, 'threshold_' + gal_type)", "def explore(self):\n for k, v in self._hyperparameters.items():\n mutation = random.choice([0.8, 1.2])\n self._hyperparameters[k] = mutation * v", "def change_parameters(self,params):\n no_of_params = 0\n for core_param in range(len(self.q)):\n for approx_param in range(self.q[core_param].param_no):\n self.q[core_param].vi_change_param(approx_param, params[no_of_params])\n no_of_params += 1", "def set_calculated_shape_params(self, coincident=True, p_cm=0.,\n q_cm=0., inc_deg=0.):\n if self.FCYL and self.CYL_ANG == 90.0:\n # sagittal curvature\n self.F_EXT=1\n\n # RADIUS = (2 F1 F2 sin (theta)) / (F1+F2)\n if coincident:\n p_cm = self.T_SOURCE\n q_cm = self.T_IMAGE\n inc_deg = self.T_REFLECTION\n\n self.RMIRR = ( (2 * p_cm * q_cm) / (p_cm + q_cm) ) * math.sin(math.radians(90-inc_deg))\n else:\n self.F_EXT=0\n if coincident:\n self.set_auto_focus(f_default=1)\n else:\n self.set_auto_focus(f_default=0, ssour=p_cm,\n simag=q_cm, theta=inc_deg)", "def cell_params(x,y,**kwargs):\n\n GR = glo.global_results()\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n cloudy_library = clo.library()\n lookup_table = cloudy_library._restore_lookup_table()\n lookup_table['logG0s'] = lookup_table['logFUVs']\n if x == 'NH': \n x_cloudy,R_NIR_FUV_cl = aux.get_NH_from_cloudy()\n else:\n x_cloudy = np.unique(lookup_table['log'+x+'s'])\n if y == 'NH': \n y_cloudy,R_NIR_FUV_cl = aux.get_NH_from_cloudy()\n else:\n y_cloudy = np.unique(lookup_table['log'+y+'s'])\n\n if not p.ylim:\n p.ylim = [1e-3,30]\n if not p.xlim:\n p.xlim = [1e-7,1e3]\n \n # SELECT GALAXIES\n rand_gal_index = np.random.randint(0, GR.N_gal, size=(p.bins))\n if p.bins == GR.N_gal: rand_gal_index = np.arange(GR.N_gal)\n if p.gal_index: \n rand_gal_index = [p.gal_index]\n print(rand_gal_index)\n xs = np.array([])\n ys = np.array([])\n m_tot,m_encomp,m_y0 = 0,0,0\n for gal_index in rand_gal_index:\n print(gal_index)\n gal_ob = gal.galaxy(gal_index)\n df = gal_ob.cell_data.get_dataframe()\n df['nSFR'] = df.nSFR.values#/(0.2**3)\n #df['nSFR'] = df['SFR_density']\n #df['NH'] = 10.**df['NH']\n x1 = df[x].values\n y1 = df[y].values\n x1[x1 <= p.xlim[0]] = p.xlim[0]\n y1[y1 <= p.ylim[0]] = p.ylim[0]\n m_tot += np.sum(df.m.values)\n m_encomp += np.sum(df.m[(x1>=p.xlim[0]) & (y1>=p.ylim[0])].values)\n m_y0 += np.sum(df.m[(y1 == 0)].values)\n #print(x,x1.min(),x1.max())\n #print(y,y1.min(),y1.max())\n ys = np.append(ys,y1[(x1>=p.xlim[0]) & (y1>=p.ylim[0])])\n xs = np.append(xs,x1[(x1>=p.xlim[0]) & (y1>=p.ylim[0])])\n print('Min max of %s:' % x)\n print(xs.min(),xs.max())\n print('Min max of %s:' % y)\n print(ys.min(),ys.max())\n fig,ax = plt.subplots(figsize=(10,8))\n hb = ax.hexbin(xs,ys,xscale='log',yscale='log',bins='log',mincnt=1,lw=None,gridsize=50,cmap='inferno')\n cb = fig.colorbar(hb, ax=ax)\n cb.set_label('Number of cells in %i galaxies' % len(rand_gal_index))\n ax.set_xlabel(getlabel(x))\n ax.set_ylabel(getlabel(y))\n print('Total gas mass fraction encompassed: %.4f%%' % (m_encomp/m_tot*100))\n print('Total gas mass fraction with y = 0: %.4f%%' % (m_y0/m_tot*100))\n ax.set_xlim(p.xlim)\n ax.set_ylim(p.ylim)\n # Overplot Cloudy grid params\n print(x,x_cloudy)\n 
print(y,y_cloudy)\n for x1 in x_cloudy:\n ax.plot([10**x1,10**x1],ax.get_ylim(),'-',color='white',alpha=0.7)\n ax.plot([10**x1,10**x1],ax.get_ylim(),'--k',alpha=0.7)\n for y1 in y_cloudy:\n ax.plot(ax.get_xlim(),[10.**y1,10.**y1],'-',color='white',alpha=0.7)\n ax.plot(ax.get_xlim(),[10.**y1,10.**y1],'--k',alpha=0.7)\n\n if not os.path.isdir(p.d_plot + 'cell_data/'): os.mkdir(p.d_plot + 'cell_data/') \n plt.savefig('plots/cell_data/%s%s_cell_params_%s_%s_%s.png' % (p.sim_name,p.sim_run,p.z1,x,y),dpi=250, facecolor='w')", "def _set_model_parameters(self, verbose=False):\n from scipy.special import gamma\n\n z0 = self.z0\n\n # set parameters that are constants\n p_v, d_v, cs0, sigma, vout0 = (1, 2, 6.7, 0.1, 25.0)\n p_vB, d_vB, Mach0, p_M, d_M = (4, 2, 0.5, 1, 3)\n\n # calculate amplitudes that make the pdf integrate to 1\n A_v = np.log(10)*p_v/gamma(d_v/p_v)\n A_cs = np.log(10)/np.sqrt(2*np.pi)/sigma\n A_vB = np.log(10)*p_vB/gamma(d_vB/p_vB)\n A_M = np.log(10)*p_M/gamma(d_M/p_M)\n\n # store them in dictionaries\n self.cool_params = dict(A_v=A_v, p_v=p_v, d_v=d_v,\n A_cs=A_cs, cs0=cs0, sigma=sigma, vout0=vout0)\n self.hot_params = dict(A_vB=A_vB, p_vB=p_vB, d_vB=d_vB,\n A_M=A_M, Mach0=Mach0,p_M=p_M,d_M=d_M)\n # SN related parameters that set the reference values for loading factors\n self.params = dict(Esn=1.e51*au.erg, mstar=95.5*au.M_sun, vcool=200*au.km/au.s,\n Mej=10.*au.M_sun, ZSN=0.2, ZISM0=0.02)\n self.params['vej'] = np.sqrt(2.0*self.params['Esn']/self.params['Mej']).to('km/s')\n self.ref_params = dict(Mref=self.params['mstar'],\n pref=self.params['Esn']/(2*self.params['vcool']),\n Eref=self.params['Esn'],\n Zref=self.params['Mej']*self.params['ZSN'])\n\n # coefficients used in conversion from mass to other PDFs\n self.vp = (self.ref_params['pref']/self.params['mstar']).to('km/s').value\n self.vE = np.sqrt(self.ref_params['Eref']/self.params['mstar']).to('km/s').value\n self.Ze = (self.ref_params['Zref']/self.params['mstar']).cgs.value\n\n # parameters for scaling relations from Paper~I\n a = np.array(fit_alpha[z0])\n b = np.array(fit_beta[z0])\n\n self.scaling_params = dict(a=a, b=b)\n if z0 == '2H':\n self.cool_params['vout0'] = 45\n self.cool_params['cs0'] = 7.5\n elif z0 == '500':\n self.cool_params['vout0'] = 45\n self.cool_params['cs0'] = 8.5\n elif z0 == '1000':\n self.cool_params['vout0'] = 60\n self.cool_params['cs0'] = 10.0\n self.scaling_params['A'] = np.round(10.**(np.array(self.scaling_params['a'])),2)\n self.scaling_params['p'] = 1.+np.array(self.scaling_params['b'])\n self.enum=dict(M_cool=0, M_int=1, M_hot=2, M_total=3,\n p_cool=4, p_int=5, p_hot=6, p_total=7,\n E_cool=8, E_int=9, E_hot=10, E_total=11,\n Z_cool=12, Z_int=13, Z_hot=14, Z_total=15)\n\n # print parameters\n if verbose:\n self.show_parameters()", "def init_params():\n p = {}\n \n # p['rootFolder'] = 'C:/Users/Umberto Gostoli/SPHSU/Social Care Model II'\n # p['rootFolder'] = 'N:/Social Care Model Paper III'\n \n p['noPolicySim'] = False\n p['multiprocessing'] = True\n p['numberProcessors'] = 9\n p['numRepeats'] = 3\n \n p['startYear'] = 1860\n p['endYear'] = 2040\n p['thePresent'] = 2012\n p['statsCollectFrom'] = 1990\n p['regressionCollectFrom'] = 1960 \n p['implementPoliciesFromYear'] = 2020\n p['yearOutcome'] = 2015\n \n p['favouriteSeed'] = 123\n p['loadFromFile'] = False\n p['verboseDebugging'] = False\n p['singleRunGraphs'] = False\n p['saveChecks'] = True\n p['getCheckVariablesAtYear'] = 2015\n # To change through command-line arguments\n\n p['numberPolicyParameters'] = 2\n p['valuesPerParam'] = 1\n 
p['numberScenarios'] = 3\n \n ############ Policy Parameters #######################\n p['incomeCareParam'] = 0.0005 #[0.00025 - 0.001]\n p['taxBreakRate'] = 0.0\n p['ageOfRetirement'] = 65\n p['socialSupportLevel'] = 5\n # p['educationCosts']\n #############################################################\n p['socialCareCreditShare'] = 0.0\n p['maxWtWChildAge'] = 5\n # The basics: starting population and year, etc.\n \n p['discountingFactor'] = 0.03\n \n \n p['initialPop'] = 600 \n \n p['minStartAge'] = 24\n p['maxStartAge'] = 45\n p['numberClasses'] = 5\n p['socialClasses'] = ['unskilled', 'skilled', 'lower', 'middle', 'upper']\n p['initialClassShares'] = [0.2, 0.25, 0.3, 0.2, 0.05]\n p['initialUnemployment'] = [0.25, 0.2, 0.15, 0.1, 0.1]\n p['unemploymentAgeBandParam'] = 0.3\n \n # doDeath function parameters\n p['mortalityBias'] = 0.85 # After 1950\n p['careNeedBias'] = 0.9\n p['unmetCareNeedBias'] = 0.5\n p['baseDieProb'] = 0.0001\n p['babyDieProb'] = 0.005\n p['maleAgeScaling'] = 14.0\n p['maleAgeDieProb'] = 0.00021\n p['femaleAgeScaling'] = 15.5\n p['femaleAgeDieProb'] = 0.00019\n \n p['orphansRelocationParam'] = 0.5\n \n # doBirths function parameters\n p['minPregnancyAge'] = 17\n p['maxPregnancyAge'] = 42\n p['growingPopBirthProb'] = 0.215\n p['fertilityCorrector'] = 1.0\n p['fertilityBias'] = 0.9\n \n # careTransitions function parameters\n p['zeroYearCare'] = 80.0\n p['childcareDecreaseRate'] = 0.25\n p['personCareProb'] = 0.0008\n p['maleAgeCareScaling'] = 18.0 # p['maleAgeCareProb'] = 0.0008\n p['femaleAgeCareScaling'] = 19.0 # p['femaleAgeCareProb'] = 0.0008\n p['baseCareProb'] = 0.0002\n p['careBias'] = 0.9\n p['careTransitionRate'] = 0.7\n\n p['unmetNeedExponent'] = 1.0 # 0.005 #[0.005 - 0.02]\n \n p['numCareLevels'] = 5\n p['careLevelNames'] = ['none','low','moderate','substantial','critical']\n p['careDemandInHours'] = [ 0.0, 8.0, 16.0, 32.0, 80.0 ]\n p['quantumCare'] = 4.0\n \n # careSupplies getCare and probSuppliers function parameters\n \n ######## Key parameter 1 ##############\n \n \n p['weeklyHours'] = 40.0\n \n \n p['priceChildCare'] = 0.76 # 6 \n p['schoolAge'] = 5\n p['maxFormalChildcareHours'] = 48\n p['schoolHours'] = 30\n p['freeChildcareHours'] = 15\n p['workingParentsFreeChildcareHours'] = 30\n p['minAgeStartChildCareSupport'] = 3\n p['minAgeStartChildCareSupportByIncome'] = 2\n p['maxHouseholdIncomeChildCareSupport'] = 40 # 320\n \n ######## Key parameter 2 ##############\n # 5: No public supply \n \n p['retiredHours'] = [48.0, 36.0, 20.0, 10.0] # 60.0\n p['studentHours'] = [24.0, 16.0, 8.0, 4.0]\n p['teenAgersHours'] = [16.0, 0.0, 0.0, 0.0]\n p['unemployedHours'] = [32.0, 24.0, 16.0, 8.0]\n p['employedHours'] = [28.0, 20.0, 12.0, 8.0]\n p['formalCareDiscountFactor'] = 0.5\n \n p['socialNetworkDistances'] = [0.0, 1.0, 2.0, 1.0, 2.0, 2.0, 3.0, 3.0]\n p['networkDistanceParam'] = 2.0\n p['socialCareWeightBias'] = 1.0\n p['unmetCareNeedDiscountParam'] = 0.5\n p['shareUnmetNeedDiscountParam'] = 0.5\n # p['pastShareUnmetNeedWeight'] = 0.5\n \n \n \n p['networkSizeParam'] = 10.0 # 1.0\n \n p['careSupplyBias'] = 0.5\n p['careIncomeParam'] = 0.001\n \n # Hospitalization Costs\n p['qalyBeta'] = 0.18\n p['qalyAlpha'] = 1.5\n p['qalyDiscountRate'] = 0.035\n p['qalyIndexes'] = [1.0, 0.8, 0.6, 0.4, 0.2]\n p['unmetCareHealthParam'] = 0.1\n p['hospitalizationParam'] = 0.5\n p['needLevelParam'] = 2.0\n p['unmetSocialCareParam'] = 2.0\n p['costHospitalizationPerDay'] = 400\n \n # ageTransitions, enterWorkForce and marketWage functions parameters\n p['ageTeenagers'] = 12\n 
p['minWorkingAge'] = 16\n \n ######## Key parameter 3 ##############\n \n p['careBankingSchemeOn'] = False\n p['socialCareBankingAge'] = 65\n \n p['absoluteCreditQuantity'] = False\n p['quantityYearlyIncrease'] = 0.0\n p['socialCareCreditQuantity'] = 0\n p['kinshipNetworkCarePropension'] = 0.5\n p['volunteersCarePropensionCoefficient'] = 0.01\n p['pensionContributionRate'] = 0.05\n \n p['hillHealthLevelThreshold'] = 3\n p['seriouslyHillSupportRate'] = 0.5\n \n ### Prices ####\n p['pricePublicSocialCare'] = 20.0 # [2.55] # 20\n p['priceSocialCare'] = 17.0 # [2.29] # 18\n p['taxBrackets'] = [663, 228, 0] # [28.16, 110.23] # [221, 865]\n p['taxBandsNumber'] = 3\n p['bandsTaxationRates'] = [0.4, 0.2, 0.0] # [0.0, 0.2, 0.4]\n # Tax Break Policy\n\n \n p['pensionWage'] = [5.0, 7.0, 10.0, 13.0, 18.0] # [0.64, 0.89, 1.27, 1.66, 2.29] # \n p['incomeInitialLevels'] = [5.0, 7.0, 9.0, 11.0, 14.0] #[0.64, 0.89, 1.15, 1.40, 1.78] # \n p['incomeFinalLevels'] = [10.0, 15.0, 22.0, 33.0, 50.0] #[1.27, 1.91, 2.80, 4.21, 6.37] # \n p['educationCosts'] = [0.0, 100.0, 150.0, 200.0] #[0.0, 12.74, 19.12, 25.49] # \n \n # Priced growth #####\n p['wageGrowthRate'] = 1.0 # 1.01338 # \n\n p['incomeGrowthRate'] = [0.4, 0.35, 0.35, 0.3, 0.25]\n \n # SES inter-generational mobility parameters\n p['leaveHomeStudentsProb'] = 0.5\n \n p['eduWageSensitivity'] = 0.2 # 0.5\n p['eduRankSensitivity'] = 3.0 # 5.0\n p['costantIncomeParam'] = 80.0 # 20.0\n p['costantEduParam'] = 10.0 # 10.0\n p['careEducationParam'] = 0.005 # 0.04\n \n \n \n # p['incEduExp'] = 0.25\n p['educationLevels'] = ['GCSE', 'A-Level', 'HND', 'Degree', 'Higher Degree']\n p['workingAge'] = [16, 18, 20, 22, 24]\n \n # doDivorce function parameters\n p['basicDivorceRate'] = 0.06\n p['variableDivorce'] = 0.06\n p['divorceModifierByDecade'] = [ 0.0, 1.0, 0.9, 0.5, 0.4, 0.2, 0.1, 0.03, 0.01, 0.001, 0.001, 0.001, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n p['divorceBias'] = 1.0\n \n # doMarriages function parameters\n p['deltageProb'] = [0.0, 0.1, 0.25, 0.4, 0.2, 0.05]\n p['incomeMarriageParam'] = 0.025\n p['studentFactorParam'] = 0.5\n ######## Key parameter 4 ##############\n p['betaGeoExp'] = 2.0 #[1.0 - 4.0]\n \n p['betaSocExp'] = 2.0\n p['rankGenderBias'] = 0.5\n p['basicMaleMarriageProb'] = 0.9\n p['maleMarriageModifierByDecade'] = [ 0.0, 0.16, 0.5, 1.0, 0.8, 0.7, 0.66, 0.5, 0.4, 0.2, 0.1, 0.05, 0.01, 0.0, 0.0, 0.0, 0.0 ]\n \n # jobMarket, updateWork and unemploymentRate functions parameters\n p['unemploymentClassBias'] = 0.75\n p['unemploymentAgeBias'] = [1.0, 0.55, 0.35, 0.25, 0.2, 0.2]\n p['numberAgeBands'] = 6\n p['jobMobilitySlope'] = 0.004\n p['jobMobilityIntercept'] = 0.05\n p['ageBiasParam'] = [7.0, 3.0, 1.0, 0.5, 0.35, 0.15]\n p['deltaIncomeExp'] = 0.05\n p['unemployedCareBurdernParam'] = 0.025\n # Potential key parameter\n p['relocationCareLossExp'] = 1.0 # 40.0 # \n p['incomeSocialCostRelativeWeight'] = 0.5\n \n p['firingParam'] = 0.2\n p['wageVar'] = 0.06\n p['workDiscountingTime'] = 0.75 # 0.8\n p['sizeWeightParam'] = 0.7\n p['minClassWeightParam'] = 1.0\n p['incomeDiscountingExponent'] = 4.0\n p['discountingMultiplier'] = 2.0\n #p['incomeDiscountingParam'] = 2.0\n \n # relocationPensioners function parameters\n p['agingParentsMoveInWithKids'] = 0.1\n p['variableMoveBack'] = 0.1\n p['retiredRelocationParam'] = 0.001 # 0.005\n \n # houseMap function parameters\n p['geoDistanceSensitivityParam'] = 2.0\n p['socDistanceSensitivityParam'] = 2.0\n p['classAffinityWeight'] = 4.0\n p['distanceSensitivityParam'] = 0.5\n \n # relocationProb function parameters\n 
p['baseRelocatingProb'] = 0.05\n p['relocationParameter'] = 1.0 \n p['apprenticesRelocationProb'] = 0.5\n #p['expReloc'] = 1.0\n \n # computeRelocationCost and relocation Propensity functions parameters\n p['yearsInTownSensitivityParam'] = 0.5\n \n ######## Key parameter 5 ##############\n p['relocationCostParam'] = 0.5 # 1.0 \n \n ######## Key parameter 6 ##############\n p['propensityRelocationParam'] = 2.0 # 2.0 \n p['denRelocationWeight'] = 0.5\n \n \n ## Description of the map, towns, and houses\n p['mapGridXDimension'] = 8\n p['mapGridYDimension'] = 12 \n p['townGridDimension'] = 70\n p['cdfHouseClasses'] = [ 0.6, 0.9, 5.0 ]\n p['ukMap'] = [[ 0.0, 0.1, 0.2, 0.1, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.1, 0.1, 0.2, 0.2, 0.3, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.2, 0.2, 0.3, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.2, 1.0, 0.5, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.4, 0.0, 0.2, 0.2, 0.4, 0.0, 0.0, 0.0 ],\n [ 0.6, 0.0, 0.0, 0.3, 0.8, 0.2, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.0, 0.6, 0.8, 0.4, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.2, 1.0, 0.8, 0.6, 0.1, 0.0 ],\n [ 0.0, 0.0, 0.1, 0.2, 1.0, 0.6, 0.3, 0.4 ],\n [ 0.0, 0.0, 0.5, 0.7, 0.5, 1.0, 1.0, 0.0 ],\n [ 0.0, 0.0, 0.2, 0.4, 0.6, 1.0, 1.0, 0.0 ],\n [ 0.0, 0.2, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0 ]]\n p['ukClassBias'] = [[ 0.0, -0.05, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ -0.05, -0.05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, -0.05, -0.05, 0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ -0.05, 0.0, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ -0.05, 0.0, 0.0, -0.05, -0.05, -0.05, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.0, -0.05, -0.05, -0.05, 0.0, 0.0 ],\n [ 0.0, 0.0, -0.05, -0.05, 0.0, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.0, -0.05, 0.0, -0.05, 0.0, 0.0, 0.0 ],\n [ 0.0, 0.0, 0.0, -0.05, 0.0, 0.2, 0.15, 0.0 ],\n [ 0.0, 0.0, 0.0, 0.0, 0.1, 0.2, 0.15, 0.0 ],\n [ 0.0, 0.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0 ] ]\n p['mapDensityModifier'] = 0.6\n # p['numHouseClasses'] = 3\n # p['houseClasses'] = ['small','medium','large']\n \n ## Graphical interface details\n p['interactiveGraphics'] = False #True\n p['delayTime'] = 0.0\n p['screenWidth'] = 1300\n p['screenHeight'] = 700\n p['bgColour'] = 'black'\n p['mainFont'] = 'Helvetica 18'\n p['fontColour'] = 'white'\n p['dateX'] = 70\n p['dateY'] = 20\n p['popX'] = 70\n p['popY'] = 50\n p['pixelsInPopPyramid'] = 2000\n p['num5YearAgeClasses'] = 28\n p['careLevelColour'] = ['blue','green','yellow','orange','red']\n p['houseSizeColour'] = ['brown','purple','yellow']\n p['pixelsPerTown'] = 56\n p['maxTextUpdateList'] = 22\n \n # p['eduEduSensitivity'] = 0.5\n # p['mortalityBias'] = [1.0, 0.92, 0.84, 0.76, 0.68]\n # p['fertilityBias'] = [1.0, 0.92, 0.84, 0.76, 0.68]\n # p['divorceBias'] = [2.0, 1.5, 1.0, 0.75, 0.5]\n\n ## Transitions to care statistics\n \n ## Availability of care statistics\n \n #p['childHours'] = 5.0\n # p['employedHours'] = 12.0\n #p['homeAdultHours'] = 30.0\n #p['workingAdultHours'] = 25.0\n #p['maxEmployedHours'] = 60.0\n \n #p['lowCareHandicap'] = 0.5\n #p['hourlyCostOfCare'] = 20.0\n \n ## Fertility statistics\n \n # p['steadyPopBirthProb'] = 0.13\n # p['transitionYear'] = 1965\n \n ## Class and employment statistics\n # p['numClasses'] = 5\n # p['occupationClasses'] = ['lower','intermediate','higher']\n # p['cdfOccupationClasses'] = [ 0.6, 0.9, 1.0 ]\n\n ## Age transition statistics\n # p['ageOfAdulthood'] = 17\n \n ## Marriage function parameters\n \n # p['basicFemaleMarriageProb'] = 0.25\n # p['femaleMarriageModifierByDecade'] = [ 0.0, 0.5, 1.0, 1.0, 1.0, 0.6, 0.5, 0.4, 0.1, 0.01, 0.01, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n # p['femaleMarriageProb'] = 
[0.01, 0.15, 0.3, 0.2, 0.1, 0.1, 0.06, 0.05, 0.02, 0.01, 0.01, 0.005]\n # p['maleMarriageProb'] = [0.005, 0.08, 0.25, 0.25, 0.15, 0.1, 0.07, 0.05, 0.03, 0.02, 0.01, 0.005]\n \n ## Leaving home and moving around statistics\n # p['probApartWillMoveTogether'] = 0.3\n # p['coupleMovesToExistingHousehold'] = 0.3\n # p['basicProbAdultMoveOut'] = 0.22\n # p['probAdultMoveOutModifierByDecade'] = [ 0.0, 0.2, 1.0, 0.6, 0.3, 0.15, 0.03, 0.03, 0.01, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n # p['basicProbSingleMove'] = 0.05\n # p['probSingleMoveModifierByDecade'] = [ 0.0, 1.0, 1.0, 0.8, 0.4, 0.06, 0.04, 0.02, 0.02, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]\n # p['basicProbFamilyMove'] = 0.03\n # p['probFamilyMoveModifierByDecade'] = [ 0.0, 0.5, 0.8, 0.5, 0.2, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1 ]\n\n \n return p", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_nav', 'g_kvhh', 'g_kva', 'g_kvsi', \n 'g_cav', 'g_kca', 'g_nap', 'g_kir']\n gX_log: np.ndarray = 4 * np.random.rand(9) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(9)) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n gR_name: List[str] = ['g_ampar', 'g_nmdar', 'g_gabar']\n gR_log: np.ndarray = 4 * np.random.rand(3) - 3 # from -3 to 1\n gR: np.ndarray = (10 * np.ones(3)) ** gR_log # 0.001 ~ 10\n gR_itr: Iterator = zip(gR_name, gR)\n\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n\n param_dict.update(gX_itr)\n param_dict.update(gR_itr)\n param_dict.update(tCa_dict)\n return param_dict", "def update_params(self):\n \n # read parameters from context broker\n for attr in ['Kp', 'Ti', 'Td', 'lim_low', 'lim_high', 'setpoint']:\n self.params[attr] = float(self.ORION_CB.get_entity_attribute_value(entity_name=self.params['name'], attribute_name=attr))\n self.params['reverse_act'] = str(self.ORION_CB.get_entity_attribute_value(entity_name=self.params['name'], attribute_name='reverse_act')) \n \n # update PID parameters\n self.PID.Kp = self.params['Kp']\n self.PID.Ti = self.params['Ti']\n self.PID.Td = self.params['Td']\n self.PID.lim_low = self.params['lim_low']\n self.PID.lim_high = self.params['lim_high']\n self.PID.reverse_act = eval(eval(self.params['reverse_act']))", "def update_parameters(self, event, gamma='2.4',\n gain=\"scale_default_value\"):\n print(\"gamma={}, gain={}\".format(gamma, gain))\n sys.stdout.flush()\n self.update_draw(gamma, gain)", "def test_update_learner_params(self):\n independent_pc = param_domain.ParamChange(\n 'a', 'Copier', {'value': 'firstValue', 'parse_with_jinja': False})\n dependent_pc = param_domain.ParamChange(\n 'b', 'Copier', {'value': '{{a}}', 'parse_with_jinja': True})\n\n exp_param_specs = {\n 'a': param_domain.ParamSpec('UnicodeString'),\n 'b': param_domain.ParamSpec('UnicodeString'),\n }\n\n old_params = {}\n new_params = self.get_updated_param_dict(\n old_params, [independent_pc, dependent_pc], exp_param_specs)\n self.assertEqual(new_params, {'a': 'firstValue', 'b': 'firstValue'})\n self.assertEqual(old_params, {})\n\n old_params = {'a': 'secondValue'}\n new_params = self.get_updated_param_dict(\n old_params, [dependent_pc], exp_param_specs)\n self.assertEqual(new_params, {'a': 'secondValue', 'b': 'secondValue'})\n self.assertEqual(old_params, {'a': 'secondValue'})\n\n # Jinja string evaluation fails gracefully on dependencies that do not\n # exist.\n old_params = {}\n new_params = self.get_updated_param_dict(\n old_params, [dependent_pc], 
exp_param_specs)\n self.assertEqual(new_params, {'b': ''})\n self.assertEqual(old_params, {})", "def _propagate_param_grad(self, parray, garray):\n if self.param_array.size != self.size:\n self._param_array_ = np.empty(self.size, dtype=np.float64)\n if self.gradient.size != self.size:\n self._gradient_array_ = np.empty(self.size, dtype=np.float64)\n\n pi_old_size = 0\n for pi in self.parameters:\n pislice = slice(pi_old_size, pi_old_size + pi.size)\n\n self.param_array[pislice] = pi.param_array.flat # , requirements=['C', 'W']).flat\n self.gradient_full[pislice] = pi.gradient_full.flat # , requirements=['C', 'W']).flat\n\n pi.param_array.data = parray[pislice].data\n pi.gradient_full.data = garray[pislice].data\n\n pi._propagate_param_grad(parray[pislice], garray[pislice])\n pi_old_size += pi.size", "def _update_params(self):\n log.debug(\"Updating parameter dict\")\n old_config = self._param_dict.get_config()\n self._get_config()\n new_config = self._param_dict.get_config() \n if (new_config != old_config):\n self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)", "def gen_params(self) -> Dict:\n param_dict: Dict = {}\n\n gX_name: List[str] = ['g_leak', 'g_nav', 'g_kvhh', 'g_kva', 'g_kvsi', \n 'g_cav', 'g_kca', 'g_nap', 'g_kir']\n gX_name: List[str] = list(itertools.compress(gX_name, list(self.channel_bool.values())[:9]))\n gX_log: np.ndarray = 4 * np.random.rand(len(gX_name)) - 2 # from -2 to 2\n gX: np.ndarray = (10 * np.ones(len(gX_name))) ** gX_log # 0.01 ~ 100\n gX_itr: Iterator = zip(gX_name, gX)\n\n gR_name: List[str] = ['g_ampar', 'g_nmdar', 'g_gabar']\n gR_name: List[str] = list(itertools.compress(gR_name, list(self.channel_bool.values())[9:12]))\n gR_log: np.ndarray = 4 * np.random.rand(len(gR_name)) - 3 # from -3 to 1\n gR: np.ndarray = (10 * np.ones(len(gR_name))) ** gR_log # 0.001 ~ 10\n gR_itr: Iterator = zip(gR_name, gR)\n\n param_dict.update(gX_itr)\n param_dict.update(gR_itr)\n\n if self.channel_bool['ca']:\n tCa_log: float = 2 * np.random.rand(1) + 1 # from 1 to 3\n tCa: float = 10 ** tCa_log # 10 ~ 1000\n tCa_dict: Dict = {'t_ca': tCa}\n param_dict.update(tCa_dict)\n\n return param_dict", "def variational_update(self):\n with self.elbo_check('update_p_allele_swap'):\n self.model.update_p_allele_swap()\n\n with self.elbo_check('p_cn'):\n self.model.update_p_cn()\n\n with self.elbo_check('p_breakpoint'):\n self.model.update_p_breakpoint()\n\n with self.elbo_check('p_outlier_total'):\n self.model.update_p_outlier_total()\n\n with self.elbo_check('p_outlier_allele'):\n self.model.update_p_outlier_allele()", "def update_arm_parameters(self, arm, success):\n if success:\n self.alpha_params[arm] += 1\n else:\n self.beta_params[arm] += 1", "def _update_params(self, new_params):\n # update all the parameters\n for old_param, new_param in zip(self.model.parameters(), new_params):\n old_param.data += torch.from_numpy(new_param).to(old_param.device)", "def update_params(self):\n pass", "def _finalize_params(fsdp_module: FullyShardedDataParallel) -> None:\n for handle in fsdp_module._handles:\n p = handle.flat_param\n if p.requires_grad:\n if hasattr(p, \"_post_backward_hook_state\"):\n p_assert(\n len(p._post_backward_hook_state) == 2, # type: ignore[attr-defined]\n \"p._post_backward_hook_state fields are not valid.\"\n )\n p._post_backward_hook_state[1].remove() # type: ignore[attr-defined]\n delattr(p, \"_post_backward_hook_state\")\n # Preserve the gradient accumulation state if not\n # synchronizing: `p.grad` remains the unsharded gradient\n # accumulated from prior `no_sync()` 
iterations, and\n # `p._saved_grad_shard` remains the sharded gradient from\n # the last synchronized iteration\n if not self._sync_gradients:\n continue\n # Set `p.grad` as needed to ensure optimizer correctness\n # since optimizers operate on the `grad` attribute\n if hasattr(p, \"_cpu_grad\"):\n p_assert(\n p.device == torch.device(\"cpu\"),\n f\"Device mismatch: p={p.device} \" # type: ignore[attr-defined]\n f\"p._cpu_grad={p._cpu_grad}\"\n )\n p.grad = p._cpu_grad # type: ignore[attr-defined]\n elif hasattr(p, \"_saved_grad_shard\"):\n p_assert(\n p.device == p._saved_grad_shard.device, # type: ignore[attr-defined]\n f\"Device mismatch: p={p.device} \" # type: ignore[attr-defined]\n f\"p._saved_grad_shard={p._saved_grad_shard.device}\"\n )\n # Check if post-backward was called for this param (FSDP unit).\n # TODO: This logic will have to be revisited when non-recursive wrapping\n # lands. If it was not called, there is no new gradient to accumulate\n if p._post_backward_called:\n p.grad = p._saved_grad_shard\n if fsdp_module._mixed_precision_keep_low_precision_grads():\n p.grad.data = p.grad.to(\n fsdp_module.mixed_precision.param_dtype\n )\n else:\n p_assert(\n not handle.uses_sharded_strategy or not p._post_backward_called,\n \"All sharded parameters that received a gradient \"\n \"should use `_saved_grad_shard`\"\n )\n if hasattr(p, \"_saved_grad_shard\"):\n delattr(p, \"_saved_grad_shard\")\n\n p_assert(\n hasattr(p, '_post_backward_called'),\n \"Expected flag _post_backward_called to be set on param.\"\n )\n # Reset _post_backward_called in preparation for the next iteration.\n p._post_backward_called = False", "def update_params(self):\n if self.clip > 0:\n torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.clip)\n self.optimizer.step()", "def updateInstrumentParameterValue(tablews, paramdict):\n paramnames = paramdict.keys()\n for parname in paramnames: \n parvalue = paramdict[parname]\n\t# print \"%s = %f\" % (parname, parvalue)\n\tif parname.count(\"Chi2\") == 0:\n\t # Only update parameters nothing to do with chi2\n UpdatePeakParameterTableValue(InputWorkspace=tablews,\n\t\tColumn='Value',\n ParameterNames=[parname],\n\t\tNewFloatValue=parvalue)\n\n return", "def genParametersOldFormat(self, **kwargs):\n\n # if, for some reason, you don't want to be changign the new dict\n newDict = dict(kwargs)\n # One big change was only saving statistical information of the FEL pulses\n # etc. 
Caclulate that information and update the dict.\n if isinstance(kwargs.get(\"fieldStrength\", {}), list):\n stats = [\"kurtosis\", \"mean\", \"skew\", \"std\"]\n sets = [\"fieldStrength\", \"fieldInt\", \"cdRatios\", \"fpTime\", \"pyroVoltage\"]\n newDict[\"fel_pulses\"] = sum(kwargs[\"fel_pulses\"])\n\n newDict.update(\n {set: {stat: np.mean(\n [x.get(stat, '-1') for x in kwargs[set]] )\n for stat in stats}\n for set in sets}\n )\n\n return self.genParameters(**newDict)", "def _update_parameters(self, loss):\n self.optimizer.zero_grad()\n loss.backward()\n self.optimizer.step()", "def compute_derived_parameters(cls, fdict):\n cgg = fdict['cgd'] + fdict['cgs']\n return dict(\n cgg=cgg,\n cdd=fdict['cgd'] + fdict['cds'],\n vstar=2.0 * (fdict['ids'] / fdict['gm']),\n gain=fdict['gm'] / fdict['gds'],\n ft=fdict['gm'] / (2.0 * np.pi * cgg),\n )", "def _update_parameters(self, delta):\n if delta is not None:\n self.SGD.update_with_L1_regularization(self.variables, delta, self.L1)", "def _update_params(self):\n pass", "def learn_params(self, measurements, true_ranges):\n z_hit,z_short,z_max,z_rand,var_hit,lambda_short= self.params\n pre_params=[z_hit,z_short,z_max,z_rand,var_hit,lambda_short]\n updated_params=[-1,-1,-1,-1,-1,-1]\n while np.max(np.abs(np.array(updated_params) - np.array(pre_params))) > 1e-6:\n\n e_hit, e_short, e_max, e_rand = [], [], [], []\n for i in range(len(measurements)):\n true_range, measurement = true_ranges[i], measurements[i]\n p_hit = self.PHit(true_range, measurement,var_hit)\n p_short = self.PShort(true_range, measurement,lambda_short)\n p_max = self.PMax(measurement)\n p_rand = self.PRand(measurement)\n normalizer = 1.0 / (p_hit + p_short + p_max + p_rand)\n e_hit.append(normalizer * p_hit)\n e_short.append(normalizer * p_short)\n e_max.append(normalizer * p_max)\n e_rand.append(normalizer * p_rand)\n e_hit, e_short, e_max, e_rand = np.array(e_hit), np.array(e_short), np.array(e_max), np.array(e_rand)\n\n # perform M step\n pre_params = [z_hit, z_short, z_max, z_rand, var_hit,lambda_short]\n z_hit = sum(e_hit) / len(measurements)\n z_short = sum(e_short) / len(measurements)\n z_max = sum(e_max)/ len(measurements)\n z_rand = sum(e_rand) / len(measurements)\n var_hit = np.sqrt(1.0 / np.sum(e_hit) * np.sum(e_hit * (np.array(measurements)-np.array(true_ranges))**2)).item()\n lambda_short = (np.sum(e_short) / np.sum(e_short * np.array(measurements))).item()\n updated_params = [z_hit, z_short, z_max, z_rand, var_hit, lambda_short]\n print('origin',self.params)\n print('updated',updated_params)\n return updated_params", "def update_weights_rocchio(isRelevant, result):\n\tglobal global_weights\n\tmax_contributing_weight = max(result.sim_measures, key=result.sim_measures.get)\n\tif isRelevant:\n\t\tglobal_weights[max_contributing_weight] += alpha\n\t\tfor measure in result.sim_measures:\n\t\t\tglobal_weights[measure] -= beta \n\n\telse:\n\t\tglobal_weights[max_contributing_weight] -= alpha\n\t\tfor measure in result.sim_measures:\n\t\t\tglobal_weights[measure] += beta", "def set_hyperparams(self, params):", "def update_parameters_with_gd(parameters, grads, learning_rate):\n\n L = len(parameters) // 2 # number of layers in the neural networks\n\n # Update rule for each parameter\n for l in range(L):\n parameters[\"W\" + str(l+1)] = parameters[\"W\" + str(l+1)] - learning_rate * grads['dW' + str(l+1)]\n parameters[\"b\" + str(l+1)] = parameters[\"b\" + str(l+1)] - learning_rate * grads['db' + str(l+1)]\n \n return parameters", "def 
update_generate_params(self,inps,trgs,preds):\n batch_size = np.shape(trgs)[0]\n\n self.delta_weight_h_to_v = self.learning_rate / batch_size * np.transpose(trgs) @ (inps - preds)\n self.delta_bias_v = self.learning_rate * np.mean(inps - preds)\n \n self.weight_h_to_v += self.delta_weight_h_to_v\n self.bias_v += self.delta_bias_v \n \n return", "def _setup_from_parameters(self,params):\n\n # SHOULD WE CHECK HERE THAT INPUT PARAMETERS HAVE SAME KP / Z_STAR ?\n\n # copy input dictionary\n self.linP_params=params.copy()\n\n # will add polynomial describing the log power, around kp_kms\n linP_kms_2=0.5*params['alpha_star']\n linP_kms_1=params['n_star']\n A_star=(2*np.pi**2)*params['Delta2_star']/self.kp_kms**3\n linP_kms_0=np.log(A_star)\n linP_kms = np.poly1d([linP_kms_2,linP_kms_1,linP_kms_0])\n # why are we storing this poly1d object? When do we actually use it?\n self.linP_params['linP_kms']=linP_kms", "def set_priors(parnames, limits, linenames, vsyst, nssps=1):\n priors = {}\n for parname in parnames:\n name = parname.split(\"_\")[0]\n if name in limits: #all the CvD ssp parameters\n vmin, vmax = limits[name]\n# print(parname,vmin,vmax)\n delta = vmax - vmin\n priors[parname] = stats.uniform(loc=vmin, scale=delta)\n elif parname in vsyst:\n priors[parname] = stats.norm(loc=vsyst[parname], scale=500)\n elif parname == \"eta\": #what does eta do?\n priors[\"eta\"] = stats.uniform(loc=1., scale=19)#uniform distribution in range [1,19]\n elif parname == \"nu\": #what does nu do?\n priors[\"nu\"] = stats.uniform(loc=2, scale=20)#uniform distribution in range [2,20]\n elif parname == \"sigma\":\n priors[\"sigma\"] = stats.uniform(loc=50, scale=300)#obtains the uniform distribution on [loc, loc + scale]. i.e. uniform in range [50,300]\n elif parname == \"sigma_gas\":\n priors[parname] = stats.uniform(loc=50, scale=100)#uniform between [50,100]km/s\n elif name == \"w\":\n priors[parname] = stats.uniform(loc=0, scale=1)#weights uniform between 0 and 1\n elif name in linenames:\n# priors[parname] = stats.expon(loc=0, scale=0.5)#favors low values>~0; make even stronger by decreasing scale. \n priors[parname] = stats.expon(loc=0, scale=0.2)#favors low values>~0; make even stronger by decreasing scale. 
\n elif name in [\"pred\", \"pblue\"]:\n porder = int(parname.split(\"_\")[1])\n if porder == 0:\n mu, sd = 1 / nssps, 1\n a, b = (0 - mu) / sd, (np.infty - mu) / sd\n priors[parname] = stats.truncnorm(a, b, mu, sd)\n else:\n priors[parname] = stats.norm(0, 0.05)\n else:\n print(f\"parameter without prior: {parname}\")\n return priors", "def params_refactoring(_params):\n _params['wavelength'] = 1e-9 * 299792458 / _params['ms_nu']\n\n return _params", "def set_params(self):\n \n lo, hi = self.R.get((self.h, self.w, self.m), (0.0, 0.0))\n params.update({\n 'gamma' : 1.0, # minesweeper is a finite horizon game\n 'epsilon': 0.0,\n 'K': 16,\n 'R_lo': lo,\n 'R_hi': hi,\n 'max_depth': self.h * self.w / 2,\n 'c':hi-lo\n })", "def em_update_params(self):\n for name in self.likelihood_params:\n with self.elbo_check(name):\n self.update_param(name)", "def set_constants(self,\n f = 0.2,\n rho = 0.006,\n sigma1 = 0.0004,\n sigma2 = 0.1,\n number_of_persons = 10000,\n gender_ratio = 0.5,\n transmission_male_female_steady = 0.15,\n transmission_female_male_steady = 0.0625,\n transmission_male_female_casual = 0.6,\n transmission_female_male_casual = 0.25,\n asymptomatic_men = 0.1,\n asymptomatic_women = 0.45,\n incubation_time_men = 5,\n incubation_time_women = 10,\n patient_delay_treatment_men = 5,\n patient_delay_treatment_women = 8,\n recovery_rate_asymptomatic_men = 0.0074,\n recovery_rate_symptomatic_men = 0.04,\n recovery_rate_asymptomatic_women = 0.0044,\n recovery_rate_symptomatic_women = 0.03,\n screening_percentage = 0.02,\n sexual_activity_high = 0.05,\n resistance_probability = 0.0001,\n r0 = False):\n self.f = f\n self.rho = rho\n self.sigma1 = sigma1\n self.sigma2 = sigma2\n self.number_of_persons = number_of_persons\n self.gender_ratio = gender_ratio\n\n self.transmission_male_female_steady = transmission_male_female_steady\n self.transmission_female_male_steady = transmission_female_male_steady\n self.transmission_male_female_casual = transmission_male_female_casual\n self.transmission_female_male_casual = transmission_female_male_casual\n self.asymptomatic_men = asymptomatic_men\n self.asymptomatic_women = asymptomatic_women\n self.incubation_time_men = incubation_time_men\n self.incubation_time_women = incubation_time_women\n self.patient_delay_treatment_men = patient_delay_treatment_men\n self.patient_delay_treatment_women = patient_delay_treatment_women\n self.recovery_rate_asymptomatic_men = recovery_rate_asymptomatic_men\n self.recovery_rate_symptomatic_men = recovery_rate_symptomatic_men\n self.recovery_rate_asymptomatic_women = recovery_rate_asymptomatic_women\n self.recovery_rate_symptomatic_women = recovery_rate_symptomatic_women\n self.screening_percentage = screening_percentage\n self.sexual_activity_high = sexual_activity_high\n self.resistance_probability = resistance_probability\n self.r0 = r0", "def compute_derived_parameters(cls, fdict):\n cgg = fdict['cgd'] + fdict['cgs'] + fdict['cgb']\n return dict(\n cgg=cgg,\n cdd=fdict['cgd'] + fdict['cds'] + fdict['cdb'],\n css=fdict['cgs'] + fdict['cds'] + fdict['csb'],\n cbb=fdict['cgb'] + fdict['cdb'] + fdict['csb'],\n vstar=2.0 * (fdict['ids'] / fdict['gm']),\n gain=fdict['gm'] / fdict['gds'],\n ft=fdict['gm'] / (2.0 * np.pi * cgg),\n )", "def update_dependencies():\n\n\t# Reward map, for hippocampus reward one-hot conversion\n\tpar['reward_map'] = {\n\t\tpar['fix_break_penalty']\t\t: 0,\n\t\tpar['wrong_choice_penalty']\t\t: 1,\n\t\t0.\t\t\t\t\t\t\t\t: 2,\n\t\tpar['correct_choice_reward']\t: 3\n\t}\n\n\tpar['num_reward_types'] 
= len(par['reward_map'].keys())\n\tpar['reward_map_matrix'] = np.zeros([par['num_reward_types'],1]).astype(np.float32)\n\tfor key, val in par['reward_map'].items():\n\t\tpar['reward_map_matrix'][val,:] = key\n\n\t# Set input and output sizes\n\tpar['n_input'] = par['num_motion_tuned'] + par['num_fix_tuned'] + par['num_rule_tuned']\n\tpar['n_pol'] = par['num_motion_dirs'] + 1\n\n\t# Set trial step length\n\tpar['num_time_steps'] = par['trial_length']//par['dt']\n\n\t# Set up standard LSTM weights and biases\n\tLSTM_weight = lambda size : np.random.uniform(-par['LSTM_init'], par['LSTM_init'], size=size).astype(np.float32)\n\n\t# option 1\n\t#for p in ['Wf', 'Wi', 'Wo', 'Wc']: par[p+'_init'] = LSTM_weight([par['n_input']+par['n_hidden'], par['n_hidden']])\n\t#par['W_write_init'] = LSTM_weight([par['n_input']+par['n_val']+par['n_pol'], par['n_latent']])\n\t# option 2\n\t#for p in ['Wf', 'Wi', 'Wo', 'Wc']: par[p+'_init'] = LSTM_weight([par['n_input'], par['n_hidden'][0]])\n\t#for j in range(par['n_modules']):\n\t#\tfor p in ['Wf', 'Wi', 'Wo', 'Wc']: par[p+str(j)+'_init'] = LSTM_weight([par['n_input'], par['n_hidden'][j]])\n\t#\tfor p in ['Uf', 'Ui', 'Uo', 'Uc']: par[p+str(j)+'_init'] = LSTM_weight([par['n_hidden'][0], par['n_hidden'][j]])\n\t#\tfor p in ['bf', 'bi', 'bo', 'bc']: par[p+str(j)+'_init'] = np.zeros([1, par['n_hidden'][j]], dtype=np.float32)\n\tpar['W0_init'] = np.random.uniform(-0.02, 0.02, size=[par['n_input'], par['n_ff0']]).astype(np.float32)\n\tpar['W1_init'] = np.random.uniform(-0.02, 0.02, size=[par['n_ff0'], par['n_ff1']]).astype(np.float32)\n\tpar['W_td_init'] = np.random.uniform(-0.02, 0.02, size=[par['n_hidden'], par['n_ff0']]).astype(np.float32)\n\tpar['W_rnn_init'] = np.random.uniform(-0.02, 0.02, size=[par['n_ff0'], par['n_ff0']]).astype(np.float32)\n\tpar['b0_init'] = np.zeros([1, par['n_ff0']], dtype=np.float32)\n\tpar['b1_init'] = np.zeros([1, par['n_ff1']], dtype=np.float32)\n\n\t# V0\n\tn_input_ctl = par['n_pol']*par['n_val'] + par['n_pol'] + par['n_val'] + par['n_input']\n\t#n_input_ctl = 33 + par['n_pol'] + par['n_val'] + par['n_pol']*par['n_val']\n\t# V1\n\t#n_input_ctl = par['n_module_out']*par['n_modules'] + par['n_pol'] + par['n_val'] + par['n_pol']*par['n_val']\n\t#n_input_ctl = par['n_input'] + par['n_module_out']*par['n_modules'] + par['n_pol']*par['n_val']\n\n\tfor p in ['Wf', 'Wi', 'Wo', 'Wc']: par[p+'_init'] = LSTM_weight([n_input_ctl, par['n_hidden']])\n\tfor p in ['Uf', 'Ui', 'Uo', 'Uc']: par[p+'_init'] = LSTM_weight([par['n_hidden'], par['n_hidden']])\n\tfor p in ['bf', 'bi', 'bo', 'bc']: par[p+'_init'] = np.zeros([1, par['n_hidden']], dtype=np.float32)\n\n\n\t# LSTM posterior distribution weights\n\t#for p in ['Pf', 'Pi', 'Po', 'Pc']: par[p+'_init'] = LSTM_weight([par['n_tasks'], par['n_hidden']])\n\n\t# Cortex RL weights and biases\n\tpar['W_pol_init'] = np.random.uniform(-par['w_init'], par['w_init'], size=[par['n_hidden'], par['n_pol']]).astype(np.float32)\n\tpar['b_pol_init'] = np.zeros([1,par['n_pol']], dtype=np.float32)\n\tpar['W_val_init'] = np.random.uniform(-par['w_init'], par['w_init'], size=[par['n_hidden'], 1]).astype(np.float32)\n\tpar['b_val_init'] = np.zeros([1,1], dtype=np.float32)\n\n\tpar['W_norm'] = np.zeros((par['n_ff1'], par['n_ff1']), dtype=np.float32)\n\tfor i in range(par['n_ff1']):\n\t\tu = np.arange(i, i + 25)%par['n_ff1']\n\t\tpar['W_norm'][i, u] = 1.\n\n\n\n\t# Gate weights and biases\n\t\"\"\"\n\tpar['W_pos_gate_init'] = np.random.uniform(-par['w_init'], par['w_init'], 
size=[par['n_tasks'],1]).astype(np.float32)\n\tpar['W_cor_gate_init'] = np.random.uniform(-par['w_init'], par['w_init'], size=[par['n_pol'],1]).astype(np.float32)\n\tpar['W_hip_gate_init'] = np.random.uniform(-par['w_init'], par['w_init'], size=[par['n_pol'],1]).astype(np.float32)\n\tpar['W_cor_gate_val_init'] = np.random.uniform(-par['w_init'], par['w_init'], size=[par['n_val'],1]).astype(np.float32)\n\tpar['W_hip_gate_val_init'] = np.random.uniform(-par['w_init'], par['w_init'], size=[par['n_val'],1]).astype(np.float32)\n\tpar['b_act_gate_init'] = np.ones([1,1], dtype=np.float32)\n\tpar['b_pos_gate_init'] = np.ones([1,1], dtype=np.float32)\n\n\tpar['encoder_weight_file'] = './datadir/gotask_50unit_input_encoder_weights.pkl'\n\tprint('--> Loading encoder from {}.'.format(par['encoder_weight_file']))\n\tpar['encoder_init'] = pickle.load(open(par['encoder_weight_file'], 'rb'))['weights']['W']\n\t\"\"\"", "def update_recognize_params(self,inps,trgs,preds):\n batch_size = np.shape(trgs)[0]\n self.delta_weight_v_to_h = self.learning_rate/batch_size * np.transpose(trgs) @ (inps - preds)\n self.delta_bias_h = self.learning_rate * np.mean(inps - preds)\n\n self.weight_v_to_h += self.delta_weight_v_to_h\n self.bias_h += self.delta_bias_h\n \n return", "def init_parameters(obj, hyperparameters):\n # Initialize Global Configuration Parameter\n params = hyperparameters['global']\n setattr(obj, 'param', params)\n\n # Initialize Attributes (Pre-Checked Parameters)\n setattr(obj, 'learning_rate', params['learning_rate'])\n setattr(obj, 'loss', params['loss'])\n setattr(obj, 'max_iter', params['max_iter'])\n\n if params['loss'] == 'least_squares':\n setattr(obj, 'num_classes', 1)\n elif params['loss'] in ['binary_crossentropy', 'categorical_crossentropy', 'auto']:\n setattr(obj, 'num_classes', params['num_classes'])\n\n # Initialize Attributes (Optional Values - Based on Default Parameters)\n if 'l2_regularization' not in params or params['l2_regularization'] is None:\n setattr(obj, 'l2_regularization', 0)\n else:\n setattr(obj, 'l2_regularization', params['l2_regularization'])\n\n if 'max_bins' not in params:\n setattr(obj, 'max_bins', 255)\n else:\n setattr(obj, 'max_bins', params['max_bins'])\n\n if 'max_depth' not in params or params['max_depth'] is None:\n setattr(obj, 'max_depth', None)\n else:\n setattr(obj, 'max_depth', params['max_depth'])\n\n if 'max_leaf_nodes' not in params or params['max_leaf_nodes'] is None:\n setattr(obj, 'max_leaf_nodes', 31)\n else:\n setattr(obj, 'max_leaf_nodes', params['max_leaf_nodes'])\n\n if 'min_samples_leaf' not in params or params['min_samples_leaf'] is None:\n setattr(obj, 'min_samples_leaf', 20)\n else:\n setattr(obj, 'min_samples_leaf', params['min_samples_leaf'])\n\n if 'random_state' in params:\n setattr(obj, 'random_state', params['random_state'])\n else:\n setattr(obj, 'random_state', None)\n\n if 'scoring' in params:\n setattr(obj, 'scoring', params['scoring'])\n else:\n setattr(obj, 'scoring', None)\n\n if 'verbose' not in params or params['verbose'] is None:\n setattr(obj, 'verbose', False)\n else:\n setattr(obj, 'verbose', True)\n\n return obj", "def param_init(self, sig=0.01):\n self.rhos = np.ones(self.Ndim)\n self.a = np.random.rand(self.Ndim, self.Nhidden)\n self.c = np.random.rand(self.Nhidden)\n self.W = np.random.randn(self.Nhidden, self.Ndim) * sig\n self.alphas = np.zeros((self.Ndim, self.Ncomponents))\n self.mus = np.zeros((self.Ndim, self.Ncomponents))\n self.sigmas = np.zeros((self.Ndim, self.Ncomponents))\n self.optimize_params = 
[self.rhos, self.c, self.W]\n\n types = ['alpha', 'mu', 'sigma']\n self.bs = {}\n self.Vs = {}\n for t in types:\n self.bs[t] = np.random.randn(self.Ndim, self.Ncomponents) * sig\n self.Vs[t] = np.random.randn(self.Ndim, self.Nhidden,\n self.Ncomponents) * sig\n self.optimize_params.append(self.bs[t])\n self.optimize_params.append(self.Vs[t])", "def setParams(self, p = 2):\n self.p = p\n self.l = p - 1\n self.id_ntot = {}\n self.id_y = {}\n self.id_W = {}\n self.id_X = {}\n for i in self.uniids:\n tracker = (self.data['id'] == i)\n self.id_ntot.update({i: np.sum(tracker)})\n self.id_y.update({i:\n self.data['weight'][tracker].reshape(np.sum(tracker), 1)})\n self.id_W.update({i: self._designMatrix_(p, tracker)})\n self.id_X.update({i:\n self._designMatrix_(self.l+1,tracker,is_X=True)})\n self.id_Z = self.id_W.copy()", "def update_parameters(self, parameters, grads, learning_rate):\n\n L = len(parameters) // 2 # number of layers in the neural network\n v_corrected = {} # Initializing first moment estimate, python dictionary\n s_corrected = {} # Initializing second moment estimate, python dictionary\n self.t += 1\n\n # Perform Adam update on all parameters\n for l in range(L):\n # Moving average of the gradients. Inputs: \"v, grads, beta1\". Output: \"v\".\n self.v[\"dW\" + str(l + 1)] = self.beta1 * self.v[\"dW\" + str(l + 1)] + (1 - self.beta1) * grads['dW' + str(l + 1)]\n self.v[\"db\" + str(l + 1)] = self.beta1 * self.v[\"db\" + str(l + 1)] + (1 - self.beta1) * grads['db' + str(l + 1)]\n\n # Compute bias-corrected first moment estimate. Inputs: \"v, beta1, t\". Output: \"v_corrected\".\n v_corrected[\"dW\" + str(l + 1)] = self.v[\"dW\" + str(l + 1)] / (1 - np.power(self.beta1, self.t))\n v_corrected[\"db\" + str(l + 1)] = self.v[\"db\" + str(l + 1)] / (1 - np.power(self.beta1, self.t))\n\n # Moving average of the squared gradients. Inputs: \"s, grads, beta2\". Output: \"s\".\n self.s[\"dW\" + str(l + 1)] = self.beta2 * self.s[\"dW\" + str(l + 1)] + (1 - self.beta2) * np.power(grads['dW' + str(l + 1)], 2)\n self.s[\"db\" + str(l + 1)] = self.beta2 * self.s[\"db\" + str(l + 1)] + (1 - self.beta2) * np.power(grads['db' + str(l + 1)], 2)\n\n # Compute bias-corrected second raw moment estimate. Inputs: \"s, beta2, t\". Output: \"s_corrected\".\n s_corrected[\"dW\" + str(l + 1)] = self.s[\"dW\" + str(l + 1)] / (1 - np.power(self.beta2, self.t))\n s_corrected[\"db\" + str(l + 1)] = self.s[\"db\" + str(l + 1)] / (1 - np.power(self.beta2, self.t))\n\n # Update parameters. Inputs: \"parameters, learning_rate, v_corrected, s_corrected, epsilon\". 
Output: \"parameters\".\n parameters[\"W\" + str(l + 1)] = parameters[\"W\" + str(l + 1)] - learning_rate * v_corrected[\"dW\" + str(l + 1)] / np.sqrt(self.s[\"dW\" + str(l + 1)] + self.epsilon)\n parameters[\"b\" + str(l + 1)] = parameters[\"b\" + str(l + 1)] - learning_rate * v_corrected[\"db\" + str(l + 1)] / np.sqrt(self.s[\"db\" + str(l + 1)] + self.epsilon)\n\n return parameters", "def update_multiple_parameters(self, detuning=None, lamb_dicke=None,\n base_rabi=None):\n if detuning is not None:\n self.__detuning = detuning\n if lamb_dicke is not None:\n self.__lamb_dicke = lamb_dicke\n if base_rabi is not None:\n self.__base_rabi = base_rabi\n if detuning is None and lamb_dicke is None and base_rabi is None:\n return\n self.__update_prefactors()", "def init_params(self):\n self.params = Parameters()\n self.params.add('qoff', self.qoff, vary=0, min=-np.inf, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('yscale', self.yscale, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('int_bg', self.int_bg, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('Rc', self.Rc, vary=0, min=-np.inf, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('sur_den', self.sur_den, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)\n self.params.add('ion_depth', self.ion_depth, vary=0, min=0, max=np.inf, expr=None, brute_step=0.1)", "def update_compartments(self, food_glucose):\n self.g_t, self.m_t = self.new_values(food_glucose, self.get_variables())", "def set_shape_params(self, params):\n self.alpha = params[0]\n self.beta = params[1]\n self.gamma = params[2]\n self.c500 = params[3]\n self.P0 = params[4]", "def step(self):\n for p, grad, v, square_grad_avg, delta_x_acc in self.params:\n # Compute the running average of the squared gradients \n square_grad_avg.mul_(self.rho)\n square_grad_avg.addcmul_(grad, grad, value = 1 - self.rho)\n # Compute the RMS of the previous squared gradients (eps to avoid numerical issues later for division)\n std = (square_grad_avg.add_(self.eps)).sqrt_()\n # Compute the accumulated update\n delta_x = ((delta_x_acc.add_(self.eps)).sqrt_()) * grad / std\n # Accumulate the updates\n delta_x_acc.mul_(self.rho)\n delta_x_acc.addcmul_(delta_x, delta_x, value = 1 - self.rho) \n # Update the parameters\n p.add_(delta_x, alpha = - self.lr)", "def set_parameters(self, par):\n try:\n for l in self.cell.layers:\n r_curve = cmf.VanGenuchtenMualem(\n Ksat=10**par.pKsat, phi=par.porosity, alpha=par.alpha, n=par.n\n )\n r_curve.w0 = r_curve.fit_w0()\n l.soil = r_curve\n self.cell.saturated_depth = 0.5\n self.gw.potential = self.cell.z - 0.5\n except RuntimeError as e:\n sys.stderr.write(\"Set parameters failed with:\\n\" + str(par) + \"\\n\" + str(e))\n raise", "def update_params(layers, param_grads, learning_rate):\n for layer, layer_backprop_grads in zip(layers, param_grads):\n for param, grad in itertools.izip(layer.get_params_iter(), layer_backprop_grads):\n # The parameter returned by the iterator point to the memory space of\n # the original layer and can thus be modified inplace.\n param -= learning_rate * grad # Update each parameter" ]
[ "0.6255577", "0.62177485", "0.6214475", "0.5984097", "0.5967375", "0.59337133", "0.59327203", "0.59210086", "0.58783424", "0.5867557", "0.57922786", "0.57610255", "0.57574403", "0.57097393", "0.5705617", "0.5699275", "0.56962854", "0.5681587", "0.5657348", "0.56380326", "0.5610231", "0.56098706", "0.5598722", "0.5591783", "0.5584359", "0.5582361", "0.554142", "0.553775", "0.5519301", "0.5495096", "0.5492547", "0.5487352", "0.54836345", "0.5481231", "0.5478128", "0.5477239", "0.54534197", "0.54507816", "0.5448117", "0.5434139", "0.5429331", "0.5428411", "0.5424247", "0.54216826", "0.54113764", "0.53905314", "0.53829336", "0.5370162", "0.53695995", "0.5368765", "0.5367921", "0.53678584", "0.5361216", "0.53530645", "0.53509325", "0.5344839", "0.5340901", "0.5330452", "0.5330253", "0.53247046", "0.53158796", "0.5308874", "0.53088593", "0.53077745", "0.52997726", "0.52861387", "0.5253509", "0.5250955", "0.52502346", "0.5249474", "0.52438754", "0.52411425", "0.52325016", "0.5229243", "0.5224557", "0.52231216", "0.52228975", "0.5221465", "0.52205324", "0.52095675", "0.5208349", "0.5201864", "0.52000177", "0.5194619", "0.5192502", "0.5192125", "0.5187919", "0.5184666", "0.5181304", "0.51655614", "0.5160989", "0.5160197", "0.5154193", "0.5147417", "0.51391846", "0.513743", "0.5131924", "0.51303846", "0.5128386", "0.5123901" ]
0.63643956
0
Method calls ``new_haloprop_func_dict`` to create new halo properties as columns to the mock catalog, if applicable.
def add_new_haloprops(self, galaxy_table): 
    if hasattr(self, 'new_haloprop_func_dict'): 
        d = self.new_haloprop_func_dict 
        for key, func in d.iteritems(): 
            if key not in galaxy_table.keys(): 
                galaxy_table[key] = func(galaxy_table=galaxy_table)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_make_hmp(self):\n table_factory = DataTableFactory(PACKET_DIR)\n table_factory.hmp()", "def _parse_constructor_kwargs(self, **kwargs):\n\n try:\n halo_id = np.array(kwargs['halo_id'])\n assert type(halo_id) is np.ndarray\n Nhalos = custom_len(halo_id)\n except KeyError:\n msg = (\"\\nThe UserSuppliedHaloCatalog requires a ``halo_id`` keyword argument.\")\n raise HalotoolsError(msg)\n\n halo_table_dict = (\n {key: np.array(kwargs[key]) for key in kwargs\n if ((type(kwargs[key]) is np.ndarray) | (type(kwargs[key]) is Column)) and\n (custom_len(kwargs[key]) == Nhalos) and (key[:5] == 'halo_')})\n self._test_halo_table_dict(halo_table_dict)\n\n metadata_dict = (\n {key: kwargs[key] for key in kwargs\n if (key not in halo_table_dict) and (key != 'ptcl_table')}\n )\n\n return halo_table_dict, metadata_dict", "def _add_hybrid_cols(self):\n for new_col_name, method in HYBRID_METHODS.items():\n out = method(self)\n if out is not None:\n try:\n self._hybrid_meta[new_col_name] = out\n except ValueError as e:\n msg = (\"Unable to add {!r} column to hybrid meta. The \"\n \"following exception was raised when adding \"\n \"the data output by '{}': {!r}.\")\n w = msg.format(new_col_name, method.__name__, e)\n logger.warning(w)\n warn(w, OutputWarning)", "def _make_haloupdate(self, f, fixed, halos, **kwargs):\n return", "def create():\n df = prepare_dataframe(io[\"harmonization_df_output_path\"], index_col=\"label\")\n assumption_map = create_assumption_map(columns, df)\n assumption_map.to_csv(io[\"harmonization_output_assumption_path\"], index=False)\n\n # Heat Rate regression Map, Valid only for the Coal\n regression_map = create_regression_map(df)\n\n res = other_regression(df[df[\"fuel_type\"] == \"coal\"], [\"heat_rate\"], \"delta_heatrate\")\n regression_map[\"intersect_err\"] = res.bse[0]\n regression_map[\"slope_err\"] = res.bse[1]\n print(regression_map)\n regression_map.to_csv(io[\"harmonization_output_regression_path\"], index=False)", "def _generate_hcs_meta(self):\n self.hcs_meta = {'plate': self.plate_meta}\n\n well_metas = []\n for well in self.wells:\n meta = self.store[well].attrs.get('well')\n well_metas.append(meta)\n\n self.hcs_meta['well'] = well_metas", "def _mc_galprop(self, seed=None, **kwargs):\n model_helpers.update_param_dict(self, **kwargs)\n self._set_correlation_strength()\n\n if ('galaxy_table' in kwargs.keys()) & ('halos' in kwargs.keys()):\n msg = (\"The mc_\"+self.galprop_key+\" method accepts either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\" + \n \" but never both.\")\n raise KeyError(msg)\n elif 'galaxy_table' in kwargs.keys():\n galaxy_table = kwargs['galaxy_table']\n operative_sec_haloprop_key = (\n model_defaults.host_haloprop_prefix + self.sec_haloprop_key)\n elif 'halos' in kwargs.keys():\n galaxy_table = kwargs['halos']\n operative_sec_haloprop_key = self.sec_haloprop_key\n else:\n msg = (\"The mc_\"+self.galprop_key+\" requires either \" + \n \"a halos keyword argument, or a galaxy_table keyword argument\")\n raise KeyError(msg)\n\n self.add_new_haloprops(galaxy_table)\n\n # All at once, draw all the randoms we will need\n np.random.seed(seed=seed)\n all_randoms = np.random.random(len(galaxy_table)*2)\n galprop_cumprob = all_randoms[0:len(galaxy_table)]\n galprop_scatter = all_randoms[len(galaxy_table):]\n\n # Initialize the output array\n output_galprop = np.zeros(len(galaxy_table))\n\n # Determine binning and loop range\n if 'galaxy_table_slice_array' not in kwargs.keys():\n binned_prim_galprop = np.digitize(\n 
galaxy_table[self.prim_galprop_key], \n self.prim_galprop_bins)\n prim_galprop_loop_range = set(binned_prim_galprop)\n else:\n prim_galprop_loop_range = range(len(self.one_point_lookup_table))\n\n for i in prim_galprop_loop_range:\n\n # Determine the slice corresponding to the i^th prim_galprop bin\n if 'galaxy_table_slice_array' not in kwargs.keys():\n idx_bini = np.where(binned_prim_galprop==i)[0]\n num_bini = len(idx_bini)\n else:\n idx_bini = kwargs['galaxy_table_slice_array'][i]\n num_bini = len(galaxy_table[idx_bini])\n\n if len(idx_bini) > 0:\n # Fetch the appropriate number of randoms\n # for the i^th prim_galprop bin\n galprop_cumprob_bini = galprop_cumprob[idx_bini]\n galprop_scatter_bini = galprop_scatter[idx_bini]\n\n # Fetch the halos in the i^th prim_galprop bin, \n # and determine how they are sorted\n haloprop_bini = galaxy_table[idx_bini][operative_sec_haloprop_key]\n idx_sorted_haloprop_bini = np.argsort(haloprop_bini)\n\n galprop_bini = self._condition_matched_galprop(\n haloprop_bini[idx_sorted_haloprop_bini], \n galprop_cumprob_bini, i, galprop_scatter_bini, self.tol)\n\n # Assign the final values to the \n # appropriately sorted subarray of output_galprop\n output_galprop[idx_bini[idx_sorted_haloprop_bini]] = galprop_bini\n\n return output_galprop", "def add_properties_to_df(df):\n\n df['N_rot'] = df.apply(Nrot, axis=1)\n df['HAC'] = df.apply(heavy_atoms, axis=1)\n df['cLogP'] = df.apply(clogP, axis =1)\n df['TSPA'] = df.apply(TPSA, axis=1)\n df['NDon'] = df.apply(NDon, axis=1)\n df['NAcc'] = df.apply(NAcc, axis=1)\n df['Fsp3'] = df.apply(Fsp3, axis=1)\n\n return df", "def test_attributes(self):\n result = self.plugin_instance.create_probability_cube(\n self.percentiles_cube, self.orography_cube)\n self.assertEqual(result.units, \"1\")\n self.assertEqual(result.name(), self.new_name)\n self.assertEqual(result.attributes['relative_to_threshold'], 'below')\n self.assertEqual(result.attributes['thresholded_using'],\n 'surface_altitude')", "def test_health_ng(self, mock):\n mock.configure_mock(**(self.config_payload(1, 0)))\n d = lf.lambda_handler(**(self.lambdaparam))\n self.assertEqual(d, 1)\n mock.client.return_value.update_thing_shadow.assert_called_once_with(\n thingName=self.thingname,\n payload=lf.payload_put(lf.shadow_update_data))", "def addfunctions2new(abunch, key):\n snames = [\n \"BuildingSurface:Detailed\",\n \"Wall:Detailed\",\n \"RoofCeiling:Detailed\",\n \"Floor:Detailed\",\n \"FenestrationSurface:Detailed\",\n \"Shading:Site:Detailed\",\n \"Shading:Building:Detailed\",\n \"Shading:Zone:Detailed\",\n ]\n snames = [sname.upper() for sname in snames]\n if key in snames:\n func_dict = {\n \"area\": fh.area,\n \"height\": fh.height, # not working correctly\n \"width\": fh.width, # not working correctly\n \"azimuth\": fh.azimuth,\n \"tilt\": fh.tilt,\n \"coords\": fh.getcoords, # needed for debugging\n }\n try:\n abunch.__functions.update(func_dict)\n except KeyError as e:\n abunch.__functions = func_dict\n return abunch", "def test_metadata_filter_hmp(self):\n table_factory = DataTableFactory(PACKET_DIR)\n hmp1 = table_factory.hmp()\n\n metadata = pd.DataFrame({'foo': {'haib18CEM5332_HMGTJCCXY_SL342402': 1}})\n table_factory.set_metadata(metadata)\n hmp2 = table_factory.hmp()\n\n self.assertEqual(hmp1.shape[0] // 2, hmp2.shape[0])", "def set_hod(self, hod_dict):\n self.__init__(hod_dict)", "def make(self, halo_spots):\n calls = {}\n generated = OrderedDict()\n for hs in halo_spots:\n # 1) Callables/Calls for send/recv\n begin_exchange = []\n for f, v in 
hs.fmapper.items():\n # Sanity check\n assert f.is_Function\n assert f.grid is not None\n\n # Note: to construct the halo exchange Callables, use the generic `df`,\n # instead of `f`, so that we don't need to regenerate code for Functions\n # that are symbolically identical to `f` except for the name\n df = f.__class__.__base__(name='a', grid=f.grid, shape=f.shape_global,\n dimensions=f.dimensions)\n # `gather`, `scatter`, `sendrecv` and `haloupdate` are generic by\n # construction -- they only need to be generated once for each unique\n # pair (`ndim`, `halos`)\n if (f.ndim, v) not in generated:\n key = len(generated)\n haloupdate = self._make_haloupdate(df, v.loc_indices, v.halos, key)\n sendrecv = self._make_sendrecv(df, v.loc_indices)\n gather = self._make_copy(df, v.loc_indices)\n scatter = self._make_copy(df, v.loc_indices, swap=True)\n # Arrange the newly constructed Callables in a suitable data\n # structure to capture the call tree. This may be useful to\n # the HaloExchangeBuilder user\n haloupdate = EFuncNode(haloupdate)\n sendrecv = EFuncNode(sendrecv, haloupdate)\n gather = EFuncNode(gather, sendrecv)\n scatter = EFuncNode(scatter, sendrecv)\n\n generated[(f.ndim, v)] = haloupdate\n\n # `haloupdate` Call construction\n comm = f.grid.distributor._obj_comm\n nb = f.grid.distributor._obj_neighborhood\n loc_indices = list(v.loc_indices.values())\n args = [f, comm, nb] + loc_indices\n begin_exchange.append(Call(generated[(f.ndim, v)].name, args))\n\n # 2) Callables/Calls for wait (no-op in case of synchronous halo exchange)\n wait_exchange = []\n for f, v in hs.fmapper.items():\n # TODO\n pass\n\n # 3) Callables/Calls for remainder computation (no-op in case of\n # synchronous halo exchange)\n remainder = []\n\n calls[hs] = List(body=begin_exchange + [hs.body] + wait_exchange + remainder)\n\n return flatten(generated.values()), calls", "def _apply_harmonized_metadata_to_sample(sample: Sample, harmonized_metadata: dict):\n for key, value in harmonized_metadata.items():\n setattr(sample, key, value)", "def setUp(self):\n problem = setup_house_L(size=(40, 40))\n\n env = MetroLayoutEnv()\n\n costfn = objectives.ConstraintsHeur(problem,\n wmap={'AspectConstraint':0.1,\n 'AreaConstraint': 2\n },\n default=1.)\n\n model = algo.MetropolisHastings(env, costfn)\n\n self.exp = SimpleMH(\n env,\n problem,\n model=model,\n cost_fn=costfn,\n num_iter=1000,\n initializer=PointsInBound(problem, env, size=3, seed=69)\n )", "def test_property_cols():\n image_file = 'input/D00572501_z_c01_r3624p01_immasked.fits.fz'\n cat_file = 'input/D00572501_z_c01_r5473p01_piff.fits'\n psf_file = os.path.join('output','test_property_cols.piff')\n hsm_file = os.path.join('output','test_property_cols_hsm.fits')\n\n nstars = 25\n scale = 0.26\n size = 15\n order = 1\n stamp_size = 25\n\n config = {\n 'input' : {\n 'nstars': nstars,\n 'image_file_name' : image_file,\n 'image_hdu' : 1,\n 'weight_hdu' : 3,\n 'badpix_hdu' : 2,\n 'cat_file_name' : cat_file,\n 'x_col' : 'XWIN_IMAGE',\n 'y_col' : 'YWIN_IMAGE',\n 'sky_col' : 'BACKGROUND',\n 'stamp_size' : stamp_size,\n 'ra' : 'TELRA',\n 'dec' : 'TELDEC',\n 'gain' : 'GAINA',\n 'satur' : 'SATURATA',\n 'chipnum': 1,\n # Select ones with a variety of dtypes.\n 'property_cols' : ['SOURCE_ID', 'GI_COLOR', 'FLAGS', 'FLAG_COLOR', 'SPREAD_MODEL'],\n },\n 'select' : {\n 'type': 'Properties',\n 'where': 'np.abs(SPREAD_MODEL) < 3.e-4',\n\n 'reserve_frac' : 0.2,\n 'seed' : 1234,\n },\n 'psf' : {\n 'model' : {\n 'type' : 'PixelGrid',\n 'scale' : scale,\n 'size' : size,\n 'interp' : 
'Lanczos(5)',\n },\n 'interp' : {\n 'type' : 'BasisPolynomial',\n 'order' : [1, 1, 1],\n 'keys': ['u', 'v', 'GI_COLOR'],\n },\n },\n 'output' : {\n 'file_name' : psf_file,\n 'stats': [\n {\n 'type': 'HSMCatalog',\n 'file_name': hsm_file,\n },\n ],\n },\n }\n\n piff.piffify(config)\n hsm = fitsio.read(hsm_file)\n cat = fitsio.read(cat_file)\n\n print('hsm dtype = ',hsm.dtype)\n print('cat dtype = ',cat.dtype)\n\n for key in hsm.dtype.names:\n print(key)\n if key in cat.dtype.names:\n assert hsm[key].dtype.type == cat[key].dtype.type\n elif key == 'reserve':\n assert hsm[key].dtype.type == np.dtype(bool).type\n elif key.startswith('flag'):\n assert hsm[key].dtype.type == np.dtype(int).type\n elif key == 'sky':\n # This one is read from the input catalog, but renamed\n assert hsm[key].dtype.type == np.float32\n else:\n assert hsm[key].dtype.type == np.dtype(float).type\n\n # Check that drawing the image works without specifying chipnum.\n # This is ok so long as the input is really only a single chip.\n # cf. Issue #140\n psf = piff.read(psf_file)\n im1 = psf.draw(35, 40, center=True, GI_COLOR=1)\n\n # If the input field didn't include chipnum, then it makes no difference for a single chip.\n del config['input']['chipnum']\n piff.piffify(config)\n psf = piff.read(psf_file)\n im2 = psf.draw(35, 40, center=True, GI_COLOR=1)\n assert im1 == im2", "def columns_setup(self):\n self.required = None\n self.addition = None\n self.deletion = None\n self.retention = None\n self.rename = None", "def _setup_hpos():\n hpo_dao = HPODao()\n hpo_dao.insert(\n HPO(hpoId=UNSET_HPO_ID, name=\"UNSET\", displayName=\"Unset\", organizationType=OrganizationType.UNSET)\n )\n hpo_dao.insert(\n HPO(hpoId=PITT_HPO_ID, name=\"PITT\", displayName=\"Pittsburgh\", organizationType=OrganizationType.HPO)\n )\n hpo_dao.insert(\n HPO(hpoId=AZ_HPO_ID, name=\"AZ_TUCSON\", displayName=\"Arizona\", organizationType=OrganizationType.HPO)\n )\n\n org_dao = OrganizationDao()\n org_dao.insert(\n Organization(\n organizationId=AZ_ORG_ID,\n externalId=\"AZ_TUCSON_BANNER_HEALTH\",\n displayName=\"Banner Health\",\n hpoId=AZ_HPO_ID,\n )\n )\n org_dao.insert(\n Organization(\n organizationId=PITT_ORG_ID,\n externalId=\"PITT_BANNER_HEALTH\",\n displayName=\"PITT display Banner Health\",\n hpoId=PITT_HPO_ID,\n )\n )\n\n site_dao = SiteDao()\n site_dao.insert(\n Site(\n siteName=\"Monroeville Urgent Care Center\",\n googleGroup=\"hpo-site-monroeville\",\n mayolinkClientNumber=7035769,\n organizationId=PITT_ORG_ID,\n hpoId=PITT_HPO_ID,\n )\n )\n site_dao.insert(\n Site(\n siteName=\"Phoenix Urgent Care Center\",\n googleGroup=\"hpo-site-bannerphoenix\",\n mayolinkClientNumber=7035770,\n organizationId=PITT_ORG_ID,\n hpoId=PITT_HPO_ID,\n )\n )\n site_dao.insert(\n Site(\n siteName=\"Phoenix clinic\",\n googleGroup=\"hpo-site-clinic-phoenix\",\n mayolinkClientNumber=7035770,\n organizationId=AZ_ORG_ID,\n hpoId=AZ_HPO_ID,\n )\n )", "def generate_test_floors(self):\n def generate_floors_for_location(floor_names, location_name):\n item_dict = {}\n for name in floor_names:\n number = int(''.join([n for n in name if n.isdigit()]))\n item_dict['{}_{}'.format(name, location_name)] = {\n 'number': number,\n 'location': location_name\n }\n return item_dict\n\n self.fs_l1_o1_dict = \\\n generate_floors_for_location(\n ['f0', 'f1', 'f2', 'f3_del', 'f4_del'],\n 'l1_o1')\n\n self.fs_l1_sub1_o1_dict = \\\n generate_floors_for_location(\n ['f0', 'f1', 'f2', 'f3', 'f4'],\n 'l1_sub1_o1')\n\n self.fs_l1_o2_dict = \\\n generate_floors_for_location(\n ['f0', 'f1', 
'f2'],\n 'l1_o2')\n\n self.fs_l1_sub1_o2_dict = \\\n generate_floors_for_location(\n ['f0', 'f1', 'f2'],\n 'l1_sub1_o2')\n\n self.fs_dict = {\n **self.fs_l1_o1_dict,\n **self.fs_l1_sub1_o1_dict,\n **self.fs_l1_o2_dict,\n **self.fs_l1_sub1_o2_dict,\n }\n\n self.floors = self.create_floors_from_data(\n self.fs_dict, self.locations)", "def __init__(self, request, **kwargs):\n super(PSIHDReport, self).__init__(request, **kwargs)\n calculate_fn = lambda key, _: key[len(self.place_types) + 1]\n self.columns['demo_type'] = Column(\"Worker Type\", calculate_fn=calculate_fn)\n self.columns['demo_type'].view = FunctionView(calculate_fn=calculate_fn)\n self.function_views['demo_type'] = self.columns['demo_type'].view", "def _setup_metadata(self):\n # loom_metadata is what we use to pass all the information about\n # the loom (max_depth, which typeshapes are supported, and the signatures of\n # the LoomOps) to scheduler.cc\n loom_metadata = loom_pb2.LoomMetadata()\n loom_metadata.max_depth = self._max_depth\n for ts, tensor_names in zip(\n self._type_shapes, self._ts_idx_to_tensor_names):\n type_shape_metadata = loom_metadata.type_shape_metadata.add()\n type_shape_metadata.dtype = ts.dtype_enum\n type_shape_metadata.shape.extend(ts.shape)\n type_shape_metadata.tag = ts.tag\n type_shape_metadata.name = str(ts) # Debug string.\n type_shape_metadata.tensor_names.extend(tensor_names)\n type_shape_metadata.is_batch_input = (\n (ts in self._batch_inputs) or self._direct_feed_dict)\n\n for op_name, op in zip(self._loom_op_names, self._loom_ops):\n op_metadata = loom_metadata.op_metadata.add()\n op_metadata.name = op_name\n op_metadata.input_ts_idx.extend(\n self._type_shape_to_idx[ts] for ts in op.input_type_shapes)\n op_metadata.output_ts_idx.extend(\n self._type_shape_to_idx[ts] for ts in op.output_type_shapes)\n\n self._loom_metadata_str = (\n loom_metadata.SerializeToString())", "def build_metrics_columns(manager):\r\n manager.gen_labordollar_perhour_column(with_formulas=False)\r\n manager.gen_laborhours_unitarea(with_formulas=False)\r\n manager.color_column(\"Labor $/Hr\")\r\n manager.color_column(\"Labor Hours/Unit Area\")", "def __init__(self,cosmology, mass_function, halo_physics, kh_vector, mass_bins, volume, kh_min=0, pt_type = 'EFT', pade_resum = True, smooth_density = True, IR_resum = True, npoints = 1000, verb=False):\n\n # Write attributes, if they're of the correct type\n if isinstance(cosmology, Cosmology):\n self.cosmology = cosmology\n else:\n raise TypeError('cosmology input must be an instance of the Cosmology class!')\n if isinstance(mass_function, MassFunction):\n self.mass_function = mass_function\n else:\n raise TypeError('mass_function input must be an instance of the MassFunction class!')\n if isinstance(halo_physics, HaloPhysics):\n self.halo_physics = halo_physics\n else:\n raise TypeError('halo_physics input must be an instance of the HaloPhysics class!')\n\n # Write useful attributes\n self.kh_vector = kh_vector\n self.kh_min = kh_min\n self.mass_bins = mass_bins\n self.N_bins = len(mass_bins)-1\n self.N_k = len(self.kh_vector)\n self.volume = volume\n self.verb = verb\n self.pt_type = pt_type\n self.pade_resum = pade_resum\n self.smooth_density = smooth_density\n self.IR_resum = IR_resum\n self.npoints = npoints\n\n # Generate a power spectrum class with this k-vector\n self.halo_model = HaloModel(cosmology, mass_function, halo_physics, kh_vector, kh_min,verb=self.verb)\n\n # Copy in the MassIntegrals class\n self.mass_integrals = self.halo_model.mass_integrals\n\n if 
self.cosmology.use_neutrinos:\n if self.verb:\n print(\"Note: massive neutrinos are not implemented in full, so we assume CDM+baryon power spectra here.\")\n print(\"(This will creates only a (subdominant) percent-level error for typical neutrino masses.)\")\n\n # Run some checks\n assert self.mass_bins[0]>=np.power(10.,self.mass_integrals.min_logM_h), 'Minimum bin must be above MassIntegral limit!'\n assert self.mass_bins[-1]<=np.power(10.,self.mass_integrals.max_logM_h), 'Maximum bin must be below MassIntegral limit!'\n\n # Compute linear power for the k-vector\n self.linear_power = self.cosmology.compute_linear_power(self.kh_vector,self.kh_min).copy()", "def init_columns(self, project, columns):\n self.projects.update_entry(pk=project, project={\"columns\": []}).result()\n cols = []\n\n for path, unit in columns.items():\n col = {\"path\": f\"data.{path}\"}\n if unit is not None:\n col[\"unit\"] = unit\n\n cols.append(col)\n\n return self.projects.update_entry(\n pk=project, project={\"columns\": cols}\n ).result()", "def _add_vars_metadata(nemo_hr):\n nemo_hr.atmpres.attrs[\"level\"] = \"mean sea level\"\n nemo_hr.atmpres.attrs[\"long_name\"] = \"Pressure Reduced to MSL\"\n nemo_hr.atmpres.attrs[\"standard_name\"] = \"air_pressure_at_sea_level\"\n nemo_hr.atmpres.attrs[\"units\"] = \"Pa\"\n\n # nemo_hr.LHTFL_surface.attrs[\"level\"] = \"surface\"\n # nemo_hr.LHTFL_surface.attrs[\"long_name\"] = \"\"\n # nemo_hr.LHTFL_surface.attrs[\"standard_name\"] = \"\"\n # nemo_hr.LHTFL_surface.attrs[\"units\"] = \"\"\n # nemo_hr.LHTFL_surface.attrs[\"ioos_category\"] = \"\"\n # nemo_hr.LHTFL_surface.attrs[\"comment\"] = \"how calculated\"\n\n nemo_hr.percentcloud.attrs[\"long_name\"] = \"Cloud Fraction\"\n nemo_hr.percentcloud.attrs[\"standard_name\"] = \"cloud_area_fraction\"\n nemo_hr.percentcloud.attrs[\"units\"] = \"percent\"\n\n nemo_hr.PRATE_surface.attrs[\"level\"] = \"surface\"\n nemo_hr.PRATE_surface.attrs[\"long_name\"] = \"Precipitation Rate\"\n nemo_hr.PRATE_surface.attrs[\"standard_name\"] = \"precipitation_flux\"\n nemo_hr.PRATE_surface.attrs[\"units\"] = \"kg/m^2/s\"\n\n nemo_hr.nav_lat.attrs[\"ioos_category\"] = \"location\"\n\n nemo_hr.nav_lon.attrs[\"ioos_category\"] = \"location\"\n\n nemo_hr.precip.attrs[\"level\"] = \"surface\"\n nemo_hr.precip.attrs[\"long_name\"] = \"Total Precipitation\"\n nemo_hr.precip.attrs[\"standard_name\"] = \"precipitation_flux\"\n nemo_hr.precip.attrs[\"units\"] = \"kg/m^2/s\"\n\n nemo_hr.qair.attrs[\"level\"] = \"2 m above surface\"\n nemo_hr.qair.attrs[\"long_name\"] = \"Specific Humidity\"\n nemo_hr.qair.attrs[\"standard_name\"] = \"specific_humidity_2maboveground\"\n nemo_hr.qair.attrs[\"units\"] = \"kg/kg\"\n nemo_hr.qair.attrs[\n \"comment\"\n ] = \"calculated from sea level air pressure and dewpoint temperature via WMO 2012 ocean best practices\"\n\n nemo_hr.RH_2maboveground.attrs[\"level\"] = \"2 m above surface\"\n nemo_hr.RH_2maboveground.attrs[\"long_name\"] = \"Relative Humidity\"\n nemo_hr.RH_2maboveground.attrs[\"standard_name\"] = \"relative_humidity_2maboveground\"\n nemo_hr.RH_2maboveground.attrs[\"units\"] = \"percent\"\n nemo_hr.RH_2maboveground.attrs[\n \"comment\"\n ] = \"calculated from air temperature and dewpoint temperature via WMO 2012 ocean best practices\"\n\n nemo_hr.solar.attrs[\"level\"] = \"surface\"\n nemo_hr.solar.attrs[\"long_name\"] = \"Downward Short-Wave Radiation Flux\"\n nemo_hr.solar.attrs[\"standard_name\"] = \"net_downward_shortwave_flux_in_air\"\n nemo_hr.solar.attrs[\"units\"] = \"W/m^2\"\n\n 
nemo_hr.tair.attrs[\"level\"] = \"2 m above surface\"\n nemo_hr.tair.attrs[\"long_name\"] = \"Air Temperature\"\n nemo_hr.tair.attrs[\"standard_name\"] = \"air_temperature_2maboveground\"\n nemo_hr.tair.attrs[\"units\"] = \"K\"\n\n nemo_hr.therm_rad.attrs[\"level\"] = \"surface\"\n nemo_hr.therm_rad.attrs[\"long_name\"] = \"Downward Long-Wave Radiation Flux\"\n nemo_hr.therm_rad.attrs[\"standard_name\"] = \"net_downward_longwave_flux_in_air\"\n nemo_hr.therm_rad.attrs[\"units\"] = \"W/m^2\"\n nemo_hr.therm_rad.attrs[\"comment\"] = (\n \"calculated from saturation water vapour pressure, air temperature, and cloud fraction \"\n \"via Dilly-Unsworth correlation\"\n )\n\n nemo_hr.u_wind.attrs[\"level\"] = \"10 m above surface\"\n nemo_hr.u_wind.attrs[\"long_name\"] = \"U-Component of Wind\"\n nemo_hr.u_wind.attrs[\"standard_name\"] = \"x_wind\"\n nemo_hr.u_wind.attrs[\"units\"] = \"m/s\"\n nemo_hr.u_wind.attrs[\"ioos_category\"] = \"wind speed and direction\"\n\n nemo_hr.v_wind.attrs[\"level\"] = \"10 m above surface\"\n nemo_hr.v_wind.attrs[\"long_name\"] = \"V-Component of Wind\"\n nemo_hr.v_wind.attrs[\"standard_name\"] = \"y_wind\"\n nemo_hr.v_wind.attrs[\"units\"] = \"m/s\"\n nemo_hr.v_wind.attrs[\"ioos_category\"] = \"wind speed and direction\"\n\n nemo_hr.attrs[\n \"history\"\n ] += f\"\\n{arrow.now().format('ddd MMM DD HH:mm:ss YYYY')}: Add data variables metadata\"", "def setUp(self):\r\n\r\n self.otu_table_values = array([[0, 0, 9, 5, 3, 1],\r\n [1, 5, 4, 0, 3, 2],\r\n [2, 3, 1, 1, 2, 5]])\r\n {(0, 2): 9.0, (0, 3): 5.0, (0, 4): 3.0, (0, 5): 1.0,\r\n (1, 0): 1.0, (1, 1): 5.0, (1, 2): 4.0, (1, 4): 3.0, (1, 5): 2.0,\r\n (2, 0): 2.0, (2, 1): 3.0, (2, 2): 1.0, (2, 3): 1.0, (2, 4): 2.0, (2, 5): 5.0}\r\n self.otu_table = table_factory(self.otu_table_values,\r\n ['Sample1', 'Sample2', 'Sample3',\r\n 'Sample4', 'Sample5', 'Sample6'],\r\n ['OTU1', 'OTU2', 'OTU3'],\r\n [None, None, None, None, None, None],\r\n [{\"taxonomy\": ['Bacteria']},\r\n {\"taxonomy\": ['Archaea']},\r\n {\"taxonomy\": ['Streptococcus']}])\r\n self.otu_table_f = table_factory(self.otu_table_values,\r\n ['Sample1', 'Sample2', 'Sample3',\r\n 'Sample4', 'Sample5', 'Sample6'],\r\n ['OTU1', 'OTU2', 'OTU3'],\r\n [None, None, None, None, None, None],\r\n [{\"taxonomy\": ['1A', '1B', '1C', 'Bacteria']},\r\n {\"taxonomy\":\r\n ['2A', '2B', '2C', 'Archaea']},\r\n {\"taxonomy\": ['3A', '3B', '3C', 'Streptococcus']}])\r\n\r\n self.full_lineages = [['1A', '1B', '1C', 'Bacteria'],\r\n ['2A', '2B', '2C', 'Archaea'],\r\n ['3A', '3B', '3C', 'Streptococcus']]\r\n self.metadata = [[['Sample1', 'NA', 'A'],\r\n ['Sample2', 'NA', 'B'],\r\n ['Sample3', 'NA', 'A'],\r\n ['Sample4', 'NA', 'B'],\r\n ['Sample5', 'NA', 'A'],\r\n ['Sample6', 'NA', 'B']],\r\n ['SampleID', 'CAT1', 'CAT2'], []]\r\n self.tree_text = [\"('OTU3',('OTU1','OTU2'))\"]\r\n fh, self.tmp_heatmap_fpath = mkstemp(prefix='test_heatmap_',\r\n suffix='.pdf')\r\n close(fh)", "def test_load_from_function(self):\n self.test_table.load_from_function(create_small_test_odml)\n self.assertEqual(self.test_table._odmldict, self.expected_odmldict)", "def test_create_from_dicts(self):\n cols = list(zip(*self.dtypes))[0]\n dicts = [dict([(cols[i], d[i]) for i in xrange(len(d))])\n for d in self.idata]\n\n tbl = Table.create(\n ':memory:', \"Bar\", dicts, verbose=True,\n primary_key='id', autoincrement=True)\n\n self.check_index(self.idata, tbl.select())\n for idx, col in enumerate(cols):\n if col == 'id':\n continue\n self.check_data(self.idata[:, [0, idx]], tbl[col])", "def 
_get_mock_phyps(self):\n mock_lpar_4A = mock.Mock()\n mock_lpar_4A.configure_mock(id=4, name='A')\n mock_lpar_4A.processor = mock.MagicMock(\n util_cap_proc_cycles=5005045000,\n util_uncap_proc_cycles=5005045000,\n idle_proc_cycles=10000)\n mock_lpar_4A_prev = mock.Mock()\n mock_lpar_4A_prev.configure_mock(id=4, name='A')\n mock_lpar_4A_prev.processor = mock.MagicMock(\n util_cap_proc_cycles=40000,\n util_uncap_proc_cycles=40000,\n idle_proc_cycles=0)\n mock_phyp = mock.MagicMock(sample=mock.MagicMock(lpars=[mock_lpar_4A]))\n mock_prev_phyp = mock.MagicMock(\n sample=mock.MagicMock(lpars=[mock_lpar_4A_prev]))\n return mock_phyp, mock_prev_phyp", "def give_metadata(self):\n\n m = dict()\n m['dynamic_expressions'] = self.dynamic_expressions\n\n cust_labels = {}\n for key, value in self.column_labels_custom.iteritems():\n cust_labels[self.raw_to_colname(key)] = value\n m['column_labels_custom'] = cust_labels\n\n m['colsel'] = [self.raw_to_colname(col) for col in self.colsel]\n\n colsizedict = {}\n for col, size in enumerate(self.colsize):\n colsizedict[self.raw_to_colname(col)] = size\n m['colsize'] = colsizedict\n\n marksdict = {}\n for mark, colset in self.marks.iteritems():\n marksdict[mark] = [self.raw_to_colname(col) for col in colset]\n m['marks'] = marksdict\n\n m['name'] = self.name\n return m", "def _get_halo(self,i):\n if self._order is False:\n if self._subs is True:\n #this needs to be tested again on a snapshot that is not ordered!\n x = Halo(i, self, self.base, np.where(np.in1d(self.base['iord'], self.ids[self._subhalodat['sub_off'][i]:self._subhalodat['sub_off'][i]+self._subhalodat['sub_len'][i]] )))\n else:\n x = Halo(i, self, self.base, np.where(np.in1d(self.base['iord'], self.ids[self._halodat['group_off'][i]:self._halodat['group_off'][i]+self._halodat['group_len'][i]] )))\n \n else:\n if self._subs is False: #to use groups as halos:\n x = Halo(i, self, self.base, self.ids[self._halodat['group_off'][i]:self._halodat['group_off'][i]+self._halodat['group_len'][i]] ) \n else:\n x=Halo(i, self, self.base, self.ids[self._subhalodat['sub_off'][i]:self._subhalodat['sub_off'][i]+self._subhalodat['sub_len'][i]] )\n \n x._descriptor = \"halo_\"+str(i)\n x.properties.update(self.get_halo_properties(i))\n return x", "def test_create_class(self, whp_pandas):\n test_class = Slug_Forecasting(whp_pandas.copy()) # Instantiate class\n\n assert hasattr(test_class, \"slug_df\"), \"slug_df attribute must be created\"\n assert isinstance(test_class.slug_df.index, pd.DatetimeIndex), \"slug_df has DateTimeIndex\"\n\n whp_pandas_short = whp_pandas[:60].copy() # crop data frame\n\n # Test that class does not get created if whp_pandas is too short\n try:\n test_class = Slug_Forecasting(whp_pandas_short)\n print(\"pandas data frame is too short\")\n raise ValueError\n except AssertionError:\n pass\n\n whp_pandas_nowhp = whp_pandas.copy()\n whp_pandas_nowhp = whp_pandas_nowhp.drop(\"WH_P\", axis=1)\n\n # Test that class does not get created if whp_pandas does not contain WHP column\n try:\n test_class = Slug_Forecasting(whp_pandas_nowhp)\n print(\"pandas data frame does not contain WH_P column\")\n raise ValueError\n except AssertionError:\n pass\n\n whp_pandas_nots = whp_pandas.copy()\n whp_pandas_nots = whp_pandas_nots.drop(\"ts\", axis=1)\n\n # Test that class does not get created if whp_pandas does not contain timestamp column\n try:\n test_class = Slug_Forecasting(whp_pandas_nots)\n print(\"pandas data frame does not contain ts column\")\n raise ValueError\n except AssertionError:\n pass\n\n # Test 
that other column in whp_pandas get ignored and dropped from slug_df attribute\n whp_pandas_extravar = whp_pandas.copy()\n whp_pandas_extravar[\"random\"] = whp_pandas_extravar[\"WH_P\"]\n\n test_class = Slug_Forecasting(whp_pandas_extravar.copy())\n\n assert \"random\" not in test_class.slug_df.columns, \"In this example, random column should have been dropped\"", "def test_metadata_init():\n\n testapp = holocron.Application({\"yoda\": \"master\", \"vader\": \"sith\"})\n\n assert testapp.metadata[\"yoda\"] == \"master\"\n assert testapp.metadata[\"vader\"] == \"sith\"\n\n testapp.metadata[\"vader\"] = \"darth\"\n assert testapp.metadata[\"vader\"] == \"darth\"\n assert testapp.metadata[\"yoda\"] == \"master\"", "def hpa_menu_create():\n global test_HPA\n test_HPA = HistoricPriceAnalyser.create()\n return test_HPA", "def create_columns(data_frame, function):\n fields = [\"veh_id\", \"mpr\", \"flow\", \"distance\"]\n data_frame[fields] = data_frame.apply(function, axis=1)\n return data_frame", "def new_object_data(self):\n self.product_fixture = self.F.ProductFactory.create()\n modifiers = (self.datetime, self.resource_name)\n fields = {\n u\"name\": unicode(\"test_%s_%s\" % modifiers),\n u\"description\": unicode(\"test %s %s\" % modifiers),\n u\"product\": unicode(self.get_detail_url(\n \"product\", self.product_fixture.id)),\n u\"status\": unicode(\"draft\"),\n u\"created_by\": None,\n u\"modified_by\": None,\n u\"modified_on\": self.utcnow.strftime(\"%Y-%m-%d %H:%M:%S\"),\n }\n return fields", "def __set_hl_method_settings(self, hl_dict):\n\n if hl_dict is None:\n hl_dict = {}\n self.hl_dict = hl_dict\n \n if 'cc' in self.method:\n self.cc_loc_orbs = hl_dict.get(\"loc_orbs\")\n self.cc_initguess = hl_dict.get(\"cc_initguess\")\n self.cc_froz_core_orbs = hl_dict.get(\"froz_core_orbs\")\n if 'cas' in self.method:\n self.cas_loc_orbs = hl_dict.get(\"loc_orbs\")\n self.cas_init_guess = hl_dict.get(\"cas_initguess\")\n self.cas_active_orbs = hl_dict.get(\"active_orbs\")\n self.cas_avas = hl_dict.get(\"avas\")", "def _set_hashmode(self, hash_on):\n cols = self._pdrep.columns\n\n if hash_on and \"configname\" in cols:\n self._pdrep = self._pdrep.rename(\n columns={\"configname\": \"#configname\"})\n\n elif not hash_on and \"#configname\" in cols:\n self._pdrep = self._pdrep.rename(\n columns={\"#configname\": \"configname\"})\n\n return self._pdrep", "def horde_init(self, horde_info= {}):", "def test_get_table_description(self):\n db_introspection = DatabaseIntrospection(self.connection)\n cursor = mock.MagicMock()\n\n def description(*args, **kwargs):\n return [[\"name\", TypeCode.STRING], [\"age\", TypeCode.INT64]]\n\n def get_table_column_schema(*args, **kwargs):\n column_details = {}\n column_details[\"name\"] = ColumnDetails(\n null_ok=False, spanner_type=\"STRING(10)\"\n )\n column_details[\"age\"] = ColumnDetails(\n null_ok=True, spanner_type=\"INT64\"\n )\n return column_details\n\n cursor.get_table_column_schema = get_table_column_schema\n cursor.description = description()\n table_description = db_introspection.get_table_description(\n cursor=cursor, table_name=\"Table_1\"\n )\n if USING_DJANGO_3:\n self.assertEqual(\n table_description,\n [\n FieldInfo(\n name=\"name\",\n type_code=TypeCode.STRING,\n display_size=None,\n internal_size=10,\n precision=None,\n scale=None,\n null_ok=False,\n default=None,\n collation=None,\n ),\n FieldInfo(\n name=\"age\",\n type_code=TypeCode.INT64,\n display_size=None,\n internal_size=None,\n precision=None,\n scale=None,\n null_ok=True,\n 
default=None,\n collation=None,\n ),\n ],\n )\n else:\n self.assertEqual(\n table_description,\n [\n FieldInfo(\n name=\"name\",\n type_code=TypeCode.STRING,\n display_size=None,\n internal_size=10,\n precision=None,\n scale=None,\n null_ok=False,\n default=None,\n ),\n FieldInfo(\n name=\"age\",\n type_code=TypeCode.INT64,\n display_size=None,\n internal_size=None,\n precision=None,\n scale=None,\n null_ok=True,\n default=None,\n ),\n ],\n )", "def setUp(self):\n self.test_cube = set_up_percentiles_cube()\n self.new_name = \"probability\"", "def feature_eng2(housing_tr, housing):\n logging.info(\"Adding features.....\")\n housing_tr[\"rooms_per_household\"] = (\n housing_tr[\"total_rooms\"] / housing_tr[\"households\"]\n )\n housing_tr[\"bedrooms_per_room\"] = (\n housing_tr[\"total_bedrooms\"] / housing_tr[\"total_rooms\"]\n )\n housing_tr[\"population_per_household\"] = (\n housing_tr[\"population\"] / housing_tr[\"households\"]\n )\n housing_cat = housing[[\"ocean_proximity\"]]\n housing_prepared = housing_tr.join(\n pd.get_dummies(housing_cat, drop_first=True)\n )\n return housing_prepared", "def _set_primary_behaviors(self):\n\n for component_model in self.model_dictionary.values():\n gal_type = component_model.gal_type\n feature_name = component_model.feature_name\n\n try:\n component_model_galprop_dtype = component_model._galprop_dtypes_to_allocate\n except AttributeError:\n component_model_galprop_dtype = np.dtype([])\n\n methods_to_inherit = list(set(\n component_model._methods_to_inherit))\n\n for methodname in methods_to_inherit:\n new_method_name = methodname + '_' + gal_type\n new_method_behavior = self._update_param_dict_decorator(\n component_model, methodname)\n setattr(self, new_method_name, new_method_behavior)\n setattr(getattr(self, new_method_name), \n '_galprop_dtypes_to_allocate', component_model_galprop_dtype)\n setattr(getattr(self, new_method_name), 'gal_type', gal_type)\n setattr(getattr(self, new_method_name), 'feature_name', feature_name)\n\n attrs_to_inherit = list(set(\n component_model._attrs_to_inherit))\n for attrname in attrs_to_inherit:\n new_attr_name = attrname + '_' + gal_type\n attr = getattr(component_model, attrname)\n setattr(self, new_attr_name, attr)\n\n # Repeatedly overwrite self.threshold \n # This is harmless provided that all gal_types are ensured to have the same threshold, \n # which is guaranteed by the _test_dictionary_consistency method\n if hasattr(component_model, 'threshold'):\n setattr(self, 'threshold_' + gal_type, component_model.threshold)\n self.threshold = getattr(self, 'threshold_' + gal_type)", "def setUp(self):\n self.percentiles_cube = set_up_percentiles_cube()\n self.percentile_coordinate = find_percentile_coordinate(\n self.percentiles_cube)\n self.new_name = \"probability\"\n self.plugin_instance = ProbabilitiesFromPercentiles2D(\n self.percentiles_cube, self.new_name)\n self.orography_cube = set_up_threshold_cube()", "def test_dummydb_new_table(self):\n db = DummyDB()\n columns = {\n \"one\": int,\n \"two\": str,\n \"three\": bool,\n }\n db.create_table(\"new_table\", columns)", "def setUp(self):\n\n self.data = pd.DataFrame({\n 'column1': np.array([\n 2641.16233666, 921.14476418, -651.32239137, 1223.63536668,\n 3233.37342355, 1373.22400821, 1959.28188858, 1076.99295365,\n 2029.25100261, 1835.52188141, 1170.03850556, 739.42628394,\n 1866.65810627, 3703.49786503, 1719.45232017, 258.90206528,\n 219.42363944, 609.90212377, 1618.44207239, 2323.2775272,\n 3251.78732274, 1430.63989981, -180.57028875, -592.84497457,\n ]),\n 
'column2': np.array([\n 180.2425623, 192.35609972, 150.24830291, 156.62123653,\n 173.80311908, 191.0922843, 163.22252158, 190.73280428,\n 158.52982435, 163.0101334, 205.24904026, 175.42916046,\n 208.31821984, 178.98351969, 160.50981075, 163.19294974,\n 173.30395132, 215.18996298, 164.71141696, 178.84973821,\n 182.99902513, 217.5796917, 201.56983421, 174.92272693\n ]),\n 'column3': np.array([\n -1.42432446, -0.14759864, 0.66476302, -0.04061445, 0.64305762,\n 1.79615407, 0.70450457, -0.05886671, -0.36794788, 1.39331262,\n 0.39792831, 0.0676313, -0.96761759, 0.67286132, -0.55013279,\n -0.53118328, 1.23969655, -0.35985016, -0.03568531, 0.91456357,\n 0.49077378, -0.27428204, 0.45857406, 2.29614033\n ])\n })", "def populate_resource_columns(item_dict):\n item_dict['type'] = item_dict['name']\n if len(item_dict['summary_fields']) == 0:\n # Singleton roles ommit these fields\n item_dict['resource_name'] = None\n item_dict['resource_type'] = None\n else:\n item_dict['resource_name'] = item_dict[\n 'summary_fields']['resource_name']\n item_dict['resource_type'] = item_dict[\n 'summary_fields']['resource_type']", "def create_tables(self):\n if self.mock:\n mock_dynamodb2(self._create_tables())\n else:\n self._create_tables()", "def test_health_ok(self, mock):\n mock.configure_mock(**(self.config_payload(1, 1)))\n d = lf.lambda_handler(**(self.lambdaparam))\n self.assertEqual(d, 0)\n mock.client.return_value.update_thing_shadow.assert_not_called()", "def build_chortle_table() -> None:\n table = create_table()\n\n # Example chores to start off the database\n chores = [\n Chore(\n button_serial=\"G030JF0520662DJS\",\n click_type=\"SINGLE\",\n strategy=\"PERIODIC\",\n chore_name=\"Scoop Litter Box\",\n reset_time_seconds=86400,\n ),\n Chore(\n button_serial=\"G030JF0520662DJS\",\n click_type=\"DOUBLE\",\n strategy=\"TOGGLE\",\n chore_name=\"Empty the Dishwasher\",\n reset_time_seconds=7800,\n ),\n Chore(\n button_serial=\"G030JF0520662DJS\",\n click_type=\"LONG\",\n strategy=\"TOGGLE\",\n chore_name=\"Laundry: Washer to Dryer\",\n reset_time_seconds=2400,\n ),\n Chore(\n button_serial=\"G030JF052373W8GP\",\n click_type=\"SINGLE\",\n strategy=\"MODAL_TOGGLE\",\n chore_name=\"Laundry: Dryer to Hamper\",\n reset_time_seconds=3600,\n dependent=[\"G030JF052373W8GP\", \"LONG\"],\n ),\n ]\n\n for chore in chores:\n table.put_item(Item=asdict(chore))", "def _mock_function(self, obj, func):\n setattr(obj, func.__name__, MethodType(func, self.breaker))", "def _mock_function(self, obj, func):\n setattr(obj, func.__name__, MethodType(func, self.breaker))", "def at_object_creation(self):\n self.db.max_hp = 100 # Set maximum HP to 100\n self.db.hp = self.db.max_hp # Set current HP to maximum\n self.db.spells_known = [] # Set empty spells known list\n self.db.max_mp = 20 # Set maximum MP to 20\n self.db.mp = self.db.max_mp # Set current MP to maximum", "def mock_repodata(repodata, case):\n if 'repodata' in case:\n data = deepcopy(repodata)\n dict_merge(data, case['repodata'])\n else:\n data = repodata\n\n dataframe = pd.DataFrame(columns=utils.RepoData.columns)\n for channel, packages in data.items():\n for name, versions in packages.items():\n for item in versions:\n pkg = {\n 'channel': channel,\n 'name': name,\n 'build': '',\n 'build_number': 0,\n 'version': 0,\n 'depends': [],\n 'subdir': '',\n 'platform': 'noarch',\n }\n pkg.update(item)\n dataframe = dataframe.append(pkg, ignore_index=True)\n\n backup = utils.RepoData()._df, utils.RepoData()._df_ts\n utils.RepoData()._df = dataframe\n utils.RepoData()._df_ts = 
datetime.datetime.now()\n yield\n utils.RepoData()._df, utils.RepoData()._df_ts = backup", "def load_haloprop(base_path, subvolume, fields=None, matches=False):\n return load_subvolume(base_path, subvolume, 'Haloprop', fields, matches, True)", "def create_properties(self, properties):\n self._update_metadata_date(properties)\n self._backend.insert_product_properties(properties)", "def __init__(self, context, name, methods, **kwargs):\n \n mock_object_names[id(self)] = name\n for method in methods:\n self.__dict__[method] = MockFunction(context, method)\n for (key, value) in kwargs.items():\n self.__dict__[key] = value", "def _init_h5_out(self, fout, save_hybrid_meta=True):\n dsets = []\n shapes = {}\n attrs = {}\n chunks = {}\n dtypes = {}\n\n for dset, data in self.profiles.items():\n dsets.append(dset)\n shapes[dset] = data.shape\n chunks[dset] = None\n attrs[dset] = {Outputs.UNIT_ATTR: \"MW\"}\n dtypes[dset] = data.dtype\n\n meta = self.hybrid_meta.copy()\n for c in meta.columns:\n try:\n meta[c] = pd.to_numeric(meta[c])\n except ValueError:\n pass\n\n Outputs.init_h5(fout, dsets, shapes, attrs, chunks, dtypes,\n meta, time_index=self.hybrid_time_index)\n\n if save_hybrid_meta:\n with Outputs(fout, mode='a') as out:\n hybrid_meta = to_records_array(self.hybrid_meta)\n out._create_dset('meta', hybrid_meta.shape,\n hybrid_meta.dtype, data=hybrid_meta)", "def setUpClass(cls):\n super().setUpClass()\n\n cls.accessor = OCPReportDBAccessor(cls.schema)\n cls.report_schema = cls.accessor.report_schema\n cls.all_tables = list(OCP_REPORT_TABLE_MAP.values())\n cls.creator = ReportObjectCreator(cls.schema)\n cls.date_accessor = DateHelper()\n cls.manifest_accessor = ReportManifestDBAccessor()\n cls.dh = DateHelper()", "def pre_route_table_create(self, resource_dict):\n pass", "def customize_headers(self,executer, tree, cursor, table,custom_headers):\n headers = executer.get_columns(table, cursor)\n tree[\"columns\"] = custom_headers\n\n\n set_width = int(self.column_length_configurator / len(headers))\n\n # Setting columns width and headers\n for column in custom_headers:\n tree.column(column, width=set_width, minwidth=self.min_width)\n tree.heading(column, text=column)", "def test_add_column(self):\n name_column = Varchar()\n name_column._meta.name = \"name\"\n\n genre_column = Varchar()\n genre_column._meta.name = \"genre\"\n\n schema: t.List[DiffableTable] = [\n DiffableTable(\n class_name=\"Band\",\n tablename=\"band\",\n columns=[name_column, genre_column],\n )\n ]\n schema_snapshot: t.List[DiffableTable] = [\n DiffableTable(\n class_name=\"Band\",\n tablename=\"band\",\n columns=[name_column],\n )\n ]\n\n schema_differ = SchemaDiffer(\n schema=schema, schema_snapshot=schema_snapshot, auto_input=\"y\"\n )\n\n self.assertTrue(len(schema_differ.add_columns.statements) == 1)\n self.assertEqual(\n schema_differ.add_columns.statements[0],\n \"manager.add_column(table_class_name='Band', tablename='band', column_name='genre', db_column_name='genre', column_class_name='Varchar', column_class=Varchar, params={'length': 255, 'default': '', 'null': False, 'primary_key': False, 'unique': False, 'index': False, 'index_method': IndexMethod.btree, 'choices': None, 'db_column_name': None})\", # noqa\n )", "def to_h2o(self, factor_columns_list=None):\n\n from mercury_ml.common.data_wrappers.h2o import H2ODataWrapper\n import h2o\n df_h2o = h2o.H2OFrame(self.underlying)\n\n # convert columns\n if not factor_columns_list:\n factor_columns_list = []\n\n for factor_column_name in factor_columns_list:\n 
df_h2o[factor_column_name] = df_h2o[factor_column_name].asfactor()\n\n return H2ODataWrapper(df_h2o, self.field_names)", "def define_custom_properties(self, p: dict):\n for k, v in p.items():\n self._properties.define_property(name=k, **v)", "def build_metadata():\n metadata = sa.MetaData()\n\n sa.Table(\n 'hive_blocks', metadata,\n sa.Column('num', sa.Integer, primary_key=True, autoincrement=False),\n sa.Column('hash', CHAR(40), nullable=False),\n sa.Column('prev', CHAR(40)),\n sa.Column('txs', SMALLINT, server_default='0', nullable=False),\n sa.Column('ops', SMALLINT, server_default='0', nullable=False),\n sa.Column('created_at', sa.DateTime, nullable=False),\n\n sa.UniqueConstraint('hash', name='hive_blocks_ux1'),\n sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'], name='hive_blocks_fk1'),\n )\n\n sa.Table(\n 'hive_accounts', metadata,\n sa.Column('id', sa.Integer, primary_key=True),\n sa.Column('name', VARCHAR(16), nullable=False),\n sa.Column('created_at', sa.DateTime, nullable=False),\n #sa.Column('block_num', sa.Integer, nullable=False),\n sa.Column('reputation', sa.Float(precision=6), nullable=False, server_default='25'),\n\n sa.Column('display_name', sa.String(20)),\n sa.Column('about', sa.String(160)),\n sa.Column('location', sa.String(30)),\n sa.Column('website', sa.String(100)),\n sa.Column('profile_image', sa.String(1024), nullable=False, server_default=''),\n sa.Column('cover_image', sa.String(1024), nullable=False, server_default=''),\n\n sa.Column('followers', sa.Integer, nullable=False, server_default='0'),\n sa.Column('following', sa.Integer, nullable=False, server_default='0'),\n\n sa.Column('proxy', VARCHAR(16), nullable=False, server_default=''),\n sa.Column('post_count', sa.Integer, nullable=False, server_default='0'),\n sa.Column('proxy_weight', sa.Float(precision=6), nullable=False, server_default='0'),\n sa.Column('vote_weight', sa.Float(precision=6), nullable=False, server_default='0'),\n sa.Column('kb_used', sa.Integer, nullable=False, server_default='0'), # deprecated\n sa.Column('rank', sa.Integer, nullable=False, server_default='0'),\n\n sa.Column('lastread_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),\n sa.Column('active_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),\n sa.Column('cached_at', sa.DateTime, nullable=False, server_default='1970-01-01 00:00:00'),\n sa.Column('raw_json', sa.Text),\n\n\n sa.UniqueConstraint('name', name='hive_accounts_ux1'),\n sa.Index('hive_accounts_ix1', 'vote_weight', 'id'), # core: quick ranks\n sa.Index('hive_accounts_ix2', 'name', 'id'), # core: quick id map\n sa.Index('hive_accounts_ix3', 'vote_weight', 'name', postgresql_ops=dict(name='varchar_pattern_ops')), # API: lookup\n sa.Index('hive_accounts_ix4', 'id', 'name'), # API: quick filter/sort\n sa.Index('hive_accounts_ix5', 'cached_at', 'name'), # core/listen sweep\n )\n\n sa.Table(\n 'hive_posts', metadata,\n sa.Column('id', sa.Integer, primary_key=True),\n sa.Column('parent_id', sa.Integer),\n sa.Column('author', VARCHAR(16), nullable=False),\n sa.Column('permlink', VARCHAR(255), nullable=False),\n sa.Column('category', VARCHAR(255), nullable=False, server_default=''),\n sa.Column('community_id', sa.Integer, nullable=True),\n sa.Column('created_at', sa.DateTime, nullable=False),\n sa.Column('depth', SMALLINT, nullable=False),\n sa.Column('is_deleted', BOOLEAN, nullable=False, server_default='0'),\n sa.Column('is_pinned', BOOLEAN, nullable=False, server_default='0'),\n sa.Column('is_muted', BOOLEAN, nullable=False, 
server_default='0'),\n sa.Column('is_valid', BOOLEAN, nullable=False, server_default='1'),\n sa.Column('promoted', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),\n\n sa.ForeignKeyConstraint(['author'], ['hive_accounts.name'], name='hive_posts_fk1'),\n sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'], name='hive_posts_fk3'),\n sa.UniqueConstraint('author', 'permlink', name='hive_posts_ux1'),\n sa.Index('hive_posts_ix3', 'author', 'depth', 'id', postgresql_where=sql_text(\"is_deleted = '0'\")), # API: author blog/comments\n sa.Index('hive_posts_ix4', 'parent_id', 'id', postgresql_where=sql_text(\"is_deleted = '0'\")), # API: fetching children\n sa.Index('hive_posts_ix5', 'id', postgresql_where=sql_text(\"is_pinned = '1' AND is_deleted = '0'\")), # API: pinned post status\n sa.Index('hive_posts_ix6', 'community_id', 'id', postgresql_where=sql_text(\"community_id IS NOT NULL AND is_pinned = '1' AND is_deleted = '0'\")), # API: community pinned\n )\n\n sa.Table(\n 'hive_post_tags', metadata,\n sa.Column('post_id', sa.Integer, nullable=False),\n sa.Column('tag', sa.String(32), nullable=False),\n sa.UniqueConstraint('tag', 'post_id', name='hive_post_tags_ux1'), # core\n sa.Index('hive_post_tags_ix1', 'post_id'), # core\n )\n\n sa.Table(\n 'hive_follows', metadata,\n sa.Column('follower', sa.Integer, nullable=False),\n sa.Column('following', sa.Integer, nullable=False),\n sa.Column('state', SMALLINT, nullable=False, server_default='1'),\n sa.Column('created_at', sa.DateTime, nullable=False),\n\n sa.UniqueConstraint('following', 'follower', name='hive_follows_ux3'), # core\n sa.Index('hive_follows_ix5a', 'following', 'state', 'created_at', 'follower'),\n sa.Index('hive_follows_ix5b', 'follower', 'state', 'created_at', 'following'),\n )\n\n sa.Table(\n 'hive_reblogs', metadata,\n sa.Column('account', VARCHAR(16), nullable=False),\n sa.Column('post_id', sa.Integer, nullable=False),\n sa.Column('created_at', sa.DateTime, nullable=False),\n\n sa.ForeignKeyConstraint(['account'], ['hive_accounts.name'], name='hive_reblogs_fk1'),\n sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_reblogs_fk2'),\n sa.UniqueConstraint('account', 'post_id', name='hive_reblogs_ux1'), # core\n sa.Index('hive_reblogs_ix1', 'post_id', 'account', 'created_at'), # API -- not yet used\n )\n\n sa.Table(\n 'hive_payments', metadata,\n sa.Column('id', sa.Integer, primary_key=True),\n sa.Column('block_num', sa.Integer, nullable=False),\n sa.Column('tx_idx', SMALLINT, nullable=False),\n sa.Column('post_id', sa.Integer, nullable=False),\n sa.Column('from_account', sa.Integer, nullable=False),\n sa.Column('to_account', sa.Integer, nullable=False),\n sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False),\n sa.Column('token', VARCHAR(5), nullable=False),\n\n sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'], name='hive_payments_fk1'),\n sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'], name='hive_payments_fk2'),\n sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'], name='hive_payments_fk3'),\n )\n\n sa.Table(\n 'hive_feed_cache', metadata,\n sa.Column('post_id', sa.Integer, nullable=False),\n sa.Column('account_id', sa.Integer, nullable=False),\n sa.Column('created_at', sa.DateTime, nullable=False),\n sa.UniqueConstraint('post_id', 'account_id', name='hive_feed_cache_ux1'), # core\n sa.Index('hive_feed_cache_ix1', 'account_id', 'post_id', 'created_at'), # API (and rebuild?)\n )\n\n sa.Table(\n 'hive_posts_cache', metadata,\n sa.Column('post_id', sa.Integer, 
primary_key=True, autoincrement=False),\n sa.Column('author', VARCHAR(16), nullable=False),\n sa.Column('permlink', VARCHAR(255), nullable=False),\n sa.Column('category', VARCHAR(255), nullable=False, server_default=''),\n\n # important/index\n sa.Column('community_id', sa.Integer, nullable=True),\n sa.Column('depth', SMALLINT, nullable=False, server_default='0'),\n sa.Column('children', SMALLINT, nullable=False, server_default='0'),\n\n # basic/extended-stats\n sa.Column('author_rep', sa.Float(precision=6), nullable=False, server_default='0'),\n sa.Column('flag_weight', sa.Float(precision=6), nullable=False, server_default='0'),\n sa.Column('total_votes', sa.Integer, nullable=False, server_default='0'),\n sa.Column('up_votes', sa.Integer, nullable=False, server_default='0'),\n\n # basic ui fields\n sa.Column('title', sa.String(255), nullable=False, server_default=''),\n sa.Column('preview', sa.String(1024), nullable=False, server_default=''),\n sa.Column('img_url', sa.String(1024), nullable=False, server_default=''),\n\n # core stats/indexes\n sa.Column('payout', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),\n sa.Column('promoted', sa.types.DECIMAL(10, 3), nullable=False, server_default='0'),\n sa.Column('created_at', sa.DateTime, nullable=False, server_default='1990-01-01'),\n sa.Column('payout_at', sa.DateTime, nullable=False, server_default='1990-01-01'),\n sa.Column('updated_at', sa.DateTime, nullable=False, server_default='1990-01-01'),\n sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'),\n\n # ui flags/filters\n sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'),\n sa.Column('is_declined', BOOLEAN, nullable=False, server_default='0'),\n sa.Column('is_full_power', BOOLEAN, nullable=False, server_default='0'),\n sa.Column('is_hidden', BOOLEAN, nullable=False, server_default='0'),\n sa.Column('is_grayed', BOOLEAN, nullable=False, server_default='0'),\n\n # important indexes\n sa.Column('rshares', sa.BigInteger, nullable=False, server_default='0'),\n sa.Column('sc_trend', sa.Float(precision=6), nullable=False, server_default='0'),\n sa.Column('sc_hot', sa.Float(precision=6), nullable=False, server_default='0'),\n\n # bulk data\n sa.Column('body', TEXT),\n sa.Column('votes', TEXT),\n sa.Column('json', sa.Text),\n sa.Column('raw_json', sa.Text),\n\n # index: misc\n sa.Index('hive_posts_cache_ix3', 'payout_at', 'post_id', postgresql_where=sql_text(\"is_paidout = '0'\")), # core: payout sweep\n sa.Index('hive_posts_cache_ix8', 'category', 'payout', 'depth', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: tag stats\n\n # index: ranked posts\n sa.Index('hive_posts_cache_ix2', 'promoted', postgresql_where=sql_text(\"is_paidout = '0' AND promoted > 0\")), # API: promoted\n\n sa.Index('hive_posts_cache_ix6a', 'sc_trend', 'post_id', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: trending todo: depth=0\n sa.Index('hive_posts_cache_ix7a', 'sc_hot', 'post_id', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: hot todo: depth=0\n sa.Index('hive_posts_cache_ix6b', 'post_id', 'sc_trend', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: trending, filtered todo: depth=0\n sa.Index('hive_posts_cache_ix7b', 'post_id', 'sc_hot', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: hot, filtered todo: depth=0\n\n sa.Index('hive_posts_cache_ix9a', 'depth', 'payout', 'post_id', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: payout todo: rem depth\n sa.Index('hive_posts_cache_ix9b', 'category', 'depth', 'payout', 
'post_id', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: payout, filtered todo: rem depth\n\n sa.Index('hive_posts_cache_ix10', 'post_id', 'payout', postgresql_where=sql_text(\"is_grayed = '1' AND payout > 0\")), # API: muted, by filter/date/payout\n\n # index: stats\n sa.Index('hive_posts_cache_ix20', 'community_id', 'author', 'payout', 'post_id', postgresql_where=sql_text(\"is_paidout = '0'\")), # API: pending distribution; author payout\n\n # index: community ranked posts\n sa.Index('hive_posts_cache_ix30', 'community_id', 'sc_trend', 'post_id', postgresql_where=sql_text(\"community_id IS NOT NULL AND is_grayed = '0' AND depth = 0\")), # API: community trend\n sa.Index('hive_posts_cache_ix31', 'community_id', 'sc_hot', 'post_id', postgresql_where=sql_text(\"community_id IS NOT NULL AND is_grayed = '0' AND depth = 0\")), # API: community hot\n sa.Index('hive_posts_cache_ix32', 'community_id', 'created_at', 'post_id', postgresql_where=sql_text(\"community_id IS NOT NULL AND is_grayed = '0' AND depth = 0\")), # API: community created\n sa.Index('hive_posts_cache_ix33', 'community_id', 'payout', 'post_id', postgresql_where=sql_text(\"community_id IS NOT NULL AND is_grayed = '0' AND is_paidout = '0'\")), # API: community payout\n sa.Index('hive_posts_cache_ix34', 'community_id', 'payout', 'post_id', postgresql_where=sql_text(\"community_id IS NOT NULL AND is_grayed = '1' AND is_paidout = '0'\")), # API: community muted\n )\n\n sa.Table(\n 'hive_state', metadata,\n sa.Column('block_num', sa.Integer, primary_key=True, autoincrement=False),\n sa.Column('db_version', sa.Integer, nullable=False),\n sa.Column('steem_per_mvest', sa.types.DECIMAL(8, 3), nullable=False),\n sa.Column('usd_per_steem', sa.types.DECIMAL(8, 3), nullable=False),\n sa.Column('sbd_per_steem', sa.types.DECIMAL(8, 3), nullable=False),\n sa.Column('dgpo', sa.Text, nullable=False),\n )\n\n metadata = build_metadata_community(metadata)\n\n metadata = build_metadata_blacklist(metadata)\n\n metadata = build_trxid_block_num(metadata)\n\n return metadata", "def create_whoosh_schema(self) -> whoosh.fields.Schema:\n schema_classname = \"WhooshSchema\"\n schema_classname = str(schema_classname)\n attrs = OrderedDict()\n for field in self.fields:\n if field.type_is_ngram:\n whoosh_field = whoosh.fields.NGRAM(\n stored=field.type_is_store,\n minsize=field.ngram_minsize,\n maxsize=field.ngram_maxsize,\n field_boost=field.weight,\n sortable=field.is_sortable,\n )\n elif field.type_is_phrase:\n whoosh_field = whoosh.fields.TEXT(\n stored=field.type_is_store,\n field_boost=field.weight,\n sortable=field.is_sortable,\n )\n elif field.type_is_keyword:\n whoosh_field = whoosh.fields.KEYWORD(\n stored=field.type_is_store,\n lowercase=field.keyword_lowercase,\n commas=field.keyword_commas,\n field_boost=field.weight,\n sortable=field.is_sortable,\n )\n elif field.type_is_numeric:\n whoosh_field = whoosh.fields.NUMERIC(\n stored=field.type_is_store,\n field_boost=field.weight,\n sortable=field.is_sortable,\n )\n elif field.type_is_store:\n whoosh_field = whoosh.fields.STORED()\n else: # pragma: no cover\n raise NotImplementedError\n attrs[field.name] = whoosh_field\n SchemaClass = type(schema_classname, (whoosh.fields.SchemaClass,), attrs)\n schema = SchemaClass()\n return schema", "def __init__(self, cfg: CFG_DICT, product_cfg: \"datacube_ows.ows_configuration.OWSNamedLayer\",\n **kwargs) -> None:\n super().__init__(cfg, **kwargs)\n cfg = cast(CFG_DICT, self._raw_cfg)\n self.product = product_cfg\n pq_names = 
self.product.parse_pq_names(cfg)\n self.pq_names = pq_names[\"pq_names\"]\n self.pq_low_res_names = pq_names[\"pq_low_res_names\"]\n self.pq_band = cfg[\"band\"]\n if \"fuse_func\" in cfg:\n self.pq_fuse_func: Optional[FunctionWrapper] = FunctionWrapper(self.product, cast(Mapping[str, Any], cfg[\"fuse_func\"]))\n else:\n self.pq_fuse_func = None\n self.pq_ignore_time = cfg.get(\"ignore_time\", False)\n self.ignore_info_flags = cfg.get(\"ignore_info_flags\", [])\n self.pq_manual_merge = cfg.get(\"manual_merge\", False)\n self.declare_unready(\"pq_products\")\n self.declare_unready(\"flags_def\")\n self.declare_unready(\"info_mask\")", "def makeHybridTable(size=100, database='VariabilityTestDatabase.db', **kwargs):\n\n # a haphazard sample of stellar SEDs\n sedFiles = ['kp10_8750.fits_g35_8950', 'kp03_10500.fits_g45_10600', 'km50_6750.fits_g20_6750']\n\n # a haphazard sample of cepheid light curves\n lcFiles = ['cepheid_lc/classical_longPer_specfile', 'cepheid_lc/classical_medPer_specfile',\n 'cepheid_lc/classical_shortPer_specfile', 'cepheid_lc/classical_shortPer_specfile',\n 'cepheid_lc/popII_longPer_specfile', 'cepheid_lc/popII_shortPer_specfile']\n\n conn = sqlite3.connect(database)\n c = conn.cursor()\n try:\n c.execute('''CREATE TABLE hybrid\n (varsimobjid int, variability text, sedfilename text, parallax real, ebv real)''')\n conn.commit()\n except:\n return\n\n rng = np.random.RandomState(32)\n periods = rng.random_sample(size)*50.0\n mjDisplacement = (rng.random_sample(size)-0.5)*50.0\n for i in range(size):\n sedFile = sedFiles[rng.randint(0, len(sedFiles))]\n if i%3 == 0:\n # just to make sure that Variability mixins no how to andle\n # objects with no variability\n varParam = None\n paramStr = None\n elif i%2 == 0:\n varParam = {'varMethodName': 'applyCepheid',\n 'pars': {'period': periods[i],\n 'lcfile': lcFiles[rng.randint(0, len(lcFiles))],\n 't0': 48000.0+mjDisplacement[i]}}\n else:\n varParam = {'varMethodName': 'testVar',\n 'pars': {'period': rng.random_sample()*100.0, 'amplitude': 2.0}}\n\n if varParam is not None:\n paramStr = json.dumps(varParam)\n\n qstr = '''INSERT INTO hybrid VALUES (%i, '%s', '%s', 0.01, 0.7)''' % (i, paramStr, sedFile)\n c.execute(qstr)\n conn.commit()\n conn.close()", "def sample_metadata(sample):\n sel = [\n Mental_health.year,\n Mental_health.physical_importance,\n Mental_health.mental_importance,\n Mental_health.industry_support,\n ]\n \n results = db.session.query(*sel).all() \n# Create a dictionary entry for each row of metadata information\n sample_metadata = {}\n for result in results:\n Mental_health[\"year\"] = result[0]\n Mental_health[\"physical_importance\"] = result[1]\n Mental_health[\"mental_importance\"] = result[2]\n Mental_health[\"industry_support\"] = result[3]\n \n\n print(sample_metadata)\n return jsonify(sample_metadata)", "def __init__(self, atmos_func='exponential', atmos_filename=None,\n Cd=1., Ch=0.1, Q=1e7, Cl=1e-3, alpha=0.3, Rp=6371e3,\n g=9.81, H=8000., rho0=1.2):\n # Input constants\n self.Cd = Cd\n self.Ch = Ch\n self.Q = Q\n self.Cl = Cl\n self.alpha = alpha\n self.Rp = Rp\n self.g = g\n self.H = H\n self.rho0 = rho0\n\n if atmos_func == 'exponential':\n self.rhoa = lambda z: self.rho0 * np.exp(-z/self.H)\n\n elif atmos_func == 'tabular':\n assert isinstance(\n atmos_filename, str), 'atmos_filename required for tabular atmosphere'\n data = pd.read_csv(atmos_filename, comment='#', delimiter=' ', names=[\n 'Altitude', 'Density', 'Scale'])\n\n # ensure data is sorted by altitude ascending for interpolation\n 
data.sort_values(by=['Altitude'], ascending=True)\n zmax = data.Altitude.iloc[-1]\n zmin = data.Altitude.iloc[0]\n altitude_values = data.Altitude.values\n density_values = data.Density.values\n\n # interpolate from data table (revert to exponential function if z outside range)\n self.rhoa = lambda z: np.select([(z >= zmin) & (z <= zmax), (z < zmin) | (z > zmax)], [\n np.interp(z, altitude_values, density_values), self.rho0 * np.exp(-z/self.H)])\n\n elif atmos_func == 'mars':\n self.rhoa = lambda z: (0.699 * np.exp(-0.00009*z)) / (0.1921 * np.select(\n [z >= 7000, z < 7000], [249.7 - 0.00222*z, 242.1 - 0.000998*z]))\n\n elif atmos_func == 'constant':\n self.rhoa = lambda z: rho0\n\n else:\n raise ValueError(\n 'Valid atmos_func inputs are: \"exponential\", \"tabular\", \"mars\", \"constant\"')", "def _create_meta_data_table(self) -> None:\n self.dynamodb.create_table(\n TableName=self.gene_metadata_table,\n KeySchema=[\n {\"AttributeName\": \"src_name\", \"KeyType\": \"HASH\"} # Partition key\n ],\n AttributeDefinitions=[\n {\"AttributeName\": \"src_name\", \"AttributeType\": \"S\"},\n ],\n ProvisionedThroughput={\"ReadCapacityUnits\": 10, \"WriteCapacityUnits\": 10},\n )", "def init_prop(obj):\n if 'Test_object' not in obj:\n obj['Test_object'] = \"None\"\n if 'Test_ratio' not in obj:\n obj['Test_ratio'] = 1\n if 'Correct_color' not in obj:\n obj['Correct_color'] = 0, 1.0, 0\n if 'Wrong_color' not in obj:\n obj['Wrong_color'] = 1.0, 0, 0\n if 'TEST' not in obj:\n obj[\"TEST\"] = \"INACTIVE\"\n\n if 'Active_Dialogue' not in obj:\n obj['Active_Dialogue'] = {}\n\n if 'STORY_MODE' not in obj:\n obj['STORY_MODE'] = \"NORMAL\"\n\n if 'SOLVED' not in obj:\n obj['SOLVED'] = \"No\"\n\n if 'SLIDE' not in obj:\n obj['SLIDE'] = 0\n if 'ACTIVE' not in obj:\n obj['ACTIVE'] = None\n if 'TEST_MODE' not in obj:\n obj['TEST_MODE'] = \"Off\"\n #Set run speed\n if 'running' not in obj:\n obj['running'] = 20\n #Set jump force\n if 'jump_force' not in obj:\n obj['jump_force'] = 20\n #Toggles first person mode\n if 'view_mode' not in obj:\n obj['view_mode'] = 'THIRD_PERSON'\n #The fp thumbstick layout\n if 'thumbstick_layout' not in obj:\n obj['thumbstick_layout'] = 'DEFAULT' #can be DEFAULT, LEGACY, SOUTHPAW, or LEGACYSOUTHPAW\n #Look invert for fp_mode\n if 'look_invert' not in obj:\n #1 = not inverted, -1 = inverted\n obj['look_invert'] = 1\n #When Camera has reached its destined position\n if 'cam_set' not in obj:\n obj['cam_set'] = 'Off'\n if 'index' not in obj:\n obj['index'] = 0", "def setcolumns(self, columns):\n\n # Store the column titles (\"raw\" format)\n # This is a list of white-space separated strings\n self.__columns = columns\n # Create table_column objects\n for col in columns.split():\n self.addcolumn(col)\n # Attempt to populate the column objects\n if self.__data:\n self.__populate_columns()\n self.__nonzero = True", "def _merge_solar_wind_meta(self):\n self._hybrid_meta = self.data.solar_meta.merge(\n self.data.wind_meta,\n on=ColNameFormatter.fmt(MERGE_COLUMN),\n suffixes=[None, '_x'], how=self._merge_type()\n )", "def create_table_columns(database, table, columns):\r\n in_tests.test_create_table_columns(database, table, columns)\r\n\r\n connection = sqlite3.connect(database)\r\n cursor = connection.cursor()\r\n\r\n for column in columns:\r\n query = f\"ALTER TABLE {table} ADD COLUMN {column}\"\r\n cursor.execute(query)\r\n connection.commit()\r\n cursor.close()\r\n connection.close()\r\n\r\n out_tests.test_create_table_columns(\r\n get_table_columns_names(database, table), columns)\r\n 
return ()", "def randomise(mock_info):\n mock_info[\"resource_info\"][\"usage\"][\"cpu\"] = round(random.uniform(0, 1), 2)\n mock_info[\"resource_info\"][\"usage\"][\"cpu_percentage\"] = round(random.uniform(0, 1), 2)\n mock_info[\"resource_info\"][\"usage\"][\"memory\"] = round(random.uniform(0, 1), 2)\n mock_info[\"resource_info\"][\"usage\"][\"memory_percentage\"] = round(random.uniform(0, 1), 2)\n return mock_info", "def makeBHMicrolensingTable(size=100, database=VARIABILITY_DB, **kwargs):\n\n # a haphazard sample of stellar SEDs\n sedFiles = ['kp10_8750.fits_g35_8950', 'kp03_10500.fits_g45_10600', 'km50_6750.fits_g20_6750']\n\n # a sample of black hole microlensing light curves that do not repeat time steps\n # (repeating time steps causes the scipy spline interpolation routine to return Nan)\n lcFiles = ['microlens/bh_binary_source/lc_14_25_75_8000_0_0.05_316',\n 'microlens/bh_binary_source/lc_14_25_4000_8000_0_phi1.09_0.005_100',\n 'microlens/bh_binary_source/lc_14_25_75_8000_0_tets2.09_0.005_316']\n\n conn = sqlite3.connect(database)\n c = conn.cursor()\n try:\n c.execute('''CREATE TABLE bhmicrolensing\n (varsimobjid int, variability text, sedfilename text, parallax real, ebv real)''')\n conn.commit()\n except:\n return\n\n rng = np.random.RandomState(32)\n mjDisplacement = rng.random_sample(size)*5.0*365.25\n for i in range(size):\n sedFile = sedFiles[rng.randint(0, len(sedFiles))]\n varParam = {'varMethodName': 'applyBHMicrolens',\n 'pars': {'filename': lcFiles[rng.randint(0, len(lcFiles))],\n 't0': 52000.0-mjDisplacement[i]}}\n paramStr = json.dumps(varParam)\n\n qstr = '''INSERT INTO bhmicrolensing VALUES (%i, '%s', '%s', 0.01, 0.7)''' % (i, paramStr, sedFile)\n c.execute(qstr)\n conn.commit()\n conn.close()", "def test_db_table_creation_check(self):\n mock_cursor = Mock()\n mock_cursor.configure_mock(**{\"cursor.return_value.fetchone.return_value\": (\"vnf_table_2\")})\n status = misshtbtd.db_table_creation_check(mock_cursor, \"vnf_table_2\")\n self.assertEqual(status, True)", "def setup(self):\n self.table = prettytable.PrettyTable()\n self.table.field_names = self.titles\n if self.convert_columns:\n self.rows = self.convert_columns_to_rows(self.rows)\n if self.colour:\n self.colour = self.convert_columns_to_rows(self.colour)", "def test_clone_change_param(self, cosmo):\n super().test_clone_change_param(cosmo)\n\n # don't change any values\n kwargs = cosmo._init_arguments.copy()\n kwargs.pop(\"name\", None) # make sure not setting name\n c = cosmo.clone(**kwargs)\n assert c.__class__ == cosmo.__class__\n assert c.name == cosmo.name + \" (modified)\"\n assert c.is_equivalent(cosmo)\n\n # change ``H0``\n # Note that H0 affects Ode0 because it changes Ogamma0\n c = cosmo.clone(H0=100)\n assert c.__class__ == cosmo.__class__\n assert c.name == cosmo.name + \" (modified)\"\n assert c.H0.value == 100\n for n in (\"Om0\", \"Ode0\", \"Tcmb0\", \"Neff\", \"m_nu\", \"Ok0\", \"Ob0\"):\n v = getattr(c, n)\n if v is None:\n assert v is getattr(cosmo, n)\n continue\n assert u.allclose(v, getattr(cosmo, n), atol=1e-4 * getattr(v, \"unit\", 1))\n assert not u.allclose(c.Ogamma0, cosmo.Ogamma0)\n assert not u.allclose(c.Onu0, cosmo.Onu0)\n\n # change multiple things\n c = cosmo.clone(name=\"new name\", H0=100, Tcmb0=2.8, meta=dict(zz=\"tops\"))\n assert c.__class__ == cosmo.__class__\n assert c.name == \"new name\"\n assert c.H0.value == 100\n assert c.Tcmb0.value == 2.8\n assert c.meta == {**cosmo.meta, **dict(zz=\"tops\")}\n for n in (\"Om0\", \"Ode0\", \"Neff\", \"m_nu\", \"Ok0\", 
\"Ob0\"):\n v = getattr(c, n)\n if v is None:\n assert v is getattr(cosmo, n)\n continue\n assert u.allclose(v, getattr(cosmo, n), atol=1e-4 * getattr(v, \"unit\", 1))\n assert not u.allclose(c.Ogamma0, cosmo.Ogamma0)\n assert not u.allclose(c.Onu0, cosmo.Onu0)\n assert not u.allclose(c.Tcmb0.value, cosmo.Tcmb0.value)", "def populate_hypershapes(m_rel: int, points_distribution: List[int], hypershapes: Dict[int, Dict], n_classes: int) \\\n -> pd.DataFrame:\n dataset = []\n\n for cla in hypershapes.keys():\n ns = []\n\n rs = [cluster[\"radius\"] if not cluster[\"shape\"] == \"moons\" else cluster[\"radius_big\"] for cluster in\n hypershapes[cla].values()]\n\n f = points_distribution[int(cla)] / sum(rs)\n\n for r in rs:\n ns.append(round(r * f))\n\n i = 0\n while sum(ns) < points_distribution[int(cla)]:\n ns[i % n_classes] += 1\n i += 1\n i = 0\n while sum(ns) > points_distribution[int(cla)]:\n ns[i % n_classes] -= 1 if ns[i % n_classes] > 0 else 0\n i += 1\n\n for shape, size in zip(hypershapes[cla].values(), ns):\n if shape[\"shape\"] == \"moons\":\n c = shape[\"center_big\"], shape[\"center_small\"]\n r = shape[\"radius_big\"], shape[\"radius_small\"]\n else:\n c = shape[\"center\"]\n r = shape[\"radius\"]\n\n points = generate_points_inside_hypershape(m_rel, size, c, r, shape[\"shape\"])\n\n for point in points:\n point.append(int(cla))\n dataset.append(point)\n\n return pd.DataFrame(dataset)", "def test_initialize_hypervisor(self, create_mock, libvirt_mock):\n resources = lxc.LXCResources('foo', {'domain': 'bar', 'hypervisor': 'baz'})\n libvirt_mock.open.assert_called_with('baz')\n create_mock.assert_called_with(resources.hypervisor, 'foo', 'bar', network_name=None)", "def test_handle_data(self):\n for close in ('higher', 'lower'):\n print 'close:', close\n self.hd_args['close'] = close\n self.df_stock = self.backtest.handle_data(self.backtest.df_stock, **self.hd_args)\n print self.df_stock.to_string(line_width=400)\n\n print '=' * 100\n\n new_columns = ('found0', 'found1', 'found2',\n 'open_to_high', 'open_to_low', 'open_to_close')\n for column in new_columns:\n self.assertIn(column, self.df_stock.columns)", "def load_columns(self):\n pass", "def testCosmologyCatalog(self):\n dbObj = myTestGals(database=self.dbName)\n cat = cosmologicalGalaxyCatalog(dbObj)\n cat.write_catalog(self.catName)", "def add_mock_data(to_mock_df: pd.DataFrame) -> pd.DataFrame:\n mock_companies = [\"Company_A\", \"Company_B\", \"Company_C\"]\n\n # Generate a random list of job ID numbers, and a random list of the above mock companies.\n jobs: List[int] = list(np.random.randint(low=1, high=10, size=to_mock_df.shape[0]))\n companies: List[str] = [random.choice(mock_companies) for i in range(to_mock_df.shape[0])]\n\n # Append these to the DataFrame\n to_mock_df[\"Company\"] = companies\n to_mock_df[\"Job\"] = jobs\n return to_mock_df", "def test_constructor(self):\n # Record the model types of all the models to be created\n all_model_types = model_type_to_display_name.keys()\n\n # Record the attribute / value pairs that are common to all models.\n common_attr_value_dict = {\"data\": self.fake_df,\n \"name_spec\": self.fake_names,\n \"design\": self.fake_design,\n \"ind_var_names\": self.fake_names[\"x\"],\n \"alt_id_col\": self.alt_id_col,\n \"obs_id_col\": self.obs_id_col,\n \"choice_col\": self.choice_col,\n \"specification\": self.fake_specification,\n \"alt_IDs\": self.fake_df[\"alt_id\"].values,\n \"choices\": self.fake_df[\"choice\"].values}\n\n # Create a shape name dictionary to relate the various models to 
the\n # names of their shape parameters.\n shape_name_dict = {\"MNL\": None,\n \"Asym\": self.fake_shape_names[:2],\n \"Cloglog\": None,\n \"Scobit\": self.fake_shape_names,\n \"Uneven\": self.fake_shape_names,\n \"Nested Logit\": None,\n \"Mixed Logit\": None}\n\n # Create a shape reference position dictionary to relate the various\n # models to their shape reference positions.\n shape_ref_dict = {}\n for key in shape_name_dict:\n shape_ref_dict[key] = (None if key != \"Asym\" else\n self.fake_shape_ref_pos)\n\n # Create an intercept_names and intercept_ref_position dictionary to\n # relate the various models to their respective kwargs.\n intercept_names_dict = {}\n intercept_ref_dict = {}\n for key in shape_name_dict:\n if key in [\"MNL\", \"Nested Logit\", \"Mixed Logit\"]:\n intercept_names_dict[key] = None\n intercept_ref_dict[key] = None\n else:\n intercept_names_dict[key] = self.fake_intercept_names\n intercept_ref_dict[key] = self.fake_intercept_ref_pos\n\n # Create a nest_names dictionary to relate the various models to their\n # nest_name attributes\n nest_name_dict = {}\n nest_spec_dict = {}\n for key in shape_name_dict:\n if key != \"Nested Logit\":\n nest_name_dict[key] = None\n nest_spec_dict[key] = None\n else:\n nest_name_dict[key] = list(self.fake_nest_spec.keys())\n nest_spec_dict[key] = self.fake_nest_spec\n\n # Create dictionaries for the mixing_id_col, mixing_vars, and\n # mixing_pos attributes\n mixing_id_col_dict = {}\n mixing_vars_dict = {}\n mixing_pos_dict = {}\n\n for key in shape_name_dict:\n if key != \"Mixed Logit\":\n mixing_id_col_dict[key] = None\n mixing_vars_dict[key] = None\n mixing_pos_dict[key] = None\n else:\n mixing_id_col_dict[key] = self.obs_id_col\n mixing_vars_dict[key] = self.fake_names[\"x\"]\n mixing_pos_dict[key] = [0]\n\n # Record the attribute / value pairs that vary across models\n varying_attr_value_dict = {\"model_type\": model_type_to_display_name,\n \"intercept_names\": intercept_names_dict,\n \"intercept_ref_position\":\n intercept_ref_dict,\n \"shape_names\": shape_name_dict,\n \"shape_ref_position\": shape_ref_dict,\n \"nest_names\": nest_name_dict,\n \"nest_spec\": nest_spec_dict,\n \"mixing_id_col\": mixing_id_col_dict,\n \"mixing_vars\": mixing_vars_dict,\n \"mixing_pos\": mixing_pos_dict}\n\n # Set up the keyword arguments that are needed for each of the model\n # types\n variable_kwargs = {}\n for model_name in all_model_types:\n variable_kwargs[model_name] = {}\n variable_kwargs[model_name][\"intercept_names\"] =\\\n intercept_names_dict[model_name]\n variable_kwargs[model_name][\"intercept_ref_pos\"] =\\\n intercept_ref_dict[model_name]\n variable_kwargs[model_name][\"shape_ref_pos\"] =\\\n shape_ref_dict[model_name]\n variable_kwargs[model_name][\"shape_names\"] =\\\n shape_name_dict[model_name]\n variable_kwargs[model_name][\"nest_spec\"] =\\\n nest_spec_dict[model_name]\n variable_kwargs[model_name][\"mixing_id_col\"] =\\\n mixing_id_col_dict[model_name]\n variable_kwargs[model_name][\"mixing_vars\"] =\\\n mixing_vars_dict[model_name]\n\n # Execute the test for each model type\n for model_name in all_model_types:\n # Update the model type in the list of constructor args\n self.constructor_args[-1] = model_name\n\n # Use this specific model's keyword arguments\n self.constructor_kwargs.update(variable_kwargs[model_name])\n\n # Construct the model object\n model_obj = pylogit.create_choice_model(*self.constructor_args,\n **self.constructor_kwargs)\n\n # Make sure that the constructor has all of the required attributes\n for 
attr in common_attr_value_dict:\n value = common_attr_value_dict[attr]\n if isinstance(value, pd.DataFrame):\n self.assertTrue(value.equals(model_obj.data))\n elif isinstance(value, np.ndarray):\n npt.assert_allclose(value,\n model_obj.__getattribute__(attr))\n else:\n self.assertEqual(value,\n model_obj.__getattribute__(attr))\n\n for attr in varying_attr_value_dict:\n value = varying_attr_value_dict[attr][model_name]\n\n self.assertEqual(value,\n model_obj.__getattribute__(attr))\n\n return None", "def setup_get_warehouse_fixture(self):\n if self.target is None:\n self.target = os.getenv('SODA_TEST_TARGET', TARGET_POSTGRES)\n\n warehouse_fixture = SqlTestCase.warehouse_fixtures_by_target.get(self.target)\n if warehouse_fixture is None:\n logging.debug(f'Creating warehouse {self.target}')\n\n warehouse_fixture = WarehouseFixture.create(self.target)\n dialect = self.create_dialect(self.target)\n\n warehouse_yml = WarehouseYml(dialect=dialect, name='test_warehouse')\n warehouse_fixture.warehouse = Warehouse(warehouse_yml)\n warehouse_fixture.create_database()\n SqlTestCase.warehouse_fixtures_by_target[self.target] = warehouse_fixture\n\n return warehouse_fixture", "def add_underscore(houses:pd.DataFrame) -> pd.DataFrame:\n houses['PROPERTY_TYPE']= houses[PROPERTY_TYPE]\n houses['ZIP_CODE']= houses[ZIP_CODE]\n houses['SQFT']= houses[SQUARE_FEET]\n houses['YEAR_BUILT']= houses[YEAR_BUILT]\n houses['DAYS_ON_MARKET']= houses[DAYS_ON_MARKET]\n houses['SQFT_PER']= houses[SQFT_PER]\n houses['HOA']= houses[HOA]\n drop_columns= [PROPERTY_TYPE, ZIP_CODE, SQUARE_FEET, YEAR_BUILT, \n DAYS_ON_MARKET, SQFT_PER, HOA]\n return houses.drop(drop_columns, axis= 1)", "def setup1():\n ht1 = HashTable()\n ht1.set('Apple', 'Jack')\n ht1.set('Pinky', 'Pie')\n ht1.set('Flutter', 'Shy')\n return ht1", "def _get_columns(self):\n columns = []\n for column in self.plate_meta['columns']:\n columns.append(column['name'])\n self.columns = columns", "def generateBlankDataHDU(num_rows=1, header_file='header_dataHDU.txt',\n coldef_file='coldefs_dataHDU.txt'):\n \n cols = []\n \n # The column definitions are loaded from an external file, which is\n # parsed line-by-line, using regular experssions.\n \n unit_pat = \"unit\\s*\\=\\s*'([\\w/%]+)'\"\n name_pat = \"name\\s*\\=\\s*'([\\w-]+)'\"\n dim_pat = \"dim\\s*\\=\\s*'(\\([\\d,]+\\))'\"\n format_pat = \"format\\s*\\=\\s*'(\\w+)'\" \n\n # Loop through, matching on each line\n cfile = open(coldef_file)\n for line in cfile.readlines():\n unit = name = dim = format = None\n name_match = re.search(name_pat, line)\n if name_match:\n name = name_match.group(1)\n \n format_match = re.search(format_pat, line)\n dim_match = re.search(dim_pat, line)\n unit_match = re.search(unit_pat, line)\n\n if unit_match: unit = unit_match.group(1)\n if dim_match: dim = dim_match.group(1)\n \n if format_match: \n fits_fmt = format_match.group(1)\n zarr = generateZeros(num_rows, fits_fmt, dim)\n\n \n # Append the column to the column list\n cols.append(pf.Column(name=name, format=fits_fmt, unit=unit, dim=dim, array=zarr))\n \n # Now we have made a list of columns, we can make a new table\n coldefs = pf.ColDefs(cols)\n #print coldefs\n tbhdu = pf.new_table(coldefs)\n \n # If that all worked, we can populate with the final header values\n cards = generateCards(header_file)\n \n for card in cards:\n if card.keyword == 'COMMENT':\n pass\n tbhdu.header.add_comment(card.value)\n elif card.keyword == 'HISTORY':\n pass\n tbhdu.header.add_history(card.value)\n else:\n tbhdu.header.set(card.key, card.value, 
card.comment)\n \n return tbhdu", "def populate(self, fields=None, **fields_kwargs):\n\n # this will run all the fields of the Orm, not just the fields in fields\n # dict, another name would be hydrate\n pop_fields = {}\n fields = self.make_dict(fields, fields_kwargs)\n for k in self.schema.fields.keys():\n pop_fields[k] = fields.get(k, None)\n\n self._populate(pop_fields)", "def test_created_chassis_properties(self):\n self.assertEqual(self.chassis.description, self.chassis_description)\n self.assertEqual(self.chassis.extra, self.chassis_extra)", "def prep_hd(header,phi_c,lambda_c,nx,ny,dx,dy):\n header_out = {}\n\n # Keywords to get from original header\n keys_hd = ['TELESCOP', 'INSTRUME', 'WAVELNTH', 'CAMERA','DATE',\n 'DATE_S','DATE-OBS','T_OBS','T_REC','TRECEPOC',\n 'TRECSTEP','TRECUNIT','HARPNUM','DSUN_OBS','DSUN_REF',\n 'RSUN_REF','CRLN_OBS','CRLT_OBS','CAR_ROT','OBS_VR',\n 'OBS_VW','OBS_VN','RSUN_OBS','QUALITY','QUAL_S','QUALLEV1']\n\n for key in keys_hd:\n header_out[key] = header[key]\n\n # Add new keywords\n header_out['NAXIS'] = 2\n header_out['NAXIS1'] = nx\n header_out['NAXIS2'] = ny\n\n header_out['CUNIT1'] = 'degree'\n header_out['CUNIT2'] = 'degree'\n\n header_out['CRPIX1'] = (nx - 1) / 2 + 1\n header_out['CRPIX2'] = (ny - 1) / 2 + 1\n header_out['CRVAL1'] = phi_c\n header_out['CRVAL2'] = lambda_c\n header_out['CDELT1'] = dx\n header_out['CDELT2'] = dy\n header_out['CTYPE1'] = 'CRLN-CEA'\n header_out['CTYPE2'] = 'CRLT-CEA'\n header_out['CROTA2'] = 0.0\n\n header_out['WCSNAME'] = 'Carrington Heliographic'\n header_out['BUNIT'] = 'Mx/cm^2'\n\n return header_out", "def populate_processing_methods(self):\n self.FIELDS_PRE_PROCESSING_METHODS = {\n 'publisher': self.join_all,\n 'description': self.join_all,\n 'format': self.join_all,\n 'language': self.join_all,\n 'type': self.get_alignment,\n 'rights': self.join_all,\n 'date': self.get_alignment,\n 'relation': self.join_all,\n 'source': self.join_all,\n 'coverage': self.get_alignment,\n 'contributor': self.join_all,\n 'title': self.join_all,\n 'identifier': self.join_all,\n 'subject': self.get_alignment,\n 'creator': self.get_alignment\n }", "def hfp_create(handle, org_dn, name,\r\n blade_bundle_version=\"\",\r\n rack_bundle_version=\"\",\r\n ignore_comp_check=\"yes\",\r\n update_trigger=\"immediate\",\r\n mode=\"staged\",\r\n stage_size=\"0\",\r\n policy_owner=\"local\",\r\n descr=\"testdescr\"):\r\n\r\n from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import \\\r\n FirmwareComputeHostPack\r\n\r\n org = handle.query_dn(org_dn)\r\n if org is None:\r\n raise ValueError(\"Org '%s' does not exist\" % org_dn)\r\n\r\n mo = FirmwareComputeHostPack(parent_mo_or_dn=\"org-root\",\r\n name=name,\r\n blade_bundle_version=blade_bundle_version,\r\n rack_bundle_version=rack_bundle_version,\r\n ignore_comp_check=ignore_comp_check,\r\n update_trigger=update_trigger,\r\n mode=mode,\r\n stage_size=stage_size,\r\n policy_owner=policy_owner,\r\n descr=descr)\r\n handle.add_mo(mo, modify_present=True)\r\n handle.commit()\r\n\r\n return mo", "def simple_trial_history_columns_df(mouse, meta):\n # add a binary column for choice, 1 for go 0 for nogo\n new_meta = {}\n new_meta['choice'] = np.zeros(len(meta))\n new_meta['choice'][meta['trialerror'].isin([0, 3, 5, 7]).values] = 1\n meta_df_out = pd.DataFrame(data=new_meta, index=meta.index)\n # meta_df_out = pd.concat([meta_df_out, new_meta_df], axis=1)\n\n # add a binary column for reward\n new_meta = {}\n new_meta['reward'] = np.zeros(len(meta_df_out))\n 
new_meta['reward'][meta['trialerror'].isin([0]).values] = 1\n new_meta_df = pd.DataFrame(data=new_meta, index=meta.index)\n meta_df_out = pd.concat([meta_df_out, new_meta_df], axis=1)\n\n # add a binary column for punishment\n new_meta = {}\n new_meta['punishment'] = np.zeros(len(meta_df_out))\n new_meta['punishment'][meta['trialerror'].isin([5]).values] = 1\n new_meta_df = pd.DataFrame(data=new_meta, index=meta.index)\n meta_df_out = pd.concat([meta_df_out, new_meta_df], axis=1)\n \n # rename oris according to their meaning during learning\n new_meta = {}\n for ori in ['plus', 'minus', 'neutral']:\n new_meta = {}\n new_meta['initial_{}'.format(ori)] = np.zeros(len(meta_df_out))\n new_meta['initial_{}'.format(ori)][meta['orientation'].isin([lookups.lookup[mouse][ori]]).values] = 1\n new_meta_df = pd.DataFrame(data=new_meta, index=meta.index)\n meta_df_out = pd.concat([meta_df_out, new_meta_df], axis=1)\n\n # rename oris according to their meaning during learning broken up by preceding ori\n new_meta = {}\n p_cols = []\n prev_ori_vec = np.insert(np.array(meta['orientation'].values[:-1], dtype='float'), 0, np.nan)\n for ori in ['plus', 'minus', 'neutral']:\n curr_ori_bool = meta['orientation'].isin([lookups.lookup[mouse][ori]]).values\n for prev_ori in ['plus', 'minus', 'neutral']:\n prev_ori_bool = np.isin(prev_ori_vec, lookups.lookup[mouse][prev_ori])\n new_meta = {}\n new_meta['initial_{}, initial_{}'.format(prev_ori, ori)] = np.zeros(len(meta_df_out))\n new_meta['initial_{}, initial_{}'.format(prev_ori, ori)][prev_ori_bool & curr_ori_bool] = 1\n new_meta_df = pd.DataFrame(data=new_meta, index=meta.index)\n meta_df_out = pd.concat([meta_df_out, new_meta_df], axis=1)\n p_cols.append('initial_{}, initial_{}'.format(prev_ori, ori))\n \n # rename oris according to their meaning during learning\n new_meta = {}\n cs_codes = {'plus': [0, 1], 'neutral': [2, 3], 'minus': [4, 5]}\n for ori in ['plus', 'minus', 'neutral']:\n new_meta = {}\n new_meta['cs_{}'.format(ori)] = np.zeros(len(meta_df_out))\n new_meta['cs_{}'.format(ori)][meta['trialerror'].isin(cs_codes[ori]).values] = 1\n new_meta_df = pd.DataFrame(data=new_meta, index=meta.index)\n meta_df_out = pd.concat([meta_df_out, new_meta_df], axis=1)\n\n # also return binary columns for orientation\n i_cols, cs_cols = [], []\n for ori in ['plus', 'minus', 'neutral']:\n i_cols.append('initial_{}'.format(ori))\n cs_cols.append('cs_{}'.format(ori))\n \n return meta_df_out, p_cols, i_cols, cs_cols", "def make_columns(options, columns):\n # (ElasticsearchFDWOptions, Dict[str, multicorn.ColumnDefinition]) -> Columns\n columns = columns.copy()\n\n id_column = IdColumn(name=options.rowid_column)\n columns.pop(options.rowid_column, None)\n if options.score_column:\n score_column = ScoreColumn(name=options.score_column)\n del columns[options.score_column]\n else:\n score_column = None\n if options.query_column:\n query_column = options.query_column\n del columns[options.query_column]\n else:\n query_column = None\n if options.sort_column:\n sort_column = options.sort_column\n del columns[options.sort_column]\n else:\n sort_column = None\n\n columns = [make_column(options, name, column) for name, column in columns.items()]\n return Columns(\n id_column=id_column,\n score_column=score_column,\n query_column=query_column,\n sort_column=sort_column,\n columns=columns,\n )" ]
[ "0.53807044", "0.52395046", "0.50270265", "0.49795195", "0.49258766", "0.49224955", "0.4920414", "0.49198395", "0.49087453", "0.49079537", "0.48617426", "0.48479292", "0.4806662", "0.48012027", "0.47516495", "0.47495013", "0.47483745", "0.46836653", "0.46500537", "0.46343023", "0.46319044", "0.4618239", "0.4617335", "0.46090215", "0.45768186", "0.45753703", "0.45643967", "0.45495805", "0.45277986", "0.45172945", "0.45158127", "0.450278", "0.44967043", "0.4488259", "0.44847125", "0.44773155", "0.4473246", "0.44728073", "0.44719145", "0.44627187", "0.44516852", "0.4451479", "0.44424146", "0.44418377", "0.4417659", "0.4417151", "0.44166368", "0.4413185", "0.44115388", "0.4405627", "0.4403268", "0.44025418", "0.44025418", "0.44021565", "0.43911853", "0.4386292", "0.43799275", "0.43751872", "0.43670464", "0.43665907", "0.43656415", "0.43625072", "0.4362189", "0.43618196", "0.43616349", "0.4346984", "0.4346936", "0.43409824", "0.4339147", "0.4337587", "0.43351227", "0.43281028", "0.43241957", "0.4322586", "0.43223134", "0.4320817", "0.43199465", "0.43187082", "0.43128687", "0.43123627", "0.43056506", "0.4300517", "0.42982116", "0.42972142", "0.4295885", "0.42931253", "0.42915976", "0.42906794", "0.4287602", "0.42873162", "0.4284773", "0.42795673", "0.42786804", "0.4278673", "0.42762554", "0.42726585", "0.42708504", "0.42638722", "0.42615572", "0.4261416" ]
0.639631
0
Get sub directories within a path
def get_directories(self, path):
    if self.name == 'dropbox':
        dbx = dropbox.get_dropbox()
        return dropbox.get_folders(dbx, path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getImmediateSubdirectories(dir):", "def get_subdirs(dir_path: str) -> list:\n res = list(x.name for x in os.scandir(dir_path) if x.is_dir())\n return res", "def collect_subdirs(path_to_walk):\n root, subdirs, _ = next(os.walk(path_to_walk))\n\n return [os.path.join(root, d) for d in subdirs]", "def get_subdirectories(self, physical_path):\n result = []\n for p in os.listdir(physical_path):\n if not os.path.isdir(os.path.join(physical_path, p)):\n continue\n result.append(os.path.join(physical_path, p))\n\n return result", "def getsubdirs(toppath, search_string = \".\"):\n if not search_string:\n return [toppath]\n reg_prog = re.compile(search_string)\n dirlist = []\n if search_string == \".\":\n dirlist.append(toppath)\n for root, dirs, files in os.walk(toppath):\n for fname in files:\n if reg_prog.search(os.path.join(root,fname)):\n dirlist.append(root)\n continue\n uniqueList = []\n for value in dirlist:\n if value not in uniqueList:\n uniqueList.append(value)\n return uniqueList", "def subdirs(path, name):\n f = lambda x: name is None or x.lower() == name.lower()\n return [file_path\n for file_name in os.listdir(path)\n if f(file_name) and not file_name.startswith('.')\n for file_path in (os.path.join(path, file_name),)\n if os.path.isdir(file_path)]", "def get_subdirectories(a_dir):\n return [a_dir + name + \"/\" for name in os.listdir(a_dir)\n if os.path.isdir(os.path.join(a_dir, name))]", "def get_subdirs(src_dir):\n img_dirs = sorted(next(os.walk(src_dir))[1])\n subdirs = [src_dir + img_dir for img_dir in img_dirs]\n return subdirs", "def get_directories_recursive(self, path) :\n\n if path.is_dir() :\n yield path\n for child in path.iterdir():\n yield from self.get_directories_recursive(child)\n elif path.is_file() :\n yield path", "def subdirs(dir):\n return [dir + '/' + name for name in os.listdir(dir)\n if os.path.isdir(os.path.join(dir, name))]", "def get_dirs(hub: pop.hub.Hub, sub: pop.hub.Sub) -> List[str]:\n return sub._dirs", "def get_sub_folders(session, ds_browser, ds_path):\n search_task = session._call_method(\n session._get_vim(),\n \"SearchDatastore_Task\",\n ds_browser,\n datastorePath=ds_path)\n try:\n task_info = session._wait_for_task(search_task)\n except error_util.FileNotFoundException:\n return set()\n # populate the folder entries\n if hasattr(task_info.result, 'file'):\n return set([file.path for file in task_info.result.file])\n return set()", "def get_all_subdirs(dir_path):\n\tls = os.listdir(dir_path)\n\tdirs = []\n\tfor f in ls:\n\t\tif os.path.isdir(os.path.join(dir_path, f)):\n\t\t\tdirs.append(f)\n\treturn dirs", "def get_dir_recursive(path: str) -> List[str]:\n files = []\n for dir_entry in os.scandir(path):\n if dir_entry.is_dir(follow_symlinks=True):\n files.extend(get_dir_recursive(dir_entry))\n else:\n files.append(dir_entry.path)\n return files", "def collect_paths_to_subdirectories(path_to_directory):\n paths_to_subdirectories = []\n for name_item in os.listdir(path_to_directory):\n path_to_item = os.path.join(path_to_directory,\n name_item)\n if os.path.isdir(path_to_item):\n paths_to_subdirectories.append(path_to_item)\n \n # `os.listdir` returns a list whose order depends\n # on the OS. 
To make `collect_paths_to_subdirectories`\n # independent of the OS, `paths_to_subdirectories` is\n # sorted.\n paths_to_subdirectories.sort()\n return paths_to_subdirectories", "def traverse(self, path):\n path_list = [s for s in path.split('/') if len(s) > 0 ]\n directory = self.files\n index = 0\n while index < len(path_list) and path_list[index] in directory:\n if type(directory[path_list[index]]) is str: # directory is a file\n break\n directory = directory[path_list[index]]\n index += 1\n return directory, path_list[index:]", "def find_subdirectories(package):\n try:\n subdirectories = next(os.walk(package_to_path(package)))[1]\n except StopIteration:\n subdirectories = []\n return subdirectories", "def _GetMountSubdirs(path):\n result = []\n for _, mountpoint, _, _ in utils.GetMounts():\n if (mountpoint.startswith(path) and\n mountpoint != path):\n result.append(mountpoint)\n\n result.sort(key=lambda x: x.count(\"/\"), reverse=True)\n return result", "def list_dir(*path):\n path = os.path.join(*path)\n return next(os.walk(path))[1]", "def _traverse_path(path):\n path = Path(path)\n\n if path.is_dir():\n yield from path.rglob(\"*\")\n else:\n yield path", "def get_parent_paths(path, stop_at):\n assert stop_at in path\n if stop_at not in path:\n return []\n if path == stop_at:\n return [path]\n\n path = os.path.expanduser(path)\n\n ps = [os.path.abspath(path)]\n while True:\n path = os.path.abspath(os.path.join(path, os.pardir))\n ps.append(path)\n if path == stop_at:\n break\n\n return ps", "def determine_subdirectories(file_path):\n\tsource_dir = file_path.replace('/data/','/data-cg1d/')\n\tlead_dir_1, subdir_1 = split_leading_directory(source_dir)\n\tlead_dir_2, subdir_2 = split_leading_directory(subdir_1)\n\tipts_dir, new_subdir = split_leading_directory(subdir_2)\n\tprint('\\n\\nsource_dir: {}\\nlead_dir_2: {}\\nsubdir_2: {}\\nipts_dir: {}\\n new_subdir: {}\\n\\n'.format(\n\t\tsource_dir, lead_dir_2, subdir_2, ipts_dir, new_subdir))\n\treturn source_dir, ipts_dir, new_subdir", "def dirs_in_dir(path):\n listing = sorted(os.listdir(path))\n\n dirs = []\n for name in listing:\n longname = path + '/' + name\n if name[0] == '.':\n continue\n if not os.path.isdir(longname):\n continue\n dirs.append(name)\n\n return dirs", "def get_directories(path):\n\n # Uses abspath as the directory\n absolute = os.path.dirname(abspath(path))\n all_files = os.listdir(absolute)\n\n # Get the absolute path of each file\n absolute_files = [\"/\".join([absolute, d]) for d in all_files]\n\n # Here we filter all non-directires out and return\n return [i for i in absolute_files if os.path.isdir(i)]", "def _subdirectories(self):\n for o in os.listdir(self.directory):\n if os.path.isdir(os.path.join(self.directory, o)):\n yield os.path.join(self.directory, o)", "def test_GetFilesInDirectory_subdir_relpath(tempdir: pathlib.Path):\n # Create files: [ sub/a, sub/sub/b ]\n (tempdir / \"sub\").mkdir()\n (tempdir / \"sub\" / \"a\").touch()\n (tempdir / \"sub\" / \"sub\").mkdir()\n (tempdir / \"sub\" / \"sub\" / \"b\").touch()\n assert set(dpack.GetFilesInDirectory(tempdir, [])) == {\n pathlib.Path(\"sub/a\"),\n pathlib.Path(\"sub/sub/b\"),\n }", "def finddirs(root):\n retval = []\n for root, dirs, files in os.walk(root):\n for d in dirs:\n retval.append(os.path.join(root, d))\n return retval", "def all_subdirs_of(dir='.'):\n result = []\n for item in os.listdir(dir):\n path = os.path.join(dir, item)\n if os.path.isdir(path):\n result.append(path)\n return result", "def traverse(self, path):\n\n path_list = [s for s 
in path.split('/') if len(s) > 0 ]\n # print(path)\n # print('files:', self.files)\n directory = self.files\n index = 0\n while index < len(path_list) and path_list[index] in directory:\n if type(directory[path_list[index]]) is str: # directory is a file\n break\n directory = directory[path_list[index]]\n index += 1\n print('info', directory, path_list[index:])\n return directory, path_list[index:]", "def walk_directory(path: Path, suffix: Optional[str] = None) -> List[Path]:\n if not path.is_dir():\n return [path]\n paths = [path]\n locs = []\n seen = set()\n for path in paths:\n if str(path) in seen:\n continue\n seen.add(str(path))\n if path.parts[-1].startswith(\".\"):\n continue\n elif path.is_dir():\n paths.extend(path.iterdir())\n elif suffix is not None and not path.parts[-1].endswith(suffix):\n continue\n else:\n locs.append(path)\n # It's good to sort these, in case the ordering messes up cache.\n locs.sort()\n return locs", "def segment_paths(root):\n directories = []\n history = history_path(root)\n for d in os.listdir(history):\n path = os.path.join(history, d)\n if os.path.isdir(path):\n directories.append(path)\n return sorted(directories)", "def get_dirs(self, path):\n ds = []\n try:\n for d in os.listdir(path):\n if os.path.isdir(os.path.join(path, d)):\n ds.append(d)\n except OSError:\n pass\n ds.sort()\n return ds", "def find_directory(pattern, path):\n result = []\n for root, dirs, files in os.walk(path):\n for name in dirs:\n if fnmatch.fnmatch(name, pattern):\n result.append(os.path.join(root, name))\n return result", "def ls_dirs(self, path, recursive=False):\n if path != \"\" and not path.endswith(\"/\"):\n path += \"/\"\n\n blob_iter = self.client.list_blobs(name_starts_with=path)\n dirs = []\n for blob in blob_iter:\n relative_dir = os.path.dirname(os.path.relpath(blob.name, path))\n if (\n relative_dir\n and (recursive or \"/\" not in relative_dir)\n and relative_dir not in dirs\n ):\n dirs.append(relative_dir)\n\n return dirs", "def get_search_subdirs(adir, afile, in_subdirs=[]):\n dirs = []\n search_subdirs = in_subdirs\n if not in_subdirs:\n search_subdirs = get_basic_search_subdirs(afile)\n for subdir in search_subdirs:\n path = os.path.join(adir, subdir)\n dirs.append(path)\n return dirs", "def _walk_to_root(path):\n if not os.path.exists(path):\n raise IOError('Starting path not found')\n\n if os.path.isfile(path):\n path = os.path.dirname(path)\n\n last_dir = None\n current_dir = os.path.abspath(path)\n while last_dir != current_dir:\n yield current_dir\n parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))\n last_dir, current_dir = current_dir, parent_dir", "def get_all_paths(dmt, directory_path=''):\n # Base case.\n if not dmt.children:\n return set()\n \n filesystem_items = set()\n for item in dmt.children.keys():\n filesystem_items.add(directory_path+item)\n # Also get the paths of subdirectory contents.\n if item[-1] == '/':\n subdir_name = item\n subdir_path = directory_path + subdir_name\n \n filesystem_items.add(subdir_path)\n filesystem_items.update(get_all_paths(dmt.children[subdir_name], subdir_path))\n \n return filesystem_items", "def split_path(self, path):\n path = os.path.splitdrive(path)[1][1:]\n folders = []\n while 1:\n path, folder = os.path.split(path)\n if folder != \"\" and folder:\n folders.append(folder)\n if len(path) == 0:\n return folders[::-1]\n else:\n if path != \"\" and path:\n folders.append(path)\n break\n folders.reverse()\n return folders", "def get_immediate_subdirectories(self, a_dir):\n return [name 
for name in os.listdir(a_dir)\n if os.path.isdir(os.path.join(a_dir, name))]", "def RecursePath(path):\n\tif not os.path.exists(path):\n\t\t# First check will be to see if @path ended with a slash and was quoted.\n\t\t# This helps with spaces in the path but will treat the last \" as a\n\t\t#\tliteral character and leave it at the end of the path, making it\n\t\t#\tnon-existing.\n\t\tpath = path.rstrip('\"')\n\t\tif not os.path.exists(path):\n\t\t\treturn\n\n\tif os.path.exists(path):\n\t\tif os.path.isdir(path):\n\t\t\tfor root, dirs, files in os.walk(path):\n\t\t\t\tdirs.sort()\n\t\t\t\tfor name in sorted(files):\n\t\t\t\t\tfname = os.path.join(root, name) \n\t\t\t\t\tif os.path.isfile(fname):\n\t\t\t\t\t\tyield fname\n\t\t\t\t\telse:\n\t\t\t\t\t\tpass\n\t\telse:\n\t\t\tif os.path.isfile(path):\n\t\t\t\tyield path", "def dir_by_levels(path, levels):\n return op.abspath(op.join(path, *(['..'] * levels)))", "def get_files(path: str) -> List[str]:\n if not isdir(path):\n return [path] # its expected to return a list each time even if its a single element\n return [file for fileOrDir in listdir(path) for file in get_files(path + '/' + fileOrDir)]\n # return list of each file returned by the recursive call getFiles(fileOrDir) on\n # each fileOrDir in listdir(path)", "def find_folder(startpath, folder_name, first_occurrence=False):\n candidates = []\n for root, dirs, files in os.walk(startpath):\n for d in dirs:\n if d == folder_name.strip('/'):\n if first_occurrence:\n candidates.append(os.path.abspath(root + '/' + d))\n return candidates\n candidates.append(os.path.abspath(root+'/'+d))\n return candidates", "def scantree(path):\n for entry in os.scandir(path):\n if entry.is_dir(follow_symlinks=False):\n yield from scantree(entry.path)\n else:\n yield entry", "def getDirectoryList(path):\n dirList = [\"/\".join([path, object]) for object in os.listdir(path)]\n dirList = [object for object in dirList if os.path.isdir(object)]\n return dirList", "def split(path):\n\n head = path\n dirs = []\n\n while True:\n head, tail = os.path.split(head)\n if tail:\n dirs.append(tail)\n else:\n break\n\n return list(reversed(dirs))", "def _RecursiveDirectoryListing(dirpath):\n result = []\n for root, _, files in os.walk(dirpath):\n for f in files:\n result.append(os.path.relpath(os.path.join(root, f), dirpath))\n return result", "def sunderPath(path):\n ret = []\n while True:\n h, t = os.path.split(path)\n if t:\n ret.append(t)\n if not h:\n break\n path = h\n return ret", "def get_all_dirs(dirpath, base_dir=None):\n\tif not base_dir:\n\t\tpost = os.path.normpath(dirpath)\n\telif base_dir in dirpath:\n\t\t(pre, post) = dirpath.split(os.path.normpath(base_dir))\n\t\tpost = os.path.normpath(post)\n\telse:\n\t\treturn\n\tdirs = []\n\t(head, tail) = os.path.split(post)\n\twhile tail:\n\t\tdirs.append(tail)\n\t\t(head, tail) = os.path.split(head)\n\tdirs.reverse()\n\treturn dirs", "def get_directory(path):\n return mangle_path(path).rsplit('/',1)[0]", "def getDirectory(path):\n\tfrom os.path import split\n\tpath = normalizePath(path)\n\treturn split(path)[0]", "def IteratePathParents(start_path):\n path = os.path.abspath(start_path)\n yield path\n while path.strip('/'):\n path = os.path.dirname(path)\n yield path", "def _repo_dir_and_children(path, max_levels=100):\n # Start from a path, and iterate until we find the repo root.\n path = op.abspath(path)\n children = []\n for li in range(max_levels + 1): # protect against infinite loop\n if op.exists(op.join(path, '.git')) or op.exists(op.join(path, 
'.gitroot')):\n break\n if op.isdir(path):\n children.append(op.basename(path))\n path = op.dirname(path)\n\n if li <= max_levels:\n return path, children[::-1]\n else:\n return None, []", "def immediate_children( path ):\n assert( os.path.isdir( path ) )\n CMD = [ \"find\", path, \"-mindepth\", \"1\", \"-maxdepth\", \"1\" ]\n return [ x for x in run_cmd( CMD ).split( \"\\n\" ) if len( x ) > 0 ]", "def list_folders(path):\n return (name for name in os.listdir(path)\n if os.path.isdir(os.path.join(path, name)))", "def getSubDirectories(path, ignore_dirs=()):\n\n result = []\n\n ignore_dirs = [os.path.normcase(ignore_dir) for ignore_dir in ignore_dirs]\n\n for root, dirnames, _filenames in os.walk(path):\n # Normalize dirnames for better matching.\n dirnames_normalized = [os.path.normcase(dirname) for dirname in dirnames]\n for ignore_dir in ignore_dirs:\n if ignore_dir in dirnames_normalized:\n dirnames.remove(ignore_dir)\n\n dirnames.sort()\n\n for dirname in dirnames:\n result.append(os.path.join(root, dirname))\n\n result.sort()\n return result", "def get_path_ancestry(path: Path) -> Iterable[Path]:\n reverse_parents = reversed(path.parents)\n if path.is_dir():\n return chain(reverse_parents, [path])\n return reverse_parents", "def list_dirs(self, path=\"/\"):\n path = j.sal.fs.pathClean(path)\n dir_obj = self._dir_model.get_by_name(path)\n if not dir_obj:\n raise j.exceptions.Base(\"path {} does not exist\".format(path))\n res = [self._dir_model.get(item).name for item in dir_obj[0].dirs]\n return res", "def parents(path):\n dirs = split(path)\n for _ in range(len(dirs)):\n yield os.path.join('/', *dirs)\n dirs.pop()\n\n yield '/'", "def _findAllSubdirs(self, parentDir):\n subDirs = [join(parentDir, d) for d in os.listdir(parentDir) if os.path.isdir(join(parentDir, d))]\n if not subDirs:\n subDirs = None\n else:\n # add the modify time for each directory\n subDirs = [[path, os.stat(path).st_mtime] for path in subDirs]\n\n # return the subdirectories\n return subDirs", "def scan_tree(path):\n list_of_file_paths = []\n for file_obj in scandir(path):\n if file_obj.is_dir(follow_symlinks=False):\n # yield from scan_tree(file_obj.path)\n list_of_file_paths.extend(scan_tree(file_obj.path))\n else:\n # yield file_path\n if 'DS_Store' not in file_obj.path:\n list_of_file_paths.append(file_obj.path)\n return list_of_file_paths", "def walk_parents(path):\n while os.path.splitdrive(path)[1] != os.sep:\n yield path\n path = os.path.dirname(path)", "def scantree(path):\n # type: (str) -> os.DirEntry\n for entry in scandir(path):\n if entry.is_dir(follow_symlinks=True):\n # due to python2 compat, cannot use yield from here\n for t in scantree(entry.path):\n yield t\n else:\n yield entry", "def _scan_directory(self, root_path, name_patterns=None, exclude=None, inc_dirs=None, max_level=None):\n\n name_patterns = name_patterns or []\n exclude = exclude or []\n inc_dirs = inc_dirs or 0\n max_level = max_level or -1\n\n paths=[]\n\n # Generates a tuple of allowed file types\n if '' in name_patterns: name_patterns.remove('')\n if '@Invalid()' in name_patterns: name_patterns.remove('@Invalid()')\n name_patterns = [i.strip('.*') for i in name_patterns]\n name_patterns = tuple(name_patterns)\n\n # Generates list of forbided strings from direcory paths\n if '' in exclude: exclude.remove('')\n\n # Gets the max depth from a system level\n root_path = root_path.rstrip(os.path.sep)\n assert os.path.isdir(root_path)\n num_sep = root_path.count(os.path.sep) + 1\n\n # Walks down directory tree adding to paths[]\n for 
walk_root, walk_dirs, walk_files in os.walk(root_path):\n if self.should_terminate():\n return paths\n\n # Checks the level is valid\n num_sep_this = walk_root.count(os.path.sep)\n if (num_sep + max_level > num_sep_this) or (max_level == -1):\n\n if not any(ext in walk_root for ext in exclude):\n\n # If indexing directories add the current directory to the index.\n if inc_dirs:\n paths.append(walk_root)\n\n if name_patterns:\n for name in walk_files:\n if name.endswith(name_patterns):\n paths.append(os.path.join(walk_root, name))\n\n return paths", "def get_dir_list(basepath):\n parent = ListDir(basepath=basepath)\n parent.contents = get_dir_list_recurse(basepath, parent=parent)\n return parent", "def walk_deep(path):\n for root, _, filenames in os.walk(path):\n for f in filenames:\n yield os.path.join(root, f).replace('\\\\', '/')", "def get_all_vdirs(path):\n items = glob.glob(path)\n return items", "def listDir(path):\n filenames = []\n for root, dirs, files in os.walk(path):\n for i in files:\n filenames.append(os.path.join(root, i))\n return filenames", "def walk_directory(self, path):\n files = []\n for dirpath, dirnames, filenames in os.walk(path):\n for filename in filenames:\n files.append(os.path.join(dirpath, filename))\n return files", "def subdir_findall(dir, subdir):\n strip_n = len(dir.split('/'))\n path = '/'.join((dir, subdir))\n return ['/'.join(s.split('/')[strip_n:]) for s in setuptools.findall(path)]", "def dir_path(path):\n pattern='^(.*)[/]$'\n matchobj=re.match(pattern,path)\n if matchobj:\n return path\n else:\n return path+'/'", "def split_path(self, path: str) -> List[str]:\n dirs = path.split('/')\n return list(filter(lambda x: x!='', dirs))", "def listdir2(path):\n found_names = set(listdir(path))\n found_folders = set(d for d in found_names if isdir(join(path, d)))\n found_files = found_names - found_folders\n return found_folders, found_files", "def list_directories(path):\n dir_list = os.listdir(path)\n directories = [f for f in dir_list if os.path.isdir(os.path.join(path, f))]\n return directories", "def get_files_in_dir(path):\n return [os.path.join(dir_name, file)\n for dir_name, subdirs, files in os.walk(path)\n for file in files]", "def _get_files(self, path):\n result = []\n for f in os.listdir(path):\n if os.path.isdir(os.path.join(path, f)):\n result += self._get_files(os.path.join(path, f))\n else:\n result.append(os.path.join(path, f))\n return result", "def volume_paths(path):\n files = (os.path.join(path, f) for f in sorted(os.listdir(path)))\n return [f for f in files if os.path.isdir(f) or f.endswith('.zip')]", "def list_folders_into_directory(directory_path: str) -> [str]:\n for root, directory_names, file_names in walk(directory_path):\n return directory_names", "def list_directory(self, path):\n dirent = self.lookup(path)\n if dirent and dirent.is_directory():\n best_fit = self.retrieve_catalog_for_path(path)\n return best_fit.list_directory(path)", "def split_path(path):\n items = []\n while True:\n path, folder = os.path.split(path)\n if folder != '':\n items.append(folder)\n else:\n if path != '':\n items.append(path)\n break\n items.reverse()\n return items", "def list_sub(location=''):\n if location != '':\n pathloc = os.path.join(os.getcwd(), location)\n else:\n pathloc = os.getcwd()\n\n print(pathloc)\n\n directory_contents = os.listdir(pathloc)\n sub_directories = []\n for item in directory_contents:\n # list directories\n if os.path.isdir(os.path.join(pathloc, item)):\n sub_directories.append(item)\n sub_directories.sort()\n return 
sub_directories", "def _walk_paths(self, paths):\r\n for path in sorted(paths):\r\n if os.path.isdir(path):\r\n for dir_name, _, filenames in sorted(os.walk(path)):\r\n for filename in filenames:\r\n filename = os.path.join(dir_name, filename)\r\n yield os.path.relpath(filename, path), filename\r\n else:\r\n yield os.path.basename(path), path", "def path_split(path):\n res = []\n while path:\n path, tail = os.path.split(path)\n res.insert(0, tail)\n if path == '/':\n res.insert(0, '/')\n break\n return res", "def get_dir(path):\n extension = path.suffix\n if extension == '':\n return path\n else:\n return path.parent", "def list_dir(self, path):", "def path_generator(initial_root):\n for root, dirs, files in os.walk(initial_root):\n paths = [os.path.join(root, name) for name in files]\n return paths", "def find_directories(root_directory):\n\n search_directories = []\n\n if os.path.isdir(root_directory):\n files_and_folders = os.listdir(root_directory)\n for item in files_and_folders:\n sub_directory = os.path.join(root_directory, item)\n if os.path.isdir(sub_directory):\n search_directories.append(sub_directory)\n return search_directories\n\n else:\n sys.exit(\"Error: {} is not a valid directory\".format(root_directory))", "def get_by_path(root, path):\n \n sub_data = root\n for key in path:\n sub_data = sub_data[key]\n \n return sub_data", "def get_list_of_subdir_in_dir(directory):\n list_of_all_dirs = []\n for root, dirs, files in os.walk(directory):\n if not re.search('/$', root):\n root += os.sep # Add '/' to the end of root\n if '.ipynb_checkpoints' not in root:\n list_of_all_dirs.append(root)\n return list_of_all_dirs", "def get_dirs(source_dir):\n all_dirs = set()\n it = os.walk(source_dir)\n it.next()\n dirs = list(it)\n for d in dirs:\n if len(d[1])==0:\n all_dirs.add(d[0])\n return all_dirs", "def get_dirs(root_dir, recursive=True):\n\n ret_dirs = []\n\n for root, dirs, _ in os.walk(root_dir, topdown=True):\n\n for name in dirs:\n ret_dirs.append(os.path.join(root, name))\n\n if not recursive:\n break\n\n return ret_dirs", "def list_directories(dir_pathname, recursive=True, topdown=True,\n followlinks=False):\n for root, dir_names, _ in walk(dir_pathname, recursive, topdown, followlinks):\n for dir_name in dir_names:\n yield absolute_path(os.path.join(root, dir_name))", "def walk_path(start_path, match_pattern=None, recursive=True):\n if match_pattern is None:\n match_pattern = re.compile(r'.*')\n\n if os.path.isdir(start_path):\n for dir_entry in os.listdir(start_path):\n file_path = os.path.join(start_path, dir_entry)\n if os.path.isdir(file_path):\n if recursive:\n for path in walk_path(file_path, match_pattern, recursive):\n yield path\n else:\n yield path\n elif match_pattern.search(file_path):\n yield file_path\n else:\n yield start_path", "def listDirectory( self, path ):\n res = self.__checkArgumentFormat( path )\n if not res['OK']:\n return res\n urls = res['Value']\n successful = {}\n failed = {}\n gLogger.debug( \"DIPStorage.listDirectory: Attempting to list %s directories.\" % len( urls ) )\n serviceClient = RPCClient( self.url )\n for url in urls:\n res = serviceClient.listDirectory( url, 'l' )\n if not res['OK']:\n failed[url] = res['Message']\n else:\n files = {}\n subDirs = {}\n for subPath, pathDict in res['Value'].items():\n if pathDict['Type'] == 'File':\n files[subPath] = pathDict\n elif pathDict['Type'] == 'Directory':\n subDirs[subPath] = pathDict\n successful[url] = {}\n successful[url]['SubDirs'] = subDirs\n successful[url]['Files'] = files\n resDict = 
{'Failed':failed, 'Successful':successful}\n return S_OK( resDict )", "def all_files_under(path):\r\n for cur_path, dirnames, filenames in os.walk(path):\r\n for filename in filenames:\r\n yield os.path.join(cur_path, filename)", "def get_child_folder_names(folder_path):\n folder_names_in_folder = []\n try:\n for f in listdir(folder_path):\n if '__pycache__' not in f and isdir(\"%s/%s\" %(folder_path,f)):\n folder_names_in_folder.append(f)\n except OSError as e:\n # error\n print(\"ERROR IN get_child_folder_names\")\n\n return folder_names_in_folder", "def dirname_recurse(filepath: str, depth: int) -> str:\n for _ in range(depth):\n filepath = os.path.dirname(filepath)\n return filepath", "def simplifyPath(self, path):\n pwd = [] # stack, present working directory\n path = path.split(\"/\")\n for curr in path:\n if not curr or curr == \".\": # skip current dir\n continue\n elif curr == \"..\":\n if pwd: # if we're not in the root directory, go back\n pwd.pop()\n else:\n pwd.append(curr)\n return \"/\" + \"/\".join(pwd)", "def getSubDirectoriesWithDlls(path):\n\n result = set()\n\n for dll_sub_directory in _getSubDirectoriesWithDlls(path):\n result.add(dll_sub_directory)\n\n return tuple(sorted(result))", "def iterPath(self, path):\n \n pathIndex = 0\n folderNames = path.split(self.PATH_SEPARATOR)\n \n while pathIndex < len(folderNames):\n yield folderNames[pathIndex], self.PATH_SEPARATOR.join(folderNames[0:pathIndex + 1])\n pathIndex += 1" ]
[ "0.7549383", "0.74412143", "0.74089825", "0.7138318", "0.7118109", "0.70590365", "0.69344604", "0.6872523", "0.68333566", "0.6820497", "0.6777506", "0.67719275", "0.67686915", "0.67627996", "0.67528176", "0.67451686", "0.6711929", "0.6701213", "0.6682351", "0.6675598", "0.6668384", "0.66450024", "0.66426146", "0.66218483", "0.654104", "0.65407914", "0.65216535", "0.6515118", "0.6498433", "0.6490137", "0.6483271", "0.64138496", "0.6410884", "0.63973725", "0.638081", "0.6375997", "0.6356686", "0.63554496", "0.63377804", "0.6325665", "0.6322124", "0.6309838", "0.6309054", "0.63050544", "0.6288567", "0.62858754", "0.62817883", "0.62530905", "0.62435335", "0.6220577", "0.6219948", "0.6215203", "0.6209069", "0.62021214", "0.61991274", "0.6172575", "0.61527383", "0.6143355", "0.61371785", "0.6131306", "0.6127848", "0.6122441", "0.61215156", "0.6113581", "0.6108194", "0.6107978", "0.6095332", "0.607691", "0.60739696", "0.6073954", "0.60737073", "0.6073524", "0.60674673", "0.6065972", "0.6065", "0.6050062", "0.6047013", "0.60381013", "0.6032189", "0.60288966", "0.60047317", "0.5989244", "0.5986773", "0.598283", "0.5976653", "0.5947301", "0.594262", "0.5940147", "0.59355855", "0.5931067", "0.5921711", "0.5904258", "0.5893602", "0.58896035", "0.5885764", "0.58768713", "0.58631575", "0.58618855", "0.58554494", "0.58550936" ]
0.6562612
24
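
The row that closes above pairs a directory-indexing query with a positive document that walks a tree via os.walk, caps recursion depth by counting path separators, and keeps only files matching the given suffixes. Because this dump shows that document mid-function, here is a self-contained sketch of the same walk-and-filter pattern; the function name, parameter names, and defaults are illustrative assumptions, not the dataset's own API.

    import os

    def collect_paths(root_path, max_level=-1, name_patterns=(), exclude=(), inc_dirs=False):
        # Sketch of the depth-capped walk used by the positive document above.
        # Names and defaults are assumptions made for illustration only.
        paths = []
        num_sep = root_path.count(os.path.sep)      # depth of the starting directory
        for walk_root, _walk_dirs, walk_files in os.walk(root_path):
            num_sep_this = walk_root.count(os.path.sep)
            # -1 means "no depth limit"; otherwise stop once we are too deep.
            if max_level != -1 and num_sep_this - num_sep > max_level:
                continue
            # Skip directories whose path contains any excluded fragment.
            if any(fragment in walk_root for fragment in exclude):
                continue
            if inc_dirs:
                paths.append(walk_root)             # index the directory itself
            for name in walk_files:
                if not name_patterns or name.endswith(tuple(name_patterns)):
                    paths.append(os.path.join(walk_root, name))
        return paths

For example, collect_paths("/var/log", max_level=1, name_patterns=(".log",)) would gather only the first level of *.log files under /var/log.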
Check whether a given directory exists on the cloud storage.
def check_exists(self, directory):
    if self.name == 'dropbox':
        directory = dropbox.normalise_path(directory)
        dbx = dropbox.get_dropbox()
        return dropbox.is_folder(dbx, directory)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def directory_exists(self, directory: str = None) -> bool:\n return os.access(directory if directory else self.get_directory(), os.R_OK)", "def check_dir_exist():\n if os.path.isdir(path_structure):\n return True\n else:\n return False", "def path_exists(path):\n if path.startswith('gs://'):\n command = 'gsutil ls {path}'.format(path=path)\n elif path.startswith('s3://'):\n command = 'awscli s3 ls {path}'.format(path=path)\n else:\n return os.path.exists(path)\n\n return run_quick(command, echo=False).returncode == 0", "def path_exists(dir):\n if os.path.exists(dir): return 1\n else: return 0", "def _existDir(d):\n\treturn os.path.exists(d)", "def dir_exists(dir: str) -> bool:\n return os.path.isdir(dir)", "def dir_exists(self, path):\n if not path:\n return True\n return False", "def gsutil_file_exists(self, path):\n try:\n self.m.gsutil(['ls', path])\n except self.m.step.StepFailure: # pragma: no cover\n return False\n return True", "def folder_exists(path: str) -> bool:\n\treturn os.path.isdir(path)", "def directory_exists(destination):\n\n if not os.path.isdir(destination):\n raise RuntimeError('Directory %s does not exists' % (destination))\n\n return True", "def check_if_dir_exists(path):\n\n # From http://stackoverflow.com/questions/8933237/how-to-find-if-directory-exists-in-python\n return os.path.isdir(path)", "def check_directory(self, directory: str) -> bool:\n return self.run(\"/\", \"root\", [\"test\", \"-d\", directory], check=False).returncode == 0", "def dir_exists(self, path):\n return self._dir_model.get_by_name(name=path) != []", "def DirExists(folder):\n return os.path.isdir(folder)", "def path_exists(self, path):\n\t\tos_path = self._get_os_path(path=path)\n\t\treturn is_folder(self.bucket, os_path)", "def isDirectory( self, path ):\n res = self.__checkArgumentFormat( path )\n if not res['OK']:\n return res\n urls = res['Value']\n successful = {}\n failed = {}\n gLogger.debug( \"DIPStorage.isDirectory: Attempting to determine whether %s paths are directories.\" % len( urls ) )\n serviceClient = RPCClient( self.url )\n for url in urls:\n res = serviceClient.getMetadata( url )\n if res['OK']:\n if res['Value']['Exists']:\n if res['Value']['Type'] == 'Directory':\n gLogger.debug( \"DIPStorage.isDirectory: Successfully obtained metadata for %s.\" % url )\n successful[url] = True\n else:\n successful[url] = False\n else:\n failed[url] = 'Directory does not exist'\n else:\n gLogger.error( \"DIPStorage.isDirectory: Failed to get metdata for %s.\" % url, res['Message'] )\n failed[url] = res['Message']\n resDict = {'Failed':failed, 'Successful':successful}\n return S_OK( resDict )", "def file_exists(path):\n if path.startswith('gs://'):\n return gcsio.GcsIO().exists(path)\n else:\n return os.path.exists(path)", "def check_dir_exists(path):\n if not posixpath.exists(path):\n raise OSError(errno.ENOENT, 'No such file or directory', path)\n elif not posixpath.isdir(path):\n raise OSError(errno.ENOTDIR, 'Not a directory', path)", "def exists(self, prefix, args=()):\n dir_path = self.path(prefix, args)\n return os.path.isdir(dir_path)", "def check_folder_exists(location: str) -> bool:\n if os.path.isdir(location):\n return True\n else:\n return False", "def check_dir(directory):\n if not os.path.exists(directory):\n os.makedirs(directory)", "def check_if_dir_exists():\n if not os.path.exists(str(__CURRENT_DIRECTORY) + os.sep + \"..\" + os.sep + \"logs\"):\n try:\n os.mkdir(str(__CURRENT_DIRECTORY) + os.sep + \"..\" + os.sep + \"logs\")\n logger.debug(\"Dir for logs has been 
created\")\n except OSError:\n logger.debug(f\"Creation of the directory {str(__CURRENT_DIRECTORY) + os.sep + '..' + os.sep + 'logs'} failed\")\n\n if not os.path.exists(str(__CURRENT_DIRECTORY) + os.sep + \"..\" + os.sep + \"db\"):\n try:\n os.mkdir(str(__CURRENT_DIRECTORY) + os.sep + \"..\" + os.sep + \"db\")\n logger.debug(\"Dir for DB has been created\")\n except OSError:\n logger.debug(f\"Creation of the directory {str(__CURRENT_DIRECTORY) + os.sep + '..' + os.sep + 'db'} failed\")", "def is_directory(self, directory):\n mgm, directory = self._safe_split_mgm(directory)\n cmd = [ 'xrdfs', mgm, 'stat', '-q', 'IsDir', directory ]\n status = (subprocess.check_output(cmd) == 0)\n if not status:\n logger.info('Directory {0} does not exist'.format(self._join_mgm_lfn(mgm, directory)))\n return status", "def exists(self, path):", "def check_dir_exist(scheme):\n if os.path.exists(scheme.prefix) is False:\n print(\"ERROR: Required directory '{}' is missing! Exiting!\").format(scheme.prefix)\n sys.exit(1)", "def test_ensure_dir_exists(self):\n pass", "def check_charm_dir_exists(charm_dir: Path) -> None:\n assert charm_dir.is_dir()", "def Exists(self, path: str) -> bool:\n ...", "def check_dir(path, create = True):\n if os.path.exists(path):\n if os.path.isdir(path):\n return path\n else:\n return False\n if create:\n msg = \"Creating directory: '%s'\" % (path)\n print msg\n log.info(msg)\n os.mkdir(path)\n else:\n return False", "def dir_exists(self):\r\n if os.path.exists(self.csvdir):\r\n return True\r\n else:\r\n return False", "def folder_exists(self, path):\n bucket_name, save_path = self.split_name(path)\n if self.bucket_exists(bucket_name):\n try:\n result = self.s3_client.list_objects(Bucket=bucket_name, Prefix=save_path)\n if result[\"Contents\"]:\n return True\n except (botocore.exceptions.ClientError, KeyError):\n # The object does not exist.\n return False\n return False", "def _check_or_create_dir(directory):\n if not tf.gfile.Exists(directory):\n tf.gfile.MakeDirs(directory)", "def _is_folder_exists() -> bool:\n\n pwd: str = os.getcwd()\n data_folder: str = os.path.join(pwd, \"data\")\n return os.path.isdir(data_folder)", "def is_dir(self, path):\n return self.dir_exists(path)", "def dir_is_empty(dir):\n if os.path.exists(dir) and os.path.isdir(dir):\n if not os.listdir(dir):\n return True\n else:\n return False\n else:\n print(\"Given Directory don't exists\")", "def checkExistenceDir(path):\n path = os.path.abspath(path)\n if not os.path.isdir(path):\n logger.warning(\n \"Directory {} does not seem to exist, creating one.\".format(path)\n )\n os.mkdir(path)", "def test_has_directory(self, check_fn_true):\n\n #setup\n has_directory = extractor.make_has_directory(os.path.isdir)\n \n #when\n test1 = has_directory(\"./data/observed\")\n\n #result\n assert test1 is True", "def exists(path):\r\n path = encode(path)\r\n if path.lower().startswith(\"smb://\"):\r\n try:\r\n return samba.file_exists(os.path.basename(path), os.path.dirname(path)) or \\\r\n samba.folder_exists(os.path.basename(path), os.path.dirname(path))\r\n except gaierror:\r\n logger.info(\"deportesalacarta.core.filetools exists: No es posible conectar con la ruta\")\r\n platformtools.dialog_notification(\"No es posible conectar con la ruta\", path)\r\n return True\r\n else:\r\n return os.path.exists(path)", "def _is_dir(path: str)->bool:\n if _is_s3(path):\n return path.endswith(\"/\")\n else:\n return os.path.isdir(os.path.abspath(path))", "def check_folder(directory):\n global path_checked\n if not 
os.path.exists(directory):\n os.makedirs(directory)\n else:\n path_checked = True", "def exists(self, path):\n path = path.strip(\"/\")\n if not path: # it's a directory, for all narratives\n return True\n return self.file_exists(path)", "def check_dir(path):\n \n if not os.path.exists(path):\n os.makedirs(path)\n print path", "def exists(path):\n fs.exists(path)", "def test_has_directory_log(self, check_fn_true, caplog):\n\n #setup\n records = caplog.records\n has_directory = extractor.make_has_directory(os.path.isdir)\n directory_path = \"./data/observed\"\n \n #when\n test1 = has_directory(directory_path)\n\n #result\n assert len(records) == 1\n assert records[0].message == f\"It was found directory {directory_path}\"", "def dirCheck(dirPath):\n if not os.path.exists(dirPath):\n os.mkdir(dirPath)\n return dirPath", "def url_exists(url):\r\n from urllib import parse\r\n res = parse.urlparse(url)\r\n if res.scheme == 'gs':\r\n # blob_name has no '/' prefix\r\n bucket_name, blob_name = res.netloc, res.path[1:]\r\n from google.cloud import storage\r\n storage_client = storage.Client()\r\n bucket = storage_client.get_bucket(bucket_name)\r\n blob = bucket.blob(blob_name)\r\n return blob.exists()\r\n else:\r\n return os.path.exists(res.path)", "def CheckDir(dir):\n if not os.path.exists(dir):\n os.makedirs(dir)", "def check_dir(dirname):\n print('Checking directory...{}'.format(dirname))\n if dirname is not None and not is_dir(dirname):\n raise FileNotFoundError('{} is not a valid directory'.format(dirname))", "def check_dir(path):\n if not os.path.exists(path):\n os.makedirs(path)", "def is_dir(self, path):", "def exists(self, datadir):\n return False", "def check_image_dir(image_dir):\n if not os.path.isdir(image_dir):\n if verbose:\n print(\"INFO : Creating Image Storage folder %s\" % (image_dir))\n try:\n os.makedirs(image_dir)\n except OSError as err:\n print(\"ERROR : Could Not Create Folder %s %s\" % (image_dir, err))\n exit(1)", "def space_exists(self):\n space_abs = getattr(self, '__%s_space_abs' % space)\n return os.path.exists(space_abs) and os.path.isdir(space_abs)", "def check_dir(directory: str, err_string: str) -> None:\n if not pathlib.Path(directory).is_dir():\n print('\\n' + err_string + '\\n')\n raise NotADirectoryError", "def checkDirExists(dirPath):\n if not MyFile.checkFileExists(dirPath):\n MyFile.makeDir(dirPath)", "def verify_dir_exists(directory):\n if not os.path.exists(directory):\n try:\n os.makedirs(directory)\n except os.error as e:\n print 'could not create directory {}'.format(directory)\n raise e", "def path_exists(bucket, path):\n bucket = get_bucket(bucket)\n return bool(bucket.get_key(path, validate=True))", "def dfs_exists(self, path):\n try:\n df = self.dfs_ls(path)\n except Exception as e:\n if \"No such file or directory\" in str(e):\n return False\n else:\n raise e\n if len(df) == 0:\n # it is a folder\n return True\n ex = df[df.name == path]\n if len(ex) > 0:\n return True\n ex = df[df.apply(lambda r: r[\"name\"].startswith(path + \"/\"), axis=1)]\n if len(ex) > 0:\n return True\n return False", "def exists(self, path: PathLike):", "def test_doesnt_have_directory(self, check_fn_false):\n\n # setup\n has_directory = extractor.make_has_directory(os.path.isdir)\n\n # when\n test2 = has_directory(\"./data/tests\")\n\n # result\n assert test2 is False", "def _path_exist(self, stream_name:str=None, version:int=None, user_id:str=None):\n storage_path = self._get_storage_path(stream_name=stream_name, version=version, user_id=user_id)\n if self.nosql_store == 
\"hdfs\":\n status = self.fs.exists(storage_path)\n elif self.nosql_store==\"filesystem\":\n status = self.fs.path.exists(storage_path)\n else:\n raise Exception(\"Not supported File system\")\n\n if status:\n return True\n else:\n return False", "def path_exists(path):\r\n return os.path.exists(path)", "def test_doesnt_have_directory_log(self, check_fn_false, caplog):\n\n #setup\n records = caplog.records\n has_directory = extractor.make_has_directory(os.path.isdir)\n directory_path = \"./data/tests\"\n \n #when\n test2 = has_directory(directory_path)\n\n #result\n assert len(records) == 1\n assert records[0].message == f\"It wasn't found directory {directory_path}\"", "def dirChecking(dir):\n if not os.path.exists(dir):\n os.mkdir(dir)", "def checkDirectory(path,logger):\n newPath = completePath(path)\n if not os.path.exists(newPath):\n os.makedirs(newPath)\n if (logger):\n print(\"Did not found required directories. Creating them...\")\n else:\n if (logger):\n print(\"Found the required directories!\")", "def check_file_exists(directory, filename):\n\n path_to_file = os.path.join(directory, filename)\n\n if os.path.exists(path_to_file):\n return True\n\n return False", "def storage_exists(self, request):\n HttpRequest = request.to_http_info(self.api_client.configuration)\n return self.__make_request(HttpRequest, 'GET', 'StorageExist')", "def _check_file_exists(filename):\n filename = os.path.abspath(filename)\n return os.path.exists(filename) and not os.path.isdir(filename)", "def path_exists(path):\n return os.path.exists(path)", "def exists(path: str) -> bool:\n pass", "def is_directory(filename):\n\n return os.path.isdir(filename)", "def _check_directory(my_folder):\n if not os.path.exists(my_folder):\n os.makedirs(my_folder)", "def check_dir(dname):\n direc = os.path.dirname(dname)\n try:\n os.stat(direc)\n except:\n os.mkdir(direc)\n print \"Made directory %s....\" % dname\n return dname", "def check_directory(self, directory):\n directories = (\n \"bundles\",\n \"config\",\n \"layout\",\n )\n\n if not os.path.exists(directory):\n raise ValueError(\"the directory {} doesn't exist\".format(\n directory))\n\n existing_dirs = [name for name in os.listdir(directory) if \\\n os.path.isdir(directory + os.sep + name)]\n for required_dir in directories:\n if required_dir not in existing_dirs:\n raise RuntimeError(\"the {} directory doesn't exist in \" \\\n \"{}\".format(repr(required_dir), directory))\n\n abs_directory = os.path.abspath(directory)\n sys.path.append(abs_directory)\n return abs_directory", "def _check_directories(self, dist, component):\n path = join(self.repository, 'dists', dist, component, 'source')\n\n if not isdir(path):\n makedirs(path)", "def is_directory(path: str) -> bool:\n return os.path.isdir(path)", "def path_exists(self, path):\n try:\n os.stat(path)\n except OSError:\n return False\n return True", "def checkDir(dirName=None):\r\n if not os.path.exists(dirName):\r\n os.makedirs(dirName)\r\n return 0", "def check_dir(dir):\n if not os.path.exists(dir):\n print(\"[+] Creating directory for target..\")\n os.makedirs(dir)", "def is_dir(self, path: PathLike):", "def exists(path: str) -> bool:\n return _fs().exists(path)", "def check_is_directory(val, name):\n check_path_exists(val, name)\n if not os.path.isdir(val):\n raise ValueError(name + ' of value ' + val + '\" is not a legal directory.')", "def exists(path):\n return os.path.exists(path)", "def check_is_dir(path):\n if not os.path.isdir(path):\n raise DirectoryNotFoundError(path)", "def exists(path):\n try:\n 
os.stat(path)\n except OSError:\n return False\n else:\n return True", "def _assert_dir_already_exists(dirname):\n\n if not dirname:\n return\n\n assert os.path.isdir(dirname), dirname\n assert os.access(dirname, os.R_OK), dirname\n assert os.access(dirname, os.W_OK), dirname", "def _ensure_dir_exists(self, directory):\n directory = directory.strip()\n if not Path(directory).exists():\n os.mkdir(directory)", "def exists(path, bucket=os.getenv('RV_DEFAULT_S3_BUCKET')):\n matches = list_objects(path=path,\n bucket=bucket,\n include_prefix=True)\n if path in matches:\n return True\n return False", "def ensure_dir_exists(directory):\n if not os.path.exists(directory):\n os.makedirs(directory)", "def check_folder(filepath):\n if not os.path.exists(filepath):\n os.mkdir(filepath)\n return filepath", "def empty_dir(value):\n return not os.listdir(value)", "def assure_directory_exists(directory):\n\ttry:\n\t\tos.makedirs(directory)\n\texcept OSError as e:\n\t\tif e.errno != errno.EEXIST:\n\t\t\traise", "def file_exists(path):\n return os.path.exists(path)", "def exist(name: str) -> bool:\n return bool(os.path.exists(name))", "def path_exist(filepath):\n\treturn os.path.exists(os.path.basename(filepath))", "def file_exists(cls, path: Path) -> bool:\n return path.exists()", "def check_if_file_exists(path):\n\n return os.path.exists(path)", "def isdir(path):\r\n path = encode(path)\r\n if path.lower().startswith(\"smb://\"):\r\n if path.endswith(\"/\"):\r\n path = path[:-1]\r\n\r\n return samba.folder_exists(os.path.basename(path), os.path.dirname(path))\r\n else:\r\n return os.path.isdir(path)", "def verify_dir_helper(dir):\n if not os.path.exists(dir):\n print(\"Creating cache directory at {}\".format(dir))\n os.makedirs(dir)", "def exists(self):\r\n return os.path.exists(self.full_path)" ]
[ "0.73309547", "0.72222155", "0.7112823", "0.70747614", "0.70586145", "0.6962344", "0.68925315", "0.682691", "0.6792617", "0.67795223", "0.66985345", "0.6674074", "0.66731733", "0.66687524", "0.66685283", "0.66561913", "0.6646632", "0.6562051", "0.6539875", "0.65383136", "0.6502946", "0.6495761", "0.64935654", "0.6470742", "0.6448871", "0.6433611", "0.640745", "0.64007366", "0.63943964", "0.6388852", "0.6366026", "0.6363506", "0.63603055", "0.6358871", "0.6320848", "0.63033885", "0.6297926", "0.6295194", "0.6264033", "0.62388355", "0.6236685", "0.62363803", "0.62345624", "0.62320876", "0.6231624", "0.62278664", "0.62238556", "0.62202466", "0.6219641", "0.6211832", "0.62087345", "0.6208109", "0.6205777", "0.6192509", "0.6190458", "0.6190242", "0.61863977", "0.61835897", "0.61827594", "0.6177577", "0.6175653", "0.617551", "0.6167866", "0.61563635", "0.6152197", "0.6150346", "0.61469525", "0.6145741", "0.61451936", "0.613109", "0.6111947", "0.6110779", "0.6101181", "0.6092246", "0.6085936", "0.6085196", "0.6064708", "0.6063818", "0.6042635", "0.6039731", "0.6028661", "0.6022395", "0.6014007", "0.6013787", "0.59814703", "0.59808975", "0.5952333", "0.594808", "0.59311754", "0.5918249", "0.59077674", "0.5904358", "0.58937776", "0.5889575", "0.5889413", "0.58890545", "0.5887754", "0.5877723", "0.58772796", "0.5877219" ]
0.7083334
3
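
The positive document in the row above answers the cloud-storage query by delegating to Dropbox helper functions (normalise_path, get_dropbox, is_folder), whereas most of its negatives check purely local paths. For contrast, the sketch below performs the same "does this folder exist?" check against an S3-style object store using boto3; the use of boto3 and the bucket/prefix arguments are assumptions for illustration, not something this dataset row itself contains.

    import boto3
    from botocore.exceptions import ClientError

    def s3_folder_exists(bucket, prefix):
        # Illustrative only: the dataset's positive document targets Dropbox, not S3.
        client = boto3.client("s3")
        if not prefix.endswith("/"):
            prefix += "/"                  # treat the prefix as a "directory"
        try:
            # Ask for at most one key under the prefix; a single hit proves existence.
            response = client.list_objects_v2(Bucket=bucket, Prefix=prefix, MaxKeys=1)
        except ClientError:
            return False                   # missing bucket or denied access
        return response.get("KeyCount", 0) > 0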
Currently not implemented. The first print shows the modification date of the video file; the second print shows the creation date of the video file, literally the time when it was written to the folder.
def creation_date_video(path_to_file):
    print("Last modified: %s" % time.ctime(os.path.getmtime(path_to_file)))
    print("Created: %s" % time.ctime(os.path.getctime(path_to_file)))
    # return os.path.getctime(path_to_file)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_timestamps(dir_video):\n print(\"Adding creation dates to file names\")\n os.chdir(dir_video)\n # get only top level dir info\n dir_data_video_files = next(os.walk(dir_video))\n list_video_files = dir_data_video_files[2] # get file list\n for f_name in list_video_files:\n if GOPRO_PATTERN.search(f_name):\n f_time = time.strftime(r\"%Y-%m-%d_%H-%M\", time.localtime(os.path.getctime(f_name)))\n os.rename(f_name, f\"{f_time}_{f_name}\")", "def get_file_modification_date() -> str:\n file_modification_date = datetime.now().strftime(\"%d.%m.%Y\")\n print(file_modification_date)\n return file_modification_date", "def creation_date(path_to_file):\n if platform.system() == 'Windows':\n print(\"last modified: %s\" % time.ctime(os.path.getmtime(path_to_file)))\n modtime = time.ctime(os.path.getmtime(path_to_file))\n \n print(\"created: %s\" % time.ctime(os.path.getctime(path_to_file)))\n modtime = datetime.datetime.strptime(modtime, \"%a %b %d %H:%M:%S %Y\")\n modtime = datetime.datetime.strftime(modtime, \"%Y-%m-%d\")\n return modtime", "def printVersionInfo():\n #pass\n pathname = sys.argv[0]\n myMtime = os.stat(pathname)[ST_MTIME]\n modDate = CONFIG['utils'].mktime(myMtime)\n logIt(\"Python Script: \" + pathname + \"\\n\")\n logIt(\"Version Date: \" + modDate + \"\\n\")", "def getTimeToFileName(self):\n return self.sNow.strftime(\"%d-%m-%Y_%H-%M-%S\")", "def media_file_info(self):\n\n if self.observationId and self.playerType == VLC:\n\n media = self.mediaplayer.get_media()\n\n logging.info(\"State: {}\".format(self.mediaplayer.get_state()))\n logging.info(\"Media (get_mrl): {}\".format(bytes_to_str(media.get_mrl())))\n logging.info(\"media.get_meta(0): {}\".format(media.get_meta(0)))\n logging.info(\n \"Track: {}/{}\".format(self.mediaplayer.video_get_track(), self.mediaplayer.video_get_track_count()))\n logging.info(\"number of media in media list: {}\".format(self.media_list.count()))\n logging.info(\"get time: {} duration: {}\".format(self.mediaplayer.get_time(), media.get_duration()))\n logging.info(\"Position: {} %\".format(self.mediaplayer.get_position()))\n logging.info(\"FPS: {}\".format(self.mediaplayer.get_fps()))\n logging.info(\"Rate: {}\".format(self.mediaplayer.get_rate()))\n logging.info(\"Video size: {}\".format(self.mediaplayer.video_get_size(0)))\n logging.info(\"Scale: {}\".format(self.mediaplayer.video_get_scale()))\n logging.info(\"Aspect ratio: {}\".format(self.mediaplayer.video_get_aspect_ratio()))\n logging.info(\"is seekable? {0}\".format(self.mediaplayer.is_seekable()))\n logging.info(\"has_vout? {0}\".format(self.mediaplayer.has_vout()))\n\n vlc_output = (\"State: {}<br>\"\n \"Media Resource Location: {}<br>\"\n \"File name: {}<br>\"\n \"Track: {}/{}<br>\"\n \"Number of media in media list: {}<br>\"\n \"get time: {}<br>\"\n \"duration: {}<br>\"\n \"Position: {} %<br>\"\n \"FPS: {}<br>\"\n \"Rate: {}<br>\"\n \"Video size: {}<br>\"\n \"Scale: {}<br>\"\n \"Aspect ratio: {}<br>\"\n \"is seekable? {}<br>\"\n \"has_vout? 
{}<br>\").format(self.mediaplayer.get_state(),\n bytes_to_str(media.get_mrl()),\n media.get_meta(0),\n self.mediaplayer.video_get_track(),\n self.mediaplayer.video_get_track_count(),\n self.media_list.count(),\n self.mediaplayer.get_time(),\n self.convertTime(media.get_duration() / 1000),\n self.mediaplayer.get_position(),\n self.mediaplayer.get_fps(),\n self.mediaplayer.get_rate(),\n self.mediaplayer.video_get_size(0),\n self.mediaplayer.video_get_scale(),\n self.mediaplayer.video_get_aspect_ratio(),\n \"Yes\" if self.mediaplayer.is_seekable() else \"No\",\n \"Yes\" if self.mediaplayer.has_vout() else \"No\"\n )\n\n self.results = dialog.ResultsWidget()\n self.results.setWindowTitle(programName + \" - Media file information\")\n self.results.ptText.setReadOnly(True)\n\n self.results.ptText.appendHtml(\"<b>VLC analysis</b><hr>\" + vlc_output)\n\n # FFmpeg analysis\n self.results.ptText.appendHtml(\"<br><b>FFmpeg analysis</b><hr>\")\n for nplayer in self.pj[OBSERVATIONS][self.observationId][FILE]:\n for filePath in self.pj[OBSERVATIONS][self.observationId][FILE][nplayer]:\n media_full_path = project_functions.media_full_path(filePath, self.projectFileName)\n # nframes, duration_ms, duration, fps, hasVideo, hasAudio = accurate_media_analysis(self.ffmpeg_bin, media_full_path)\n\n r = utilities.accurate_media_analysis2(self.ffmpeg_bin, media_full_path)\n nframes = r[\"frames_number\"]\n\n if \"error\" in r:\n self.results.ptText.appendHtml(\n \"File path: {filePath}<br><br>{error}<br><br>\".format(filePath=media_full_path,\n error=r[\"error\"]))\n else:\n self.results.ptText.appendHtml(\n \"File path: {}<br>Duration: {}<br>Bitrate: {}k<br>FPS: {}<br>Has video: {}<br>Has audio: {}<br><br>\".\n format(media_full_path, self.convertTime(r[\"duration\"]), r[\"bitrate\"], r[\"fps\"],\n r[\"has_video\"], r[\"has_audio\"]))\n\n self.results.ptText.appendHtml(\"Total duration: {} (hh:mm:ss.sss)\".\n format(self.convertTime(sum(self.duration) / 1000)))\n\n self.results.show()\n\n else:\n\n fn = QFileDialog(self).getOpenFileName(self, \"Select a media file\", \"\", \"Media files (*)\")\n filePath = fn[0] if type(fn) is tuple else fn\n\n if filePath:\n self.results = dialog.ResultsWidget()\n self.results.setWindowTitle(programName + \" - Media file information\")\n self.results.ptText.setReadOnly(True)\n self.results.ptText.appendHtml(\"<br><b>FFmpeg analysis</b><hr>\")\n # nframes, duration_ms, duration, fps, hasVideo, hasAudio = accurate_media_analysis(self.ffmpeg_bin, filePath)\n r = utilities.accurate_media_analysis2(self.ffmpeg_bin, filePath)\n if \"error\" in r:\n self.results.ptText.appendHtml(\n \"File path: {filePath}<br><br>{error}<br><br>\".format(filePath=filePath, error=r[\"error\"]))\n else:\n self.results.ptText.appendHtml(\n \"File path: {}<br>Duration: {}<br>Bitrate: {}k<br>FPS: {}<br>Has video: {}<br>Has audio: {}<br><br>\".\n format(filePath, self.convertTime(r[\"duration\"]), r[\"bitrate\"], r[\"fps\"], r[\"has_video\"],\n r[\"has_audio\"]))\n\n self.results.show()", "def last_modified():\n return \"Last modified: %s\" % time.ctime(os.path.getmtime(FILE_NAME))", "def check_video_timestamps(movie_file, desired_format='.mp4', desired_framerate=30):\n\n check_video_format(movie_file, desired_format='.mp4', original_format='.avi')\n\n new_movie_file = movie_file+'_tt'+desired_format\n if not os.path.isfile(new_movie_file):\n #Convert file to 30 fps\n cmd = ['ffmpeg', '-i', movie_file+desired_format]\n cmd += ['-r', str(desired_framerate)]\n cmd += ['-y', movie_file+'_t'+desired_format]\n 
cmd_string = ''.join([\"%s \" % el for el in cmd]) \n #print '-->Running: ', cmd_string\n p = subprocess.Popen(cmd, shell=False)\n p.wait()\n\n #Add timecode text to video\n cmd = 'ffmpeg -i '+movie_file+'_t'+desired_format+' -vf drawtext=\\\"fontfile=/opt/X11/share/fonts/TTF/VeraMoBd.ttf: timecode=\\'00\\:00\\:00\\:00\\':rate=30: fontcolor=white@0.8: x=7: y=460\\\" -an -y '+movie_file+'_tt'+desired_format\n args = shlex.split(cmd)\n #print args\n p = subprocess.Popen(args, shell=False)\n p.wait()\n\n os.remove(movie_file+'_t'+desired_format)\n\n return new_movie_file", "def get_creation_time(ts):\n path_to_embed_file = os.path.join(DATA_DIR, STUDY, \"experiment_files\", \"experiment_\"+ ts, \"triplet_training_validation_embeddings.h5\")\n\n if os.path.exists(path_to_embed_file):\n stat = os.stat(path_to_embed_file)\n try:\n return stat.st_birthtime\n except AttributeError:\n # We're probably on Linux. No easy way to get creation dates here,\n # so we'll settle for when its content was last modified.\n return stat.st_mtime\n else:\n print (\"here, path is: \", path_to_embed_file)\n return None", "def video_times():\n p = parse_cmdline(get_parser=get_parser_times)\n log.setup_main_handler(\n mods=(\"fogtools\", \"typhon\", \"fogpy\", \"sattools\", \"fcitools\", \"satpy\",\n \"pyresample\"),\n level=logging.DEBUG)\n vis.show_video_abi_glm_times(\n start_date=p.start_time,\n end_date=p.end_time,\n img_out=p.filename_pattern_image,\n vid_out=p.filename_pattern_video,\n out_dir=p.outdir,\n sector=p.sector,\n area=p.area)\n print(\"Files written to:\", p.outdir)", "def record_button_action(self):\r\n #on button click Stop/Start recording\r\n if self.RECORD_FLAG == True: #if recording\r\n self.RECORD_FLAG = False #stop recording\r\n return #and quit\r\n \r\n #frame height width\r\n size = (int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH)),\r\n int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))\r\n \r\n # ** filename - 'date_time.avi'\r\n #get time date\r\n dt = datetime.datetime.now()\r\n # %b month - %Y year - day %d - %H Hour - %M minute - %S second\r\n # *Zero padded\r\n str_dt = (str(dt.strftime('%b')) \r\n + str(dt.strftime('%Y')) \r\n + str(dt.strftime('%d'))\r\n + \"_\"\r\n + str(dt.strftime('%H'))\r\n + str(dt.strftime('%M'))\r\n + str(dt.strftime('%S'))\r\n )\r\n #print(str_dt)\r\n\r\n self.video_writer = cv2.VideoWriter(\r\n f\"{PARENT_PATH}//{VIDEO_RECORD_DIR}//rec{str_dt}.avi\",\r\n cv2.VideoWriter_fourcc('I', '4', '2', '0'),\r\n FPS, size)\r\n\r\n self.RECORD_FLAG = True #start recording\r\n return", "def get_mod_time(self):\n if self.file_meta[:2] == b'bp':\n file_meta_plist = ccl_bplist.load(BytesIO(self.file_meta))\n raw_date_time = file_meta_plist['$objects'][1]['LastModified']\n converted_time = datetime.datetime.fromtimestamp(raw_date_time)\n converted_time = converted_time.timetuple()\n return converted_time\n else:\n file_meta_plist = plistlib.loads(self.file_meta)\n return file_meta_plist['modified'].timetuple()", "def process_file(self, file_info, sort_option):\n video_orig_writeback = open(\n str(file_info.current_folder + '/' + file_info.filename), 'rb')\n\n video_file_orig = video_orig_writeback.read()\n video_orig_writeback.close()\n\n time = os.path.getmtime(\n file_info.current_folder + \"\\\\\" + file_info.filename)\n date = datetime.datetime.fromtimestamp(\n time).strftime('%Y-%m-%d %H:%M:%S')\n file_name = helper.create_filename_for_file(\n sort_option, MP4[1:], date)\n helper.write_file(file_name, file_info.destination + \"\\\\\", video_file_orig)\n self.incr_writes()", "def 
getLastModifiedTime(self): #$NON-NLS-1$\r", "def mtime(name):", "def last_video(self) -> str:\n return max(glob.glob(VIDEOS_DIR), key=os.path.getmtime)", "def get_upload_date(self, video_ID):\n self.cur.execute(\"SELECT upload_date FROM videos WHERE video_ID = \\\"{}\\\"\".format(video_ID))\n return self.cur.fetchone()[0]", "def mtime(self):\r\n return self.info().mtime", "def watch_movie():\r\n if os.path.isfile('files/final_movie.mp4'): # if the file exists\r\n with open('files/final_movie.mp4', 'rb') as f:\r\n video_data = f.read()\r\n st.video(video_data)\r\n else: # if the file doesn't exist, let the user know\r\n st.header(\"You haven't created a movie yet!\")", "def timestamp():\n print(datetime.datetime.now().strftime(\"%A, %d. %B %Y %I:%M%p\") + \" \" + __file__)", "def creation_month(path_to_file):\n if platform.system() == 'Windows':\n print(\"last modified: %s\" % time.ctime(os.path.getmtime(path_to_file)))\n modtime = time.ctime(os.path.getmtime(path_to_file))\n \n print(\"created: %s\" % time.ctime(os.path.getctime(path_to_file)))\n modtime = datetime.datetime.strptime(modtime, \"%a %b %d %H:%M:%S %Y\")\n modtime = datetime.datetime.strftime(modtime, \"%B\")\n return modtime", "def get_info(frame_or_sketch_or_vid_path):\n if \".mp4\" not in frame_or_sketch_or_vid_path:\n # invalid file path ()\n # TODO: allow other video extensions\n return None\n\n ret_dict = {}\n ret_dict[\"path\"] = frame_or_sketch_or_vid_path\n ret_dict[\"file_name\"] = utils.get_file_name(frame_or_sketch_or_vid_path)\n ret_dict[\"file_ext\"] = utils.get_file_ext(frame_or_sketch_or_vid_path)\n\n # find video file name = video_id\n file_dir_last = utils.get_nth_parentdir(frame_or_sketch_or_vid_path)\n\n # file_dir_full = utils.get_file_path(frame_or_sketch_or_vid_path)\n # file_name = utils.get_full_file_name(frame_or_sketch_or_vid_path)\n\n video_id = f\"{file_dir_last.split('.mp4_')[0]}.mp4\"\n start_end_time = file_dir_last.split(\".mp4_\")[1]\n start_end_time_parts = start_end_time.split(\"_\")\n\n # OLD\n # tmp = frame_or_sketch_or_vid_path.rsplit(\"video_\")[1].replace(\".mp4\", \"\")\n # tmp_parts = tmp.split(\"/\")[0].split(\"_\") # remove frame part if existent\n # ret_dict[\"video_id\"] = tmp_parts[0]\n # ret_dict[\"start_time\"] = float(tmp_parts[1])\n # ret_dict[\"end_time\"] = ret_dict[\"start_time\"]\n\n ret_dict[\"video_id\"] = video_id\n ret_dict[\"start_time\"] = float(start_end_time_parts[0])\n if len(start_end_time_parts) > 1:\n ret_dict[\"end_time\"] = float(start_end_time_parts[1])\n\n if ret_dict[\"file_ext\"] == \".jpg\":\n ret_dict[\"frame\"] = int(ret_dict[\"file_name\"].split(\"_\")[1])\n elif ret_dict[\"file_ext\"] == \".json\":\n ret_dict[\"frame\"] = get_sketch_frame(ret_dict[\"path\"])\n else:\n ret_dict[\"fps\"] = get_fps(ret_dict[\"path\"])\n ret_dict[\"start_frame\"] = time_to_frame(ret_dict[\"start_time\"], ret_dict[\"fps\"])\n ret_dict[\"end_frame\"] = time_to_frame(ret_dict[\"end_time\"], ret_dict[\"fps\"])\n return ret_dict", "def time(self):\r\n return conf.lib.clang_getFileTime(self)", "def last_videos_recorded(self) -> list:\n return sorted(glob.glob(VIDEOS_DIR), key=os.path.getmtime)[-20:]", "def GetModTime(self):\n return self.file.ModTime", "def writeMetadata(path,filename,filetype,ObjectList,VideoRecorder = None):\n\tprint('writing metadata, for saving to {}'.format(path+filename+'.pickle'))\n\tnow = datetime.datetime.now() # current date and time\n\tmetadata = OrderedDict()\n\tmetadata['Path'] = path\n\tmetadata['Filename'] = filename\n\tmetadata['Format'] = 
filetype\n\tmetadata['datetime'] = now\n\tv = cv2.VideoCapture(path+filename+filetype)\n\tmetadata['Frames'] = v.get(cv2.CAP_PROP_FRAME_COUNT)\n\n\tif VideoRecorder is not None:\n\t\tfps = VideoRecorder.FPStracker.fps() # if you have a more accurate measure\n\telse:\n\t\ttry:\n\t\t\tfps = loadData(path,filename)[0]['FPS']\n\t\texcept:\n\t\t\tfps = None\n\t\tif fps is not None:\n\t\t\tpass\n\t\telse:\n\t\t\tfps = v.get(cv2.CAP_PROP_FPS) # trusting camera FPS\n\tmetadata['FPS'] = fps\n\tmetadata['Length'] = metadata['Frames']/metadata['FPS']\n\tmetadata['Resolution'] = [v.get(3),v.get(4)]\n\tv.release()\n\t# Save the object description (not the x,y,theta data: no processing yet)\n\t# and tracker coordinates for every object\n\tmetadata['Num Objects'] = len(ObjectList)\n\tfor i,object in enumerate(ObjectList):\n\t\tkey = \"object{}\".format(i)\n\t\tt1 = object.Tracker1\n\t\tt2 = object.Tracker2\n\t\tcoord1 = [t1.x,t1.y,t1.w,t1.h,t1.ang]\n\t\tcoord2 = [t2.x,t2.y,t2.w,t2.h,t2.ang]\n\t\tmetadata[key+'_ID'] = object.ID\n\t\tmetadata[key+'_profile'] = object.IDprofile\n\t\tmetadata[key+'_Tracker1_Coords'] = coord1\n\t\tmetadata[key+'_Tracker1_BGR_range'] = t1.bgrRange\n\t\tmetadata[key+'_Tracker2_Coords'] = coord2\n\t\tmetadata[key+'_Tracker2_BGR_range'] = t2.bgrRange\n\treturn metadata", "def _GetUpdateTime(filename):\n stat_info = os.stat(filename)\n return (stat_info.st_atime, stat_info.st_mtime)", "def creation_year(path_to_file):\n if platform.system() == 'Windows':\n print(\"last modified: %s\" % time.ctime(os.path.getmtime(path_to_file)))\n modtime = time.ctime(os.path.getmtime(path_to_file))\n \n print(\"created: %s\" % time.ctime(os.path.getctime(path_to_file)))\n modtime = datetime.datetime.strptime(modtime, \"%a %b %d %H:%M:%S %Y\")\n modtime = datetime.datetime.strftime(modtime, \"%Y\")\n return modtime", "def get_source_ctime(self):\n return self.source_file_ctime", "def _get_video_filename(self):\n fnd = self._get_session_dir()\n self.video_number += 1\n fn = os.path.join(fnd, 'V%4.4d.avi' % self.video_number)\n return fn", "def get_mov_timestamps(filename):\n\n atom_header_size = 8\n # difference between Unix epoch and QuickTime epoch, in seconds\n epoch_adjuster = 2082844800\n\n creation_time = modification_time = None\n\n # search for moov item\n with open(filename, \"rb\") as f:\n while True:\n atom_header = f.read(atom_header_size)\n # ~ print('atom header:', atom_header) # debug purposes\n if atom_header[4:8] == b'moov':\n break # found\n else:\n atom_size = struct.unpack('>I', atom_header[0:4])[0]\n f.seek(atom_size - 8, 1)\n\n # found 'moov', look for 'mvhd' and timestamps\n atom_header = f.read(atom_header_size)\n if atom_header[4:8] == b'cmov':\n raise RuntimeError('moov atom is compressed')\n elif atom_header[4:8] != b'mvhd':\n raise RuntimeError('expected to find \"mvhd\" header.')\n else:\n f.seek(4, 1)\n creation_time = struct.unpack('>I', f.read(4))[0] - epoch_adjuster\n creation_time = datetime.fromtimestamp(creation_time)\n if creation_time.year < 1990: # invalid or censored data\n creation_time = None\n\n modification_time = struct.unpack('>I', f.read(4))[0] - epoch_adjuster\n modification_time = datetime.fromtimestamp(modification_time)\n if modification_time.year < 1990: # invalid or censored data\n modification_time = None\n\n return creation_time, modification_time", "def Check_status_time_stamp():\n X = filecmp.cmp(per, per_old, shallow=False)\n if X is True:\n output = open(new, \"r\")\n output2 = open(per, \"r\")\n print(16 * \"#\", \"NO CHANGES\", 18 * 
\"#\", \"\\n\")\n print(\" Updated:\", output.read().replace(\"\\n\", ' '))\n print( output2.read())\n output.close()\n output2.close()\n elif X is False:\n copy2(new, old)\n log2 = open(new, \"w\")\n dt = datetime.now()\n date = dt.strftime(\"%m/%d/%Y %I:%M%p\")\n print(date, file=log2)\n log2.close()\n output = open(old, \"r\")\n output2 = open(per_old, \"r\")\n print(18 * \"-\", \" CHANGES\", 18 * \"-\", \"\\n\")\n print(18 * \"#\", \"BEFORE\", 18 * \"#\", \"\\n\")\n print(\" Updated:\", output.read().replace(\"\\n\", ' '))\n print( output2.read())\n output.close()\n output2.close()\n output = open(new, \"r\")\n output2 = open(per, \"r\")\n print(18 * \"#\", \"AFTER\", 18 * \"#\", \"\\n\")\n print(\" Updated:\", output.read().replace(\"\\n\", ' '))\n print( output2.read(), \"\\n\")\n output.close()\n output2.close()", "def get_source_stamp(self):", "def get_source_stamp(self):", "def to_filetime(self):\n try:\n dt_obj = duparser.parse(timestamp)\n self.out_filetime = str(int((dt_obj - self.epoch_1970).total_seconds() * self.hundreds_nano + self.epoch_as_filetime))\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_filetime = False\n return self.out_filetime", "def get_file_date(self, file: str) -> date:", "def convert_timestamp_info(data):\n videos = data.get('video_files', [])\n images = data.get('image_files', [])\n\n # judge the exits of video and images\n upload_path = current_app.config['UPLOAD_FOLDER']\n storage_path = current_app.config['FILE_STORAGE_PATH']\n title = data.get('title')\n storage_dir = os.path.join(storage_path, title)\n\n pathlib.Path(storage_dir).mkdir(parents=True, exist_ok=True)\n\n for video in videos:\n video_name = video.get('name')\n video_upload_path = os.path.join(upload_path, video.get('num'))\n video_storage_path = os.path.join(storage_dir, video_name)\n shutil.move(video_upload_path, video_storage_path)\n video['file_path'] = os.path.join(title, video_name)\n del video['num']\n\n for image in images:\n image_name = image.get('name')\n image_upload_path = os.path.join(upload_path, image.get('num'))\n image_storage_path = os.path.join(storage_dir, image_name)\n shutil.move(image_upload_path, image_storage_path)\n image['file_path'] = os.path.join(title, image_name)\n del image['num']\n\n return data", "def mtime(path):", "def get_mtime(self):\n storage = getattr(self._file, \"storage\", None)\n if storage:\n return storage.modified_time(self._file.name)\n return super(FileAsset, self).get_mtime()", "def get_video_title_releaser_release_time(self, url):\n video_id = ' '.join(re.findall('id.*html', url))\n browser = webdriver.Chrome()\n browser.get(url)\n title = browser.find_element_by_id('subtitle').text\n releaser = browser.find_element_by_id('module_basic_sub').text\n releaser = releaser.replace('+订阅','')\n releaser = releaser.replace(' ','')\n try:\n rt_midstep = browser.find_element_by_class_name('video-status').text\n rt_midstep = rt_midstep.replace('上传于','')\n rt_midstep = rt_midstep.replace(' ','')\n release_time = int(datetime.datetime.strptime(rt_midstep,'%Y-%m-%d').timestamp()*1e3)\n except:\n release_time = 0\n fetch_time = int(datetime.datetime.timestamp(datetime.datetime.now())*1e3)\n D0 = {'video_id': video_id,\n 'title': title,\n 'release_time': release_time,\n 'url': url,\n 'fetch_time': fetch_time}\n return D0", "def modified(self) -> datetime:\n # TODO: Should this be overridden for LocalDirectoryAsset?\n return 
datetime.fromtimestamp(self.filepath.stat().st_mtime).astimezone()", "def printInfo(totaltime, vid, cpath):\r\n infotxt = open(cpath + 'Resize Info' + '.txt', 'a')\r\n info = str('executeTime: %f' % totaltime + '\\n')\r\n converageRate = totaltime / (vid.get(7))\r\n info += str('average converage rate is: %f' % converageRate + 'f/s' + '\\n')\r\n frameNum = vid.get(7)\r\n info += str('frame number is %d' % frameNum + '\\n')\r\n fps = vid.get(5)\r\n info += str('frame rate is %f' % fps + '\\n')\r\n\r\n infotxt.write(info)\r\n infotxt.close()\r\n\r\n # print(info)\r\n vid.release()\r\n return info", "def modification_time(self) -> str:\n return pulumi.get(self, \"modification_time\")", "def create_video():\n print(\"Generating output video\")\n frame_array = []\n files = [f for f in os.listdir(MODIFIED_FRAMES_DIR) if isfile(join(MODIFIED_FRAMES_DIR, f))]\n #for sorting the file names properly\n # files.sort(key = lambda x: x[3:-4])\n files = sorted(files,key=lambda x: int(os.path.splitext(x)[0]))\n for i in range(len(files)):\n filename= MODIFIED_FRAMES_DIR + files[i]\n # print(filename)\n #reading each files\n img = cv2.imread(filename)\n height, width, layers = img.shape\n size = (width,height)\n \n #inserting the frames into an image array\n frame_array.append(img)\n \n out = cv2.VideoWriter(OUTPUT_FILE,cv2.VideoWriter_fourcc(*'DIVX'), FRAME_RATE, size)\n for i in range(len(frame_array)):\n # writing to a image array\n out.write(frame_array[i])\n out.release()\n print(\"Output video generated successfully...\")\n\n # img_array = []\n # for filename in glob.glob(MODIFIED_FRAMES_DIR+'/*.jpg'):\n # img = cv2.imread(filename)\n # height, width, layers = img.shape\n # size = (width,height)\n # img_array.append(img)\n\n # height, width, layers = img_array[0].shape\n # size = (width,height)\n # out = cv2.VideoWriter('output.mov',cv2.VideoWriter_fourcc(*'DIVX'), 15, size) \n # for i in range(len(img_array)):\n # out.write(img_array[i])\n # out.release()", "def creation_date(path_to_file):\n if platform.system() == 'Windows':\n return os.path.getctime(path_to_file)\n else:\n stat = os.stat(path_to_file)\n try:\n return stat.st_birthtime\n except AttributeError:\n return stat.st_mtime", "def to_filetime(self):\n ts_type = self.ts_types['filetime']\n try:\n dt_obj = duparser.parse(self.timestamp)\n if hasattr(dt_obj.tzinfo, '_offset'):\n dt_tz = dt_obj.tzinfo._offset.total_seconds()\n dt_obj = duparser.parse(self.timestamp, ignoretz=True)\n else:\n dt_tz = 0\n minus_epoch = dt_obj - self.epoch_1601\n calculated_time = minus_epoch.microseconds + ((minus_epoch.seconds - int(dt_tz)) * 1000000) + (minus_epoch.days * 86400000000)\n indiv_output = str(struct.pack(\">Q\", int(calculated_time*10)).hex())\n self.out_filetime = str(indiv_output[8:]) + \":\" + str(indiv_output[:8])\n ts_output = str(\"{}\\t{}\".format(ts_type, self.out_filetime))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_filetime = ts_output = False\n return self.out_filetime, ts_output", "def last_modified(self):\n return os.path.getmtime(self.filename)", "def mtime(self) -> str:\n return self._mtime", "def getCreationTime(self): #$NON-NLS-1$\r", "def time_stamping(file):\n time_stamp = datetime.now().date()\n\n # 1st remove path like /home/\n path_file = file.split(\"/\")\n # 2nd removes file formats\n file_ = path_file[len(path_file)-1].split(\".\", 1)\n path_file.pop()\n # 3rd add time_stamp\n file_[0] = 
str(file_[0])+\"_\"+str(time_stamp)\n # 4th all is back together\n file = '.'.join(map(str, file_))\n\n path_file.append(file)\n file = '/'.join(map(str, path_file))\n print(file)\n return file", "def out_file_core():\n date = str(datetime.datetime.now().strftime(\"%Y%d%m_%H%M%S\"))\n return f\"log-{date}-{str(uuid.uuid4())}\"", "def modification_timestamp(self):\n return parse_windows_timestamp(self.unpack_qword(0xC))", "def video_path(self):\n return self.file_path", "def video_path(self):\n return self.file_path", "def Results(self):\r\n try:\r\n numOfFiles = 0\r\n file = str(filenames).split(',')\r\n for file in filenames:\r\n if os.path.exists(file):\r\n numOfFiles += 1\r\n print('%d' % numOfFiles + ' videos resized!')\r\n info = 'totaltime: ' + str(datetime.timedelta(seconds=totaltime))\r\n print(info)\r\n except NameError:\r\n info = ''\r\n print('no totaltime passed')\r\n return info", "def timestamp(self):\n\t\tcurrent_stamp = 0\n\t\tcurrent_size = 0\n\t\ttry:\n\t\t\tst = stat(self._file)\n\t\t\tif st:\n\t\t\t\tcurrent_stamp = int(st.st_mtime)\n\t\t\t\tcurrent_size = st.st_size\n\t\t\t\t# Fake a changed mtime if size is different. Subsequent processing\n\t\t\t\t# only depends on the mtime field.\n\t\t\t\tif current_size != self._last_size:\n\t\t\t\t\tcurrent_stamp = int(time())\n\t\t\t\t\tMODULE.info(\"Size of '%s': %s -> %s\" % (self._file,self._last_size,current_size))\n\t\t\t\t\tself._last_size = current_size\n\t\tfinally:\n\t\t\tpass\n\n\t\tif current_stamp == self._last_stamp:\n\t\t\tself._unchanged_count += 1\n\t\t\tif self._unchanged_count >= self._count:\n\t\t\t\t# Don't record new timestamp if MD5 of file is the same\n\t\t\t\thash = md5(open(self._file).read()).hexdigest()\n\t\t\t\tif hash != self._last_md5:\n\t\t\t\t\tself._last_md5 = hash\n\t\t\t\t\tself._last_returned_stamp = current_stamp\n\t\t\t\telse:\n\t\t\t\t\tMODULE.info(\"Hash of '%s' unchanged\" % self._file)\n\t\telse:\n\t\t\tself._unchanged_count = 0\n\t\t\tself._last_stamp = current_stamp\n\n\t\treturn self._last_returned_stamp", "def get_video_publishing_date(self, response):\n return response.css(\".watch-time-text\").extract_first(default='')", "def save_current_run_time():\n # path = \"/Users/szou/Downloads/bu/happydogs/analytics_happydogs/last_time_run\" # hard coding this due to CRON, but will remove later\n output_file = open(\"last_time_run\", \"w\")\n current_time_string = datetime.datetime.strftime(\n datetime.datetime.now(), \"%Y-%m-%d %H:%M:%S\"\n )\n output_file.write(current_time_string)\n print(current_time_string)\n output_file.close()", "def GetGsutilVersionModifiedTime():\n if not VERSION_FILE:\n return 0\n return int(os.path.getmtime(VERSION_FILE))", "def last_modified_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_modified_time\")", "def time_modified(self) -> str:\n return pulumi.get(self, \"time_modified\")", "def time_modified(self) -> str:\n return pulumi.get(self, \"time_modified\")", "def creation_time_str(self):\n return \"%Y/%m/%d %I:%M:%S\".format(self.creation_time)", "def last_file_updated(self):\n query = '*.xml'\n keymap_files = glob.glob(query)\n\n sorted_files = sorted(keymap_files, key=self.mtime, reverse=1)\n last_modified_file = sorted_files[0]\n second_last_modified_file = sorted_files[1]\n\n t1 = self.mtime(last_modified_file)\n t2 = self.mtime(second_last_modified_file)\n\n logger.debug('Last modified time: {0}'.format(t1))\n logger.debug('Second Last modified time: {0}'.format(t2))\n\n last_modified_time = self.mtime(last_modified_file)\n 
last_access_time = self.atime(last_modified_file)\n\n if sys.platform == \"win32\":\n logger.info('Detected Windows environment')\n # self.regenerate_osx(last_access_time, last_modified_time)\n elif sys.platform == 'darwin':\n logger.info('Detected OSX environment')\n # self.regenerate_windows(last_access_time, last_modified_time)\n else:\n logger.error('Unhandled platform: {0}'.format(sys.platform))\n pass", "def DateModified(filepath, stringformat=False):\n time_in_s = os.path.getmtime(filepath)\n if stringformat:\n return time.ctime(time_in_s)\n else:\n return time_in_s", "def snapshot(self):\n\n if self.pj[OBSERVATIONS][self.observationId][TYPE] in [MEDIA]:\n\n if self.playerType == VLC:\n\n if self.playMode == FFMPEG:\n\n for idx, media in enumerate(self.pj[OBSERVATIONS][self.observationId][FILE][PLAYER1]):\n if self.FFmpegGlobalFrame < sum(self.duration[0:idx + 1]):\n\n p = pathlib.Path(media)\n snapshotFilePath = str(p.parent / \"{}_{}.png\".format(p.stem, self.FFmpegGlobalFrame))\n\n if self.detachFrameViewer or self.second_player():\n self.frame_viewer1.lbFrame.pixmap().save(snapshotFilePath)\n elif not self.detachFrameViewer:\n self.lbFFmpeg.pixmap().save(snapshotFilePath)\n self.statusbar.showMessage(\"Snapshot player #1 saved in {}\".format(snapshotFilePath), 0)\n break\n\n if self.second_player():\n for idx, media in enumerate(self.pj[OBSERVATIONS][self.observationId][FILE][PLAYER2]):\n if self.FFmpegGlobalFrame2 < sum(self.duration2[0:idx + 1]):\n p = pathlib.Path(media)\n snapshotFilePath = str(p.parent / \"{}_{}.png\".format(p.stem, self.FFmpegGlobalFrame2))\n\n self.frame_viewer2.lbFrame.pixmap().save(snapshotFilePath)\n self.statusbar.showMessage(\"Snapshot player #2 saved in {}\".format(snapshotFilePath), 0)\n break\n\n else: # VLC\n\n current_media_path = url2path(self.mediaplayer.get_media().get_mrl())\n # TODO: replace with pathlib\n dirName, fileName = os.path.split(current_media_path)\n self.mediaplayer.video_take_snapshot(0, \"{dirName}{sep}{fileNameWOExt}_{time}.png\".format(\n dirName=dirName,\n sep=os.sep,\n fileNameWOExt=os.path.splitext(fileName)[0],\n time=self.mediaplayer.get_time()),\n 0, 0)\n\n # check if multi mode\n # second video together\n if self.simultaneousMedia:\n current_media_path = url2path(self.mediaplayer2.get_media().get_mrl())\n\n dirName, fileName = os.path.split(current_media_path)\n self.mediaplayer2.video_take_snapshot(0, \"{dirName}{sep}{fileNameWOExt}_{time}.png\".format(\n dirName=dirName,\n sep=os.sep,\n fileNameWOExt=os.path.splitext(fileName)[0],\n time=self.mediaplayer2.get_time()),\n 0, 0)", "def mtime(self):\n\n return os.stat(self.filename).st_mtime", "def last_modified(self) -> str:\n\t\tif self.name == \"\":\n\t\t\tif \"last_modified\" in self.ds._file[\"/matrix\"].attrs:\n\t\t\t\treturn self.ds._file[\"/matrix\"].attrs[\"last_modified\"]\n\t\t\telif self.ds._file.mode == 'r+':\n\t\t\t\tself.ds._file[\"/matrix\"].attrs[\"last_modified\"] = timestamp()\n\t\t\t\tself.ds._file.flush()\n\t\t\t\treturn self.ds._file[\"/matrix\"].attrs[\"last_modified\"]\n\n\t\tif self.name != \"\":\n\t\t\tif \"last_modified\" in self.ds._file[\"/layers/\" + self.name].attrs:\n\t\t\t\treturn self.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"]\n\t\t\telif self.ds._file.mode == 'r+':\n\t\t\t\tself.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"] = timestamp()\n\t\t\t\tself.ds._file.flush()\n\t\t\t\treturn self.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"]\n\n\t\treturn timestamp()", "def __str__(self):\n return 
self.video_path", "def getKoanTime():\n if not os.path.exists(\"%s.awk\" % koanIndex[currentKoan] ):\n f = open(\"%s.awk\" % koanIndex[currentKoan] ,\"w\")\n f.write(sampleString)\n f.close()\n return os.path.getmtime(\"%s.awk\" % koanIndex[currentKoan] )", "def get(self):\n\t\tif not self.threaded:\n\t\t\tself.record()\n\t\timg = self.Video[-1]\n\t\ttime = self.timestamps[-1]\n\t\tif self.newAvailable:\n\t\t\tnew = True\n\t\t\tself.newAvailable = False\n\t\t\treturn new, img, time\n\t\telse:\n\t\t\tnew = False\n\t\t\treturn new, img, time", "def find_file_modified_time(file):\n#\n#--- find stat of the file. one of them is the file creation date\n#\n (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(file)\n out = time.ctime(mtime)\n#\n#--- out is in \"Mon Dec 01 15:22:37 2014\" format\n#\n atemp = re.split('\\s+', out)\n \n month = changeMonthFormat(atemp[1])\n date = int(float(atemp[2]))\n year = int(float(atemp[4]))\n\n btemp = re.split(':', atemp[3])\n\n hours = int(float(btemp[0]))\n minutes = int(float(btemp[1]))\n seconds = int(float(btemp[2]))\n\n stime = convertDateToTime2(year, month, date, hours, minutes, seconds)\n \n return stime", "def __call__(self) -> str:\n self._set_dt_format()\n self._set_dt_string()\n return self._get_filepath()", "def process_videos(chapter_info):\n\n print(\"Processing chapter_info:\", chapter_info)\n\n # getting creation time of the first chapter\n # TODO update when adding multiple directory proccessing\n os.chdir(DIR_VIDEO_FILES)\n print(\"1st chapter\", chapter_info[1][0])\n chap1_time = time.strftime(\n r\"%Y-%m-%d_%H-%M\", time.localtime(os.path.getctime(chapter_info[1][0])))\n print(\"1st chapter creation\", chap1_time)\n\n # output_file = f\"M_GH00{chapter_info[0]}_{chap1_time}.MP4\"\n output_file = f\"{chap1_time}_GH00{chapter_info[0]}_MRG.MP4\"\n if os.path.isfile(output_file):\n print(f\"Chapter already processed, found file: {output_file}\")\n return\n\n # preparing text file containing file list for merging (for ffmpeg)\n video_list_file = chapter_info[0] + \"_merge.txt\"\n with open(video_list_file, \"w\") as f:\n for video_chapter in chapter_info[1]:\n f.write(f\"file {video_chapter}\\n\")\n\n command = f\"{FFMPEG_EXE} -f concat -i {video_list_file} -c copy {DIR_OUTPUT}{output_file}\"\n print(\"command =\", command)\n # p = subprocess.run(\"dir\", shell=True, capture_output=True)\n # p = subprocess.run(\"dir\", shell=True, stdout=subprocess.PIPE, text=True)\n p = subprocess.run(command, stdout=subprocess.PIPE, text=True)\n print(\"returncode =\", p.returncode)\n # print(\"stdout =\", p.stdout)\n os.remove(video_list_file) # remove file list after merging\n # rename original chapters after processing\n for video_chapter in chapter_info[1]:\n os.rename(video_chapter, f\"OK_{video_chapter}\")", "def file_creation_date(file_path):\n ran_command = run_command([\"stat\", \"-f\", \"%SB\", file_path], True)\n raw_command_output = get_subprocess_output(ran_command)\n command_output = raw_command_output.strip(\"\\\\n\")\n elements = command_output.split(\" \")\n month_string = elements[0]\n month_number = 0\n day_number = int(elements[1])\n year_number = int(elements[3])\n if month_string == \"January\":\n month_number += 1\n elif month_string == \"February\":\n month_number += 2\n elif month_string == \"March\":\n month_number += 3\n elif month_string == \"April\":\n month_number += 4\n elif month_string == \"May\":\n month_number += 5\n elif month_string == \"June\":\n month_number += 6\n elif month_string == \"July\":\n 
month_number += 7\n elif month_string == \"August\":\n month_number += 8\n elif month_string == \"September\":\n month_number += 9\n elif month_string == \"October\":\n month_number += 10\n elif month_string == \"November\":\n month_number += 11\n elif month_string == \"December\":\n month_number += 12\n return [month_number, day_number, year_number]", "def _get_date_modified(path):\n return str(datetime.datetime.fromtimestamp(os.path.getmtime(path)))", "def timeandsize(str_ffn):\n try:\n str_filetime = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(os.path.getmtime(str_ffn)))\n str_filesize = os.path.getsize(str_ffn)\n return str_filetime, str_filesize\n except FileNotFoundError: # some files are very temporary ...\n return \"\", \"\"", "def get_created_time(self, name):\n full_path = self.path(name)\n return self.__volume.getctime(full_path)", "def cmd_mdtm (self, line):\r\n filename = line[1]\r\n if not self.filesystem.isfile (filename):\r\n self.respond ('550 \"%s\" is not a file' % filename)\r\n else:\r\n mtime = time.gmtime(self.filesystem.stat(filename)[stat.ST_MTIME])\r\n self.respond (\r\n '213 %4d%02d%02d%02d%02d%02d' % (\r\n mtime[0],\r\n mtime[1],\r\n mtime[2],\r\n mtime[3],\r\n mtime[4],\r\n mtime[5]\r\n )\r\n )", "def created(path):\n\n # TODO: Test this code block on other platforms (OS X/Linux)\n\n if platform.system() == 'Windows':\n date = datetime.fromtimestamp(os.path.getctime(path)).strftime('%Y-%m-%d')\n return date\n\n else:\n stat = os.stat(path)\n\n try:\n return stat.st_birthtime\n\n except AttributeError:\n # We're probably on Linux. No easy way to get creation dates here,\n # so we'll settle for when its content was last modified.\n return stat.st_mtime", "def _creation_date(path_to_file):\n if platform.system() == \"Windows\":\n return os.path.getctime(path_to_file)\n else:\n stat = os.stat(path_to_file)\n try:\n return stat.st_birthtime\n except AttributeError:\n # We're probably on Linux. No easy way to get creation dates here,\n # so we'll settle for when its content was last modified.\n return stat.st_mtime", "def mtime(self):\n return safeInt(self.tag(\"mtime\"))", "def LogProcess(self):\n time = datetime.today().strftime('%a %Y%b%d %X')\n# Get user name.\n f = os.popen(\"whoami\",\"r\")\n user = f.read().strip()\n f.close()\n\n entry = '%s\\t%s\\t%s\\t%s\\n' % (time, self.topdir, user, self.version)\n\n if ismounted(c.exams_file):\n# Append info to the exams file.\n try:\n f = open(c.exams_file,'a+')\n f.seek(0, 2)\n f.write(entry)\n f.close()\n except:\n# Not a huge problem if this doesn't work.\n pass", "def checkFiles(self): \r\n mdate_filenames_list = []\r\n mdate_filenames_tuple = {}\r\n last24 = []\r\n now = datetime.datetime.now() \r\n noise,ft = file_type.split('.')\r\n ## note can do an entry bg color stoplight thing >24 hrs = red, 12-24 hrs = yellow < 12 = green nice little if loop\r\n for f in filenames_list:\r\n if os.path.isfile(f):\r\n lastmod_date = datetime.datetime.fromtimestamp(os.path.getmtime(f))\r\n mdate_filenames_tuple = lastmod_date, f\r\n mdate_filenames_list.append(mdate_filenames_tuple)\r\n \r\n if now - lastmod_date < file_age:\r\n \r\n #print (\"{} was last modified on {:%a %b %d %Y, %H:%M:%S, %Z}. 
Moving to 'destinaiton' transfer folder.\".format(f, lastmod_date))\r\n last24.append(f)\r\n shutil.copy2(f, destination)\r\n xferTime=time.time()\r\n \r\n fa = str(file_age) \r\n with sqlite3.connect('fileTransfer.db') as connection:\r\n c = connection.cursor()\r\n c.execute(\"INSERT INTO tbl_lastRun(col_timestamp, col_source, col_destination, col_file_type, col_file_age) VALUES (?,?,?,?,?)\",(xferTime, source, destination, ft, hrs))\r\n connection.commit()\r\n connection.close \r\n\r\n clear(self)\r\n ask_quit(self)", "def extract_date_metadata(fname):\n\n try:\n # check if file has creation date, exception if not\n date_metadata = fileops.get_video_creation_date_metadata(fname)\n\n # extract the date/time string from metadata, exception if\n # not the proper format\n datetimestr = metadata_to_datetimestr(date_metadata)\n\n logging.debug(\"Found creation date metadata %r for file %r\",\n datetimestr, os.path.basename(fname))\n\n return datetimestr\n\n except fileops.VideoMetadataError:\n logging.warning(\n \"%r does not have a proper creation date metadata\",\n os.path.basename(fname))\n\n return \"\"\n\n except DateStrError:\n logging.warning(\n \"%r creation data metadata not the right format\",\n os.path.basename(fname))\n \n return \"\"", "def creation_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"creation_time\")", "def creation_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"creation_time\")", "def saveVideo(self,path,filename,filetype):\n\t\tprint('saving video...')\n\t\tif self.recording:\n\t\t\tprint('warning, video was still recording, please call .stop() in main script to end more consistently.')\n\t\t\tself.stop()\n\t\tframe = self.Video[-1]\n\t\t(frame_height,frame_width) = frame.shape[0:2]\n\t\tsavepath = path + filename + filetype\n\t\tout = cv2.VideoWriter(savepath,cf.FOURCC,self.desiredFPS,(frame_width,frame_height))\n\t\tfor i,frame in enumerate(self.Video):\n\t\t\tout.write(frame)\n\t\tprint(' video saved\\n')\n\t\tif self.threaded: # guarantee thread stopped\n\t\t\tself.vs.stop()\n\t\tself.stop()", "def _update_cmd_time_info(self, end=False):\n time_stamp = time.time()\n time_passed = time_stamp - self._start_time\n if end:\n docs_proc_now = self._docs_processed % self._file_write_threshhold\n if docs_proc_now == 0:\n msg = ('Written {} documents to file in total. '\n 'Time passed: {:2f}')\n print(msg.format(self._docs_processed, time_passed))\n else:\n msg = ('Writing {} documents to file. '\n 'Written {} documents to file in total. '\n 'Time passed: {:2f}')\n print(msg.format(\n docs_proc_now, self._docs_processed, time_passed))\n else:\n msg = ('Writing {} documents to file. '\n 'Written {} documents to file in total. 
'\n 'Time passed: {:2f}')\n print(msg.format(self._file_write_threshhold,\n self._docs_processed, time_passed))", "def getchrony():\n \n filename = \"/var/log/chrony/tracking.log\"\n fileNotOK = True\n try:\n if os.path.isfile(filename):\n fileNotOK = False\n except:\n fileNotOK = True\n # if file is not OK, return default\n if fileNotOK:\n return( \"2020-02-20T02:02:02.000\", 0., 0.)\n \n #get the very last line in the filea\n line = subprocess.check_output(['tail', '-1', filename])\n parts = line.split()\n nparts = len(parts)\n\n if nparts < 10:\n return( \"\", 0., 0.)\n \n date = parts[0]\n time = parts[1]\n ip = parts[2]\n #print(\"Offset: %s\" % (parts[9]))\n offset = float(parts[6])\n offsetrms = float(parts[9])\n datestr = \"%sT%s\" % (date, time)\n return( datestr, offset, offsetrms)", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--visualize', dest='visualize_dir', help=\"Path to directory to load all vizualization info from\")\n parser.add_argument('--overwrite', dest='overwrite', default=False, action='store_true', help=\"Overwrite existing logs parts if found\")\n args = parser.parse_args()\n if not args.visualize_dir:\n print \"Missing required argument, --visualize\"\n exit(-1)\n\n dsrc_log_file = args.visualize_dir + '/dsrc.log'\n radar_log_file = args.visualize_dir + '/radar.log'\n video_file = args.visualize_dir + '/video.mp4'\n log_config = args.visualize_dir + '/config.json'\n\n config = parse_config(log_config)\n\n if 'parts_auto_enabled' in config and config['parts_auto_enabled']:\n cap = cv2.VideoCapture(video_file)\n fps = cap.get(cv2.CAP_PROP_FPS)\n frames = cap.get(cv2.CAP_PROP_FRAME_COUNT)\n duration = float(frames) / fps\n cap.release()\n\n print 'Video duration: %s' % duration\n start = 0\n count = 1\n while start < duration:\n config['parts'].append({\n 'start': start,\n 'end': start + config['parts_auto_interval'],\n 'name': 'auto_part_%s' % count\n })\n count = count + 1\n start = start + config['parts_auto_interval']\n\n print config \n\n for index, part in enumerate(config['parts']):\n part_path = args.visualize_dir + '/' + (part['name'] if 'name' in part else 'part_%s' % (index+1))\n print \"---------------------------------------\"\n print \" Writing log to %s\" % part_path\n print \"---------------------------------------\"\n if not args.overwrite and os.path.exists(part_path):\n print \"Log already exists, skipping...\"\n continue\n\n if not os.path.exists(part_path):\n os.makedirs(part_path)\n\n export_part_video(part, part_path, video_file)\n export_part_log(part, part_path + '/radar.log', radar_log_file, config['video_start'])\n export_part_log(part, part_path + '/dsrc.log', dsrc_log_file, config['video_start'])\n export_part_config(part_path + '/config.json', config)", "def last_run(self):\r\n with sqlite3.connect('fileTransfer.db') as connection:\r\n c = connection.cursor()\r\n cursor = c.execute('SELECT max(id) FROM tbl_lastRun') \r\n max_id = cursor.fetchone()[0]\r\n cursor = c.execute('SELECT col_timestamp FROM tbl_lastRun')\r\n #timeLastRun = cursor.fetchone()[0]\r\n tLR_str = time.strftime('%Y-%m-%d %H:%M %z', time.localtime(cursor.fetchone()[0])) \r\n self.txt_lastRun.delete(0, 'end')\r\n self.txt_lastRun.insert(0, tLR_str)", "def __get_modification_time(filename: str) -> float:\n return os.stat(filename).st_mtime", "def get_timestamps_for_beesbook_video(path):\n import time, datetime\n def to_timestamp(datetime_string):\n # e.g. 
Cam_0_2018-09-13T17:13:49.501824Z\n dt = datetime.datetime.strptime(datetime_string[6:-1], \"%Y-%m-%dT%H:%M:%S.%f\")\n return time.mktime(dt.utctimetuple()) + dt.microsecond * 0.000001\n ts_path = path[:-3] + \"txt\"\n with open(ts_path, \"r\") as f:\n return [to_timestamp(l) for l in f.read().splitlines()]", "def getCreationDate(filename):\r\n path = cachePath(filename)\r\n \r\n pe = pefile.PE(path)\r\n return datetime.fromtimestamp(pe.FILE_HEADER.TimeDateStamp)", "def extract_date_info(object_key):\n pacific = pytz.timezone('America/Los_Angeles')\n first_parts = object_key.split(\"/\")\n capture_type = first_parts[4]\n last_part_idx = len(first_parts) - 1\n file_name = first_parts[last_part_idx]\n\n # now parse the date and time out of the file name\n second_parts = file_name.split(\"_\")\n last_part_idx = len(second_parts) - 1\n if capture_type == 'snap':\n date_time_string = second_parts[last_part_idx]\n if date_time_string.endswith('.jpg'):\n date_time_string = date_time_string[:-4]\n # FIN\n final_parts = date_time_string.split(\"-\")\n date_part = final_parts[0]\n time_part = final_parts[1]\n\n # FIN\n # FIN\n if capture_type == 'record':\n time_part = second_parts[last_part_idx]\n date_part = second_parts[(last_part_idx - 1)]\n if time_part.endswith('.mp4'):\n time_part = time_part[:-4]\n # FIN\n\n # parse out our date\n year = date_part[:4]\n date_part = date_part[4:]\n month = date_part[:2]\n day = date_part[2:]\n\n # parse out the time\n hour = time_part[:2]\n time_part = time_part[2:]\n seconds = time_part[2:]\n minutes = time_part[:2]\n\n if hour[:1] == '0':\n hour = hour[1:]\n if month[:1] == '0':\n month = month[1:]\n if day[:1] == '0':\n day = day[1:]\n\n this_date = datetime.datetime(int(year), int(month), int(day), int(hour),\n int(minutes), int(seconds), 0, pacific)\n return_object = {'isodate': this_date.isoformat(),\n 'year': year,\n 'month': month,\n 'day': day,\n 'hour': hour,\n 'minutes': minutes,\n 'seconds': seconds}\n return return_object", "def findcreatedate(self):\n try:\n exifdata = subprocess.check_output(['exiftool', '-j', '-TAG', '-CreateDate', self.filename])\n except OSError as e:\n print \"exiftool may not be installed. Please go check.\"\n print \"Here is the error thrown: {}\".format(e)\n raise\n\n data = json.loads(exifdata)\n self.createdate = date(*[int(elt) for elt in re.split('[ :]', data[0]['CreateDate'])][0:3])\n return self.createdate", "def last_new_file_time(self) -> datetime.datetime:\n with self.lock:\n return self._last_new_file_time", "def extension (self):\n return 'mp4'", "def getArchivoVotacion():" ]
[ "0.6664668", "0.6570803", "0.6459046", "0.6410988", "0.63897413", "0.6233249", "0.6155345", "0.6155294", "0.6054676", "0.6041707", "0.6028616", "0.59397346", "0.5918884", "0.5896831", "0.58840746", "0.5882793", "0.58803463", "0.587276", "0.5860853", "0.5849358", "0.58296496", "0.58121204", "0.5811267", "0.58063513", "0.5797488", "0.5792633", "0.5781213", "0.57787424", "0.57633865", "0.5763021", "0.57566047", "0.5745318", "0.57232857", "0.57232857", "0.57219523", "0.5703923", "0.5688598", "0.5669885", "0.56238765", "0.5614853", "0.5614296", "0.56062156", "0.560201", "0.56017154", "0.5596618", "0.559186", "0.5590015", "0.55859345", "0.5585261", "0.5583792", "0.55766684", "0.55634296", "0.5557255", "0.5557255", "0.5543634", "0.5528711", "0.55243695", "0.5511745", "0.5508133", "0.54935396", "0.5474096", "0.5474096", "0.5472876", "0.54728705", "0.5470291", "0.54602265", "0.5456121", "0.54453415", "0.5438178", "0.5436105", "0.54340297", "0.54286647", "0.5428022", "0.5421301", "0.54149926", "0.54101306", "0.5383359", "0.538287", "0.53822196", "0.5381365", "0.5378802", "0.5374274", "0.53738004", "0.5369326", "0.53621453", "0.53620374", "0.53620374", "0.5358363", "0.5347043", "0.5344448", "0.53407985", "0.5338989", "0.5330625", "0.53280467", "0.5326498", "0.5314194", "0.53056705", "0.5304965", "0.53035074", "0.5303409" ]
0.8283047
0
We give the location of a folder as input
def main_one(string_path_to_folder, destination_folder): # .jpg and .JPG are the same # photos = glob.glob("C:/Personal/pp2_photo/dataBase/*.JPG") # Examples of location format # pho = glob.glob("C:/Personal/pp2_photo/dataBase/*.jpg") photos = glob.glob(string_path_to_folder+"/*.JPG") print("Number of files: ", len(photos)) for k in photos: print(get_photo_date(k)) process_all(k, destination_folder)
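A minimal usage sketch for the document above (get_photo_date and process_all are assumed to be defined elsewhere in the same script; both folder paths are placeholders):

import glob  # main_one above relies on glob, although the snippet does not show the import

# Hypothetical call; source and destination folder paths are placeholders.
main_one("C:/Personal/pp2_photo/dataBase", "C:/Personal/pp2_photo/processed")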
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_folder():\n return input(\"Folder: \")", "def identify_folder(self, folder):", "def Directory(self) -> str:", "def subdir(self):", "def __init__(self, folder: str):\n self.folder = folder", "def folder(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"folder\")", "def folder(ctx,path):\n data = config.readData()\n fav=data.get('favorites',{})\n\n if path is None:\n displayMenu(data)\n path = click.prompt('Choose folder',default=\"\",show_default=False)\n if not path: ctx.abort()\n \n if path not in fav:\n click.echo(td(text=f\"'<r>{path}</r>' isn't in your favorites.\"))\n ctx.exit()\n\n if os.path.isdir(fav[path]):\n sub.run(f\"cd '{fav[path]}'; exec {data.get('shell','zsh')} \",shell=True,)\n else:\n click.echo(td(text=f\"Folder '<y>{fav[path]}</y>' doesn't exist.\"))", "def syncfolder():", "def path(self, args):\n dir_path = self.dir_path_(*args)\n return os.path.join(dir_path, self.file_name)", "def cwd (self, path):\r\n pass", "def change_dir(filename):", "def selectFolder(): \r\n directory = filedialog.askdirectory(\r\n title='Select file'\r\n )\r\n return directory", "def prepare_src_folder(self, src_folder: str) -> None:", "def _onFolder(self, event):\n eventId = event.GetId()\n if eventId == self.btnexpt.GetId():\n defaultDirectory = self.textexpt.GetValue()\n else:\n defaultDirectory = self.textfold.GetValue()\n dialog = wx.DirDialog(self, 'Choose a directory', defaultDirectory)\n if dialog.ShowModal() == wx.ID_OK:\n if eventId == self.btnexpt.GetId():\n self.textexpt.SetValue(dialog.GetPath())\n else:\n self.textfold.SetValue(dialog.GetPath())", "def GetInputPath(self):\n self.inputDir = raw_input(\"Where should files be read from? This can be a file or a folder of files\\n\\r>>> \")\n if os.path.isabs(self.inputDir):\n if os.path.isdir(self.inputDir):\n self.isFolder = True\n self.inputDirs = os.listdir(self.inputDir)\n elif os.path.isfile(self.inputDir):\n self.isFolder = False\n self.inputDirs = [self.inputDir]\n else:\n print \"That path does not exist. Try again\"\n self.GetInputPath()\n else:\n print \"that was not an excepted path name. Try again.\"\n self.GetInputPath()", "def _browse_folder(self):\n folder = QtWidgets.QFileDialog.getExistingDirectory(\n parent=self,\n caption=\"Select folder\",\n dir=self.folder_line_edit.text(),\n options=QtWidgets.QFileDialog.ShowDirsOnly |\n QtWidgets.QFileDialog.DontResolveSymlinks)\n self.folder_line_edit.setText(folder)", "def _enter_folder(self, event):\n del event\n if Path(self.txt_path.value).is_dir():\n self._open_folder(tgt_folder=self.txt_path.value)", "def cwd_in_path():\n ...", "def browse_input(self):\n path = getAFolder()\n if len(path) > 0:\n self.in_directory.setText(path)\n self.out_directory.setText(join(path, 'merged_results'))\n self.preprocessfolder()", "def pathtofolder():\n return os.getcwd()", "def common_folder_path(folder: str,) -> str:\n return \"folders/{folder}\".format(folder=folder,)", "def folder(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"folder\")", "def folder(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"folder\")", "def subFolder(self, value):\r\n self.__folder = str(value)", "def __call__(self, value): # noqa: D102\n if not os.path.isdir(value):\n raise argparse.ArgumentTypeError(\n f\"Input value must be folder and must exist. 
'{value}' is not.\"\n )\n return value", "def pickAFolder():\n folder = _tkCall(tkFileDialog.askdirectory)\n if folder == '':\n folder = myro.globvars.mediaFolder\n return folder", "def _open_folder(self, btn=None, tgt_folder=None):\n del btn\n if not tgt_folder:\n tgt_folder = self.select_folder.value\n if tgt_folder == self.PARENT:\n tgt_folder = self.current_folder.parent\n if tgt_folder:\n self.current_folder = (\n Path(self.current_folder).joinpath(tgt_folder).resolve()\n )\n self.txt_path.value = str(self.current_folder)\n folders, files = self.read_folder(self.current_folder)\n self.select_folder.options = self.get_folder_list(folders)\n self.select_file.options = files", "def _get_folder(self):\n # type: () -> str\n headers = Headers({\"content-type\": \"application/json\", \"accept\": \"application/json\"})\n response = self.connection.api_call(\n \"GET\", [\"v1\", \"resources\", self.id, \"folderpath\"], headers=headers\n )\n\n return response.json().get(\"path\")", "def select_folder(self):\r\n\r\n root = Tkinter.Tk()\r\n root.withdraw()\r\n diroption = {}\r\n diroption['initialdir'] = '.'\r\n diroption['mustexist'] = False\r\n diroption['parent'] = root\r\n diroption['title'] = 'Select a directory to organize'\r\n return tkFileDialog.askdirectory(**diroption)\r\n root.destroy()", "def process_IN_ISDIR(self, event):", "def chdir(self):\r\n self.directory=tkf.askdirectory()", "def ask_path():\n\n file_opt = options = {}\n options['initialdir'] = 'User\\\\'\n options['parent'] = root\n options['title'] = 'Choose directory'\n\n # get pathname\n pathname = tk.filedialog.askdirectory(**file_opt)\n\n if pathname:\n Data.out_dir = pathname\n path_var.set(pathname)", "def test_one_param_from_dir(self):\n assert tuttle_dir(\"test\") == path.join(\".tuttle\", \"test\")", "def _define_extension_folder(self, unsupported_file: File) -> str:\n while True:\n directory = input(\n f\"Pick folder where file {unsupported_file} extension should be moved: \\n\"\n )\n if directory in self.possibilities:\n if unsupported_file.get_extension():\n self.possibilities[directory].files.append(\n PlaceHolderFile(unsupported_file.name)\n )\n return directory\n else:\n print(\"Invalid input\")", "def folder(self, step=None):\n if step is None:\n return self._obs_group_folder / self._obs_folder\n else:\n return Path(step) / self._obs_group_folder / self._obs_folder", "def folder(self, step=None):\n if step is None:\n return self._obs_group_folder / self._obs_folder\n else:\n return Path(step) / self._obs_group_folder / self._obs_folder", "def folder(self, step=None):\n if step is None:\n return self._obs_group_folder / self._obs_folder\n else:\n return Path(step) / self._obs_group_folder / self._obs_folder", "def folder(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"folder\")", "def prepare_folder(self) -> str:\n base_folder = self.config['info']['folder']\n today_folder = f'{datetime.today():%Y-%m-%d}'\n folder = os.path.join(base_folder, today_folder)\n if not os.path.isdir(folder):\n os.makedirs(folder)\n return folder", "def list_dir(self, path):", "def test_input_folder(self):\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_folder\"\n params[\"input\"] = data_dir + \"build-custom/files/\"\n params[\"input_extension\"] = \"fna.gz\"\n cfg = Config(\"build-custom\", **params)\n self.assertTrue(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom run failed\")\n res = build_sanity_check_and_parse(vars(cfg))\n self.assertIsNotNone(res, \"ganon 
build-custom sanity check failed\")\n\n files = list_files_folder(params[\"input\"], params[\"input_extension\"])\n self.assertTrue(res[\"target\"][\"file\"].isin(files).all(), \"Files missing from target\")\n self.assertEqual(len(files), res[\"target\"].shape[0], \"Wrong number of files on target\")\n self.assertTrue(res[\"info\"][\"file\"].isin(files).all(), \"Files missing from info\")\n self.assertEqual(len(files), res[\"info\"].shape[0], \"Wrong number of files on info\")\n\n # Wrong extension\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_folder_wrong_extension\"\n params[\"input\"] = data_dir + \"build-custom/files/\"\n params[\"input_extension\"] = \"xxx.gz\"\n cfg = Config(\"build-custom\", **params)\n self.assertFalse(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom ran but it should fail\")\n\n # Wrong folder\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_folder_wrong_folder\"\n params[\"input\"] = data_dir + \"wrong-place/\"\n params[\"input_extension\"] = \"fna.gz\"\n cfg = Config(\"build-custom\", **params)\n self.assertFalse(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom ran but it should fail\")", "def test_input_folder_recursive(self):\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_folder_recursive\"\n params[\"input\"] = data_dir + \"build-custom/files/\"\n params[\"input_extension\"] = \"fna.gz\"\n params[\"input_recursive\"] = True\n \n cfg = Config(\"build-custom\", **params)\n self.assertTrue(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom run failed\")\n res = build_sanity_check_and_parse(vars(cfg))\n self.assertIsNotNone(res, \"ganon build-custom sanity check failed\")\n\n # list files from base folder and \"more\" (got recursively)\n files = list_files_folder(params[\"input\"], ext=params[\"input_extension\"], recursive=True)\n\n self.assertTrue(res[\"target\"][\"file\"].isin(files).all(), \"Files missing from target\")\n self.assertEqual(len(files), res[\"target\"].shape[0], \"Wrong number of files on target\")\n self.assertTrue(res[\"info\"][\"file\"].isin(files).all(), \"Files missing from info\")\n self.assertEqual(len(files), res[\"info\"].shape[0], \"Wrong number of files on info\")", "def test_input_folders_files(self):\n files = list_files_folder(data_dir + \"build-custom/files/\", ext=\"fna.gz\")\n folder = data_dir + \"build-custom/files/more/\"\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_folders_files\"\n params[\"input\"] = files + [folder]\n params[\"input_extension\"] = \"fna.gz\"\n cfg = Config(\"build-custom\", **params)\n self.assertTrue(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom run failed\")\n res = build_sanity_check_and_parse(vars(cfg))\n self.assertIsNotNone(res, \"ganon build-custom sanity check failed\")\n\n files.extend(list_files_folder(folder, ext=params[\"input_extension\"]))\n self.assertTrue(res[\"target\"][\"file\"].isin(files).all(), \"Files missing from target\")\n self.assertEqual(len(files), res[\"target\"].shape[0], \"Wrong number of files on target\")\n self.assertTrue(res[\"info\"][\"file\"].isin(files).all(), \"Files missing from info\")\n self.assertEqual(len(files), res[\"info\"].shape[0], \"Wrong number of files on info\")", "def common_folder_path(\n folder: str,\n ) -> str:\n return \"folders/{folder}\".format(\n folder=folder,\n )", "def from_folder(cls, *args, **kwargs):\n 
return cls().add_folder(*args, **kwargs)", "def path_of_image():\n top.folder_name = filedialog.askdirectory(title=\"select directory\",\n initialdir=\"C:/Users/Ayush sagore/JUPITER NOTEBOOK ML/CNN Model/\"\n \"test_dataset/\")\n path_name.insert(0, top.folder_name)", "def read_directory(self, dirpath):\n raise NotImplementedError", "def get_path():\n\n path = input(\"Directory path: \")\n if os.path.isdir(path):\n return path\n else:\n raise(ValueError)", "def open_folder(self, event):\n if self.advancedMenu:\n self.advancedMenu.Show(False)\n home = os.path.expanduser('~')\n c = config.Config()\n panda = None\n if c.username:\n # try for full path if there is a username\n panda = os.path.join(home, 'Digital Panda', c.username)\n if not os.path.exists(panda):\n # if the path doesn't exist - reset\n panda = None\n if not panda:\n # get base folder (without acccount)\n panda = os.path.join(home, 'Digital Panda')\n if not os.path.exists(panda):\n try:\n os.makedirs(panda)\n except:\n print \"TODO: need to handle folder creation failure!\"\n open_folder(panda)", "def setDestFolder(self, offset=0):\n while True:\n tempDest = input(\n offset * \" \" + \"Specify a sub-folder name to save the output files [%s]: \" % self.destFolder) or self.destFolder\n\n # If the folder does not exist, try to create it\n if not os.path.exists(self.currentPath + os.sep + tempDest):\n try:\n os.mkdir(tempDest)\n self.destFolder = tempDest\n self.destPath = self.currentPath + os.sep + self.destFolder\n break\n except OSError:\n print(\"Invalid folder name!\")\n\n # If it does exist set the destPath to it\n else:\n self.destFolder = tempDest\n self.destPath = self.currentPath + os.sep + self.destFolder\n break", "def __init__(self, directory):\n self.directory = pathlib.Path(directory)", "def setPath(*args):", "def getDir(self):\n self.folder = filedialog.askdirectory()\n self.dir_lbl[\"text\"] = self.folder", "def create_folder(self, c_path):\n raise NotImplementedError", "def test_get_result_directory(self):\n pass", "def get_outfolder():\n \n valid = False\n while not valid:\n fname = raw_input(\"Please enter directory to save images. 
\")\n if not os.path.exists(fname):\n os.makedirs(fname)\n #Check to see if the file is there.\n if os.path.exists(fname): \n valid = True\n #File is not there, check to see if write privileges can be given\n #to created file.\n elif os.access(os.path.dirname(fname), os.W_OK):\n valid = True\n else:\n print \"Invalid local path, please try again.\"\n return fname", "def path_helper(location, date, time, slc_dir='slc', data_dir='/media/bup/Data'):\n\n base_folder = data_dir + '/' + location + '/' + date + '/'\n name = date + '_' + time\n def_path = base_folder + slc_dir + '/' + name\n return def_path", "def get_folder(self):\n return os.path.join(\n settings.PRIVATE_STORAGE_ROOT, Exam.EXAM_FILES_LOCATION,\n str(self.unique_id)[0:2])", "def populateOutputFolder(self):\n filePath = pm.fileDialog2(fileMode=2,\n startingDirectory=self.startDir,\n fileFilter=' Shifter Game Assembly folder')\n if not filePath:\n return\n if not isinstance(filePath, string_types):\n filePath = filePath[0]\n self.gtUIInst.path_lineEdit.setText(filePath)", "def get_file_path(cls, file_name, folder_name):\n return cls.file_path.parent / folder_name / file_name", "def folder_str(f):\n if not os.path.exists(f):\n raise argparse.ArgumentTypeError('\"%s\" does not exist, you must create this folder' % f)\n return f", "def opath ( dir_name, file_name = None ):\n if file_name:\n return os.path.join(output_path, dir_name, file_name)\n return os.path.join(output_path, dir_name)", "def folder(self, name, node, path):\n # by default, use the raw asset container\n return merlin.assets.folder(name=name, node=node, path=path)", "def resolve_path(self):\n # This is the fixed directory template\n out_dir = os.path.join(opts.base_dir, self.board, self.dir)\n\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n os.chdir(out_dir)", "def path(self, prefix, args=()):\n dir_path = self.dir.path(prefix, args)\n return os.path.join(dir_path, self.name)", "def run_workdir(self, path):\n pass", "def _read_directory(self):\n self._filenames = glob.glob(self._directory + \"/*.project\")", "def get_directory() -> str:\n return directory", "def openFolder(self, path=None):\n if not path:\n dialog = OpenDialog()\n dialog.set_folders_only(True)\n path = dialog.getExistingDirectory(self, \"Open Folder\", '')\n\n if path:\n self.handleFileChanged(path)#, filename='index.rst')\n with open('./config.json', 'r') as f:\n c = json.load(f)\n c['last_folder'] = path\n with open('./config.json', 'w') as f:\n json.dump(c, f)", "def browse_folder(self, subdir=\".\"):\n if self.show_save_action:\n self.ui_Action.setEnabled(True)\n if self.show_dirs_only:\n self.ui_Action.setEnabled(True)\n self.ui_DirList.clear()\n if subdir == \".\":\n _sub_dir = self.active_url\n else:\n _sub_dir = subdir\n if len(self.directory_history) == 0:\n self.directory_history.append(_sub_dir)\n for item in reversed(self.directory_history):\n self.ui_DirList.addItem(item)\n self.ui_DirList.setCurrentIndex(self.last_dir_index)", "def __init__(self,folder):\n return", "def __init__(self,folder):\n return", "def __call__(self, components: Sequence[Text]) -> Text:\n return os.path.join(self._dirpath, *components)", "def open_file_directory(self):\r\n index = self.tree.currentIndex()\r\n file_path = self.model.filePath(index)\r\n if Path(file_path).is_dir():\r\n self.set_new_path(file_path)\r\n else:\r\n try:\r\n os.startfile(file_path)\r\n except Exception as e:\r\n QtWidgets.QMessageBox.critical(self, \"File Error\", \r\n \"The system cannot open this 
file:\\n\\n{}\".format(repr(e)))", "def ListFolder(self, path): # real signature unknown; restored from __doc__\n pass", "def set_dir(dir_type, path):\n\n\t\tif dir_type != 'source_dir' and dir_type != 'lyrics_dir':\n\t\t\tprint('Invalid \"dir_type\". Only \"source_dir\" or \"lyrics_dir\" are valid types.')\n\t\t\tprint('You gave \"dir_type\":', dir_type)\n\t\t\tprint('use \"lyrico --help\" to view commands.')\n\t\t\treturn False\n\n\t\t# If user is setting \"source_dir\", return if the path provided does not exist.\n\t\t# This improves the usage - lyrico <source_dir>\n\t\tif dir_type == 'source_dir':\n\t\t\tif not os.path.isdir(path):\n\t\t\t\tprint('\"source_dir\" does not exist. ', end=\"\")\n\t\t\t\tprint('You gave \"source_dir\":', path)\n\t\t\t\tprint('Please enter path to an existing folder.')\n\t\t\t\treturn False\n\t\t\tConfig.source_dir = path\n\t\t# make directory if user is setting \"lyrics_dir\" and it does not exists.\n\t\t# Refer http://stackoverflow.com/a/14364249/2426469\n\t\t# elif dir_type == 'lyrics_dir':\n\t\t# \ttry:\n\t\t# \t\tos.makedirs(path)\n\t\t# \t\tprint('Directory does not exist. Creating new one.')\n\t\t# \texcept OSError:\n\t\t# \t\tif not os.path.isdir(path):\n\t\t# \t\t\t# this exception is handled by function calling set_dir\n\t\t# \t\t\traise\n\t\t# \tConfig.lyrics_dir = path\n\n\t\t# print(dir_type, 'updated.')\n\t\tif dir_type == 'source_dir':\n\t\t\tprint('Lyric Grabber will scan the following folder for audio files:')\n\t\t# else:\n\t\t# \tprint('lyrico will save lyrics files in the following folder:')\n\t\tprint(' ', path)\n\t\treturn True", "def _folderCheck(self, folder):\n logger.debug(\"Func: _folderCheck\")\n\n if not os.path.isdir(os.path.normpath(folder)):\n os.makedirs(os.path.normpath(folder))", "def _create_folder(self, unsupported_file: File) -> str:\n if not self.possibilities:\n print(\n f\"----\\nNo folders found in directory. 
Please enter directory name for \"\n f\"{unsupported_file} file:\\n\"\n )\n else:\n print(\"Please enter directory name:\")\n\n while True:\n folder_name = input()\n checker = [True if char.isalnum() else False for char in folder_name]\n if False not in checker and folder_name not in self.possibilities.keys():\n os.makedirs(folder_name)\n temp_folder = Folder(folder_name)\n self.folders.append(temp_folder)\n if unsupported_file.get_extension():\n temp_folder.files.append(PlaceHolderFile(unsupported_file.name))\n return folder_name\n else:\n print(\"Invalid input\")", "def go_to(dir):\n work = \"/home/prm/Desktop/optical/optical/CAHA/cig96_jun16/\" + dir\n os.chdir(work)\n #print \"Work/save directory:\", work", "def dirpath(self, *args):\r\n return self.__class__(self.localpath.dirpath(*args), auth=self.auth)", "def check_folder_exists(location: str) -> bool:\n if os.path.isdir(location):\n return True\n else:\n return False", "def Browse(self):\n \n #run the folder manipulation routines...\n self.dir_opt = options = {}\n options['mustexist'] = False\n options['title'] = 'Select a directory...'\n \n #launch the directory selector\n self.FolderPath = tkFileDialog.askdirectory(**self.dir_opt)\n \n self.FolderPath = os.path.abspath(self.FolderPath)\n \n #set the folder path to the entry window\n self.FolderEntry.delete(0, tk.END)\n self.FolderEntry.insert(0, self.FolderPath)", "def check_for_json_folder(check_path):\n check_abspath = os.path.abspath(check_path)\n json_folders = [\"_JSON\", \"JSON\"]\n for jf in json_folders:\n if jf in check_abspath:\n print(\"{} folder exists : {}\".format(jf, check_abspath))\n top_path, base_path = check_abspath.split(\"{}/\".format(jf))\n out_path = os.path.dirname(os.path.join(top_path, base_path))\n if os.path.exists(out_path):\n print(\"Path exists : {}\".format(out_path))\n return out_path\n else:\n print(\"Path does not exist : {}\".format(out_path))\n print(\"Please create this folder and try again\")\n exit(1)", "def _dodir ( self, dirpath, mkdir_p ):\n return", "def cwd(self):", "def format_folder_path(folder_path):\n if folder_path[-1] != '/':\n folder_path += '/'\n\n return folder_path", "def current_directory (self):\r\n pass", "def createFolder(self):\n raise NotImplementedError", "def get_full_folder_path(self):\n data_dir_path = os.path.join(settings.MEDIA_ROOT,self.folder)\n return data_dir_path", "def set_source_path(self, folder):\n self.source_path = folder", "def test_supply_directory(self):\n supplied_value = '/tmp'\n returned_value = generic.check_path(supplied_value)\n\n self.assertEqual(supplied_value, returned_value)", "def test_files(self, location):\n for filename in os.listdir(location):\n with open(location + '/' + filename) as json_file:\n data = json.load(json_file)\n self.test_data(data)", "def copydir(self):\n pass", "def ReturnPathOfFile(self, url):\n\tcount=0\n\turlComponent = urlparse.urlparse(url)\n\tfor part in urlComponent:\n\t count = count + 1\n\t if count == 3:\n\t\tFolderPath = part\n\treturn FolderPath", "def ask_path(folder_flag=True, multiple_files_flag=False):\n # This method is almost never used, so the required imports are locally called\n import tkinter as tk\n from tkinter import filedialog\n\n root = tk.Tk()\n root.withdraw()\n path = os.getcwd()\n if folder_flag: # Open folder\n path = filedialog.askdirectory(parent=root, initialdir=path, title='Please select directory')\n else: # Open file\n if multiple_files_flag:\n path = filedialog.askopenfilenames(parent=root, initialdir=path, title='Please select 
data files')\n path = root.tk.splitlist(path)\n else:\n path = filedialog.askopenfilename(parent=root, initialdir=path, title='Please select data file')\n root.destroy()\n return path", "def entry_set_folder(self, entry):\r\n global folder_name\r\n folder_name = filedialog.askdirectory()\r\n entry.delete(0, 'end')\r\n entry.insert(tk.END, folder_name)", "def path(self, f):\n\t\treturn os.path.join(self.directory, f)", "def do_write_folder(self, line):\n\n if self.root_directory:\n self.write_folder = self.root_directory + \"/\" + line\n print(f\"Folder to write files is: {self.root_directory}/{line}\")\n else:\n self.write_folder = line\n print(f\"Folder to write files is: {line}\")", "def main():\r\n parent_dir = 'D:\\\\Profession\\\\Intern\\\\Assignments\\\\Codes\\\\Assignement Codes\\\\Part 2\\\\data_dumps'\r\n\r\n if not (os.path.isdir(parent_dir)):\r\n raise Exception(\"The directory doesn't exist\")\r\n\r\n directories = []\r\n\r\n for directory in os.listdir(parent_dir):\r\n directories.append(os.path.join(parent_dir, directory))\r\n\r\n # The group_dic represents the dictionary with keys equal to the unique dates in the directories\r\n # And the values represent a list of all files that have the same date prefix across the data_dumps\r\n group_dic = grouping(directories)\r\n\r\n # Moving Files into New Directory\r\n move(group_dic, parent_dir)\r\n print(\"Files Moved Successfully\")", "def get_directory(self, subdir=None):\n path = settings.SUBMISSION_DIR / str(self.assignment.id) / str(self.id)\n if subdir:\n path = path / subdir\n\n return path", "def on_dir_pick(self, event):\r\n\r\n directory = self.GetPath()\r\n if directory is None or not exists(directory) or not isdir(directory):\r\n directory = expanduser(\"~\")\r\n directory = dirpickermsg(_(\"Select directory to rummage\"), directory)\r\n if directory is None or directory == \"\":\r\n directory = None\r\n self.SetPath(directory)\r\n evt = DirChangeEvent(directory=directory)\r\n wx.PostEvent(self, evt)\r\n event.Skip()" ]
[ "0.70578855", "0.70199007", "0.6585436", "0.65788156", "0.653169", "0.6479249", "0.635464", "0.6243236", "0.61891365", "0.61666906", "0.6152387", "0.6135909", "0.61036575", "0.60858923", "0.60739803", "0.6059854", "0.60517687", "0.6041118", "0.60151416", "0.601386", "0.59964865", "0.59518456", "0.59518456", "0.594437", "0.59170395", "0.59119505", "0.59095573", "0.58895963", "0.58810574", "0.587911", "0.58708674", "0.58674276", "0.5858527", "0.5851146", "0.58487946", "0.58487946", "0.58487946", "0.5830601", "0.5828355", "0.5824828", "0.58125186", "0.58075637", "0.5805077", "0.5804562", "0.58014435", "0.5799586", "0.5790226", "0.57896703", "0.5772828", "0.57713836", "0.5768337", "0.57636124", "0.5760864", "0.5758713", "0.57580876", "0.5746043", "0.5741793", "0.57398003", "0.57302636", "0.57266766", "0.57264286", "0.5723484", "0.5714793", "0.5709797", "0.570648", "0.56981534", "0.5691194", "0.56908727", "0.5690057", "0.5688421", "0.5670291", "0.5670291", "0.56683165", "0.56677634", "0.5664481", "0.56595457", "0.5657557", "0.5650206", "0.56471926", "0.56403315", "0.5637785", "0.5632957", "0.5631791", "0.5628686", "0.5628029", "0.56246984", "0.5624636", "0.5623278", "0.56224006", "0.5621694", "0.5616932", "0.5609631", "0.5608013", "0.5604275", "0.56041855", "0.55992615", "0.55966556", "0.5573399", "0.55690026", "0.5567604", "0.5563417" ]
0.0
-1
Give actions for AtomSiteL.
def action_atom_site_l(obj: AtomSiteL, thread: QtCore.QThread): w_actions = [] if obj.is_attribute("type_symbol"): qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Show b_scat") qtb_1.clicked.connect(lambda: run_function( obj.report, (), thread)) w_actions.append(qtb_1) return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def actions():\n pass", "def setupActions(obj):\n at = getToolByName(obj, 'portal_actions')\n ait = getToolByName(obj, 'portal_actionicons')\n for action in at.listActions():\n if action.getId() == 'atom':\n break\n else:\n at.addAction(id = 'atom',\n name = \"Atom feed of this folder's contents\",\n action = \"string:$object_url/ATOM\",\n condition = \"python:portal.portal_syndication.isSyndicationAllowed(object)\",\n permission = CMFCorePermissions.View,\n category = 'document_actions',\n visible = True)\n for ai in ait.listActionIcons():\n if ai.getActionId() == 'atom':\n break\n else:\n ait.addActionIcon(category = 'plone',\n action_id = 'atom',\n icon_expr = 'atom.gif',\n title = 'Atom Syndication')", "def actions(self):\n raise NotImplementedError", "def actions() -> None:\n pass", "def actions(self):\n from moztrap.view.lists.actions import actions\n return actions", "def send_actions(self, actions):\n pass", "def action(self):\n pass", "def action(self):\n pass", "def get_actions(self, request):\n return super(OrganizationAdmin, self).get_actions(request)", "def _run_actions(self):\n\n if \"install-bento\" in self.actions:\n self._do_action_bento_setup()\n\n if \"create-tables\" in self.actions:\n self._do_action_tables_create()\n\n if \"import-ratings\" in self.actions:\n self._do_action_import_ratings()\n\n if \"import-user-info\" in self.actions:\n self._do_action_import_user_info()\n\n if \"import-movie-info\" in self.actions:\n self._do_action_import_movie_info()\n\n if \"train-item-item-cf\" in self.actions:\n self._do_action_train()\n\n if \"register-freshener\" in self.actions:\n self._do_action_register_freshener()", "def actions(self, agent_state):\n raise NotImplementedError(\"Don't know what actions are available\")", "def execute_actions(self, actions):\n execute_actions(self.board, self.agent_locs, actions)", "def apply_action(self, cmd_name, *args):\n\n action = Action(self.tahoma_device.url)\n action.add_command(cmd_name, *args)\n self.controller.apply_actions(\"HomeAssistant\", [action])", "def actions(self, state):\n\t\traise NotImplementedError", "def _action(self):\n pass", "def define_actions(self):\n ListView.define_actions(self)\n\n self.all_action = Gtk.ActionGroup(name=self.title + \"/CitationAll\")\n self.edit_action = Gtk.ActionGroup(name=self.title + \"/CitationEdit\")\n\n self._add_action('FilterEdit', None, _('Citation Filter Editor'),\n callback=self.filter_editor,)\n self._add_action('QuickReport', None, _(\"Quick View\"), None, None, None)\n\n self._add_action_group(self.edit_action)\n self._add_action_group(self.all_action)", "def initActions(self):\n self.hgFetchAct = E5Action(\n self.tr('Fetch changes'),\n UI.PixmapCache.getIcon(\"vcsUpdate.png\"),\n self.tr('Fetch changes'),\n 0, 0, self, 'mercurial_fetch')\n self.hgFetchAct.setStatusTip(self.tr(\n 'Fetch changes from a remote repository'\n ))\n self.hgFetchAct.setWhatsThis(self.tr(\n \"\"\"<b>Fetch changes</b>\"\"\"\n \"\"\"<p>This pulls changes from a remote repository into the \"\"\"\n \"\"\"local repository. If the pulled changes add a new branch\"\"\"\n \"\"\" head, the head is automatically merged, and the result of\"\"\"\n \"\"\" the merge is committed. 
Otherwise, the working directory\"\"\"\n \"\"\" is updated to include the new changes.</p>\"\"\"\n ))\n self.hgFetchAct.triggered.connect(self.__hgFetch)\n self.actions.append(self.hgFetchAct)", "def onAction(*args):", "def onAction(*args):", "def onAction(*args):", "def onAction(*args):", "def _generate_actions(self) -> list:\n pass", "def legal_actions(self):\n raise NotImplementedError", "def define_actions(self):\n ListView.define_actions(self)\n\n self._add_action('FilterEdit', None, _('Media Filter Editor'), \n callback=self.filter_editor)\n self._add_action('OpenMedia', 'gramps-viewmedia', _('View'), \n tip=_(\"View in the default viewer\"), \n callback=self.view_media)\n self._add_action('OpenContainingFolder', None, \n _('Open Containing _Folder'), \n tip=_(\"Open the folder containing the media file\"), \n callback=self.open_containing_folder)\n\n self._add_action('QuickReport', None, _(\"Quick View\"), None, None, None)", "def actions(self):\n self._actions = {}\n self._actions['getItems'] = ('FileCrawler', None)\n #self._actions['getContents'] = ('ParseContents', ('path'))\n return self._actions", "def actions(self, actions):\n\n self._actions = actions", "def actions(self, actions):\n\n self._actions = actions", "def get_content(self):\r\n for action in self._actions:\r\n action.execute()", "def actions(self, state):\n raise NotImplementedError # Override this!", "def action(self,item):\r\n pass", "def action_run(self):\n pass", "def custom_actions(self, form_entry, request=None):", "def action(self, gstate, actions=None):\n raise NotImplementedError", "def __actions__(self, obj):\n primary_fields = self.__provider__.get_primary_fields(self.__entity__)\n pklist = '/'.join(map(lambda x: str(getattr(obj, x)), primary_fields))\n #if has_permission('manage'):############\n \n historial = DBSession.query(Item.nrohistorial).filter_by(id=pklist).first()\n idlineabase = DBSession.query(Item.idLineaBase).filter_by(nrohistorial=historial, ultimaversion=1).first()\n lineabase = DBSession.query(LineaBase).filter_by(id=idlineabase).first()\n \n value = '<div></div>'\n \n if lineabase != None:\n if str(lineabase.estado).__eq__('abierta'):\n value = '<div><a class=\"loginlogout\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">Revertir</a></div>'\n else:\n value = '<div><a class=\"loginlogout\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">Revertir</a></div>'\n \n return value", "def actions(self):\r\n return Actions(self)", "def demonstrate(self,**kwargs):\n\n members = self.bl.getAllSavedActions()\n entries={}\n\n for param in members:\n entries[str(param)] = self.executeAction # save param names in entries\n\n# entries['search colour for position'] = self.search_menu\n entries['move block to position'] = self.move_block_menu\n entries['move arm to position'] = self.move_menu\n self.mm.addGenericMenu(\"actionMenu\",self.mm.cur_page,\"Select the action to demonstrate\", entries)\n self.mm.loadMenu(\"actionMenu\")", "def _do_action(self):\n pass", "def _do_action(self):\n pass", "def getActions(self, state): \n util.raiseNotDefined()", "def actions(self, states, agent_indices):\n return NotImplementedError()", "def actions(self):\r\n return actions.Actions(self)", "def get_actions(self):\n return []", "def actions(self, request, action_list, group):\n return action_list", "def actionURL(self):\n raise NotImplementedError()", "async def generic_action(self, request):\n pass", "def actions(self):\n return self._action_list", "def windowMenuActions( self, action ):\n\tif 
(action.text() == 'Weeklies'):\n if not hasattr(sharedDB, 'myWeekliesWidget'):\n\t\tsharedDB.myWeekliesWidget = weeklieswidget.WeekliesWidget(sharedDB.mainWindow)\n\t\t\n\t #sharedDB.myWeekliesWidget.CalculateWeeklies()\n\t sharedDB.myWeekliesWidget.dockWidget.show()\n\tif (action.text() == 'Assignments'):\t\t\n\t sharedDB.myAssignmentsWidget.dockWidget.show()\n\tif (action.text() == 'Attribute Editor'):\t\t\n\t sharedDB.myAttributeEditorWidget.dockWidget.show()", "def get_action(self, context):\n pass", "def _apply_action(self, action):\n current_site = self.design.first_unassigned_site\n paired_site = self.target.get_paired_site(current_site) # None for unpaired sites\n self.design.assign_sites(action, current_site, paired_site)", "def _after_serve_actions(self):\n pass", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def take_action(self, *args, **kwargs):\r\n pass", "def __init__(self):\n self.actions = []", "def get_legal_actions(self):\n pass", "def execute(self):\n for action in self.actions:\n self._logger.info('[~] Executing %s.', action)\n self._execute_action(action)", "def create_actions(self):\n self.minimizeAction.triggered.connect(self.hide)\n self.maximizeAction.triggered.connect(self.showMaximized)\n self.settingsAction.triggered.connect(self.showNormal)\n self.aboutAction.triggered.connect(self.show_about)\n self.quitAction.triggered.connect(self.quit)", "def name(self):\n return \"action_news_search\"", "def menu_python_for_artists(self, event=None):\n self.link('http://spe.pycs.net/stories/6.html')", "def action_type(self):", "def perform_action(self, action_data):\n pass", "def __call__(self):\n action = self.args.get('action', None)\n if not hasattr(self, 'action_%s' % (action,)):\n action = 'plugin_root'\n\n action_method = getattr(self, 'action_%s' % (action, ))\n return action_method()", "def actions(self, state, player):\r\n raise NotImplementedError", "def manage_actions(self):\n # selelect item actions\n self.action_item_read.setDisabled(not self.selected_item)\n self.action_item_shared.setDisabled(not self.selected_item)\n self.action_item_starred.setDisabled(not self.selected_item)\n if self.selected_item:\n self.action_item_read.setChecked(not self.selected_item.unread)\n self.action_item_read.setDisabled(not self.selected_item.can_unread)\n self.action_item_shared.setChecked(self.selected_item.shared)\n self.action_item_starred.setChecked(self.selected_item.starred)\n \n # current feed actions\n self.action_mark_all_read.setDisabled(not (self.current_feed and self.current_feed.unread and not self.current_feed.is_loading))\n self.action_fetch_more.setDisabled(not (self.current_feed and self.can_fetch_more and not self.current_feed.is_loading))\n self.action_refresh.setDisabled(not (self.current_feed and not self.current_feed.is_loading))\n \n # display show mode\n if self.show_unread_only:\n self.action_show_unread_only.setChecked(True)\n else:\n self.action_show_all.setChecked(True)", "def execute_action(self, agent, action):\n abstract", "def get_list_of_actions(self):\n return self.actions", "def SetActions(self, window):\n window.SetName(\"actions\")\n window.SetBackgroundColour(wx.GetApp().settings.action_bg_color)\n window.SetForegroundColour(wx.GetApp().settings.action_fg_color)\n window.SetFont(wx.GetApp().settings.action_text_font)", "def name(self):\n return \"action_news_bbc\"", "def createActions(self, window):\n menu_bar = 
window.qwindow().menuBar()\n tools_menu = find_menu(menu_bar, \"tools\")\n experimental_menu = find_menu(tools_menu, \"experimental\")\n if experimental_menu is None:\n experimental_menu = create_menu(\"experimental\", i18n(\"Experimental\"), parent=tools_menu)\n tools_menu.addAction(experimental_menu.menuAction())\n\n # add action \"instance\"\n experimental_menu.addAction(self._activate_arc_welding_action)", "def obtain_action(self):\r\n\t\treturn", "def get_actions(self):\n return self.agent.get_actions()", "def actions(self):\n return self._actions", "def custom_actions(self, form_wizard_entry, request=None):", "def name(self):\n return \"action_news_headline_india\"", "def test_actions(self, actions):\n try:\n for action in actions:\n self.get_action(action['type'])(**action)\n except Exception as e:\n print('Exception: {}'.format(str(e)))", "def do_action(self):\n func = self._get_action_func()\n func(self)", "def on_actions_list(self, e):\n self.PopupMenu(self.popup_menu())", "def cluster_actions():\n request_debug(r, logger)\n action = request_get(r, \"action\")\n logger.info(\"cluster_op with action={}\".format(action))\n if action == \"apply\":\n return cluster_apply(r)\n elif action == \"release\":\n return cluster_release(r)\n elif action == \"start\":\n return cluster_start(r)\n elif action == \"stop\":\n return cluster_stop(r)\n elif action == \"restart\":\n return cluster_restart(r)\n else:\n return make_fail_response(\"Unknown action type\")", "def call_action(self, action):\n pass", "def addSiteHandler(site, event):\n make_objectmanager_site(site)", "def actions(self):\r\n return self.puzzle.actions", "def action(self):\n current_action = self.get_script_entry()\n if current_action[\"type\"] == \"request\":\n self._handle_request(current_action)\n elif current_action[\"type\"] == \"event\":\n self._handle_event(current_action)\n elif current_action[\"type\"] == \"response\":\n self._handle_response(current_action)\n else:\n raise AttributeError(\"Wrong action type!\" +\n \" Scenario: \" + str(self._loaded_sc[\"name\"]) +\n \" Action: \" + str(self._scenario_script_cur))", "def name(self):\n return \"action_news_headline_us\"", "def name(self):\n return \"action_news_abc\"", "def apply_rl_actions(self, rl_actions):\n pass", "def act(self):\n pass", "def act(self, infoset):\n assert self.action in infoset.legal_actions\n return self.action", "def _get_legal_actions(self):\n raise NotImplementedError", "def publish_action(self, action):\n raise NotImplementedError", "def name(self):\n return \"action_news_headline_au\"", "def execute_action(self, agent, action):\n raise NotImplementedError", "def execute_action(self, agent, action):\n raise NotImplementedError", "def name(self):\n return \"action_news_cnn\"", "def getLegalActions(self):\n return ['BOT', 'SLD']", "def run(self):\n\n self._action.execute()", "def get_actions(self):\r\n return -4,4", "def _do_action(self, handler: 'Handler') -> CanDo:\n pass", "def post_activities():\n pass", "def actions(self, state):\n myActionList= (1,2);\n return myActionList" ]
[ "0.6464216", "0.63749534", "0.6212826", "0.6211583", "0.6051947", "0.5988117", "0.5883288", "0.5883288", "0.58782697", "0.5835679", "0.5777518", "0.5745189", "0.573512", "0.5696905", "0.5692459", "0.5652118", "0.56314516", "0.56207705", "0.56207705", "0.56207705", "0.56207705", "0.5600126", "0.5599576", "0.55789876", "0.5569427", "0.5555652", "0.5555652", "0.5553671", "0.55324763", "0.55252695", "0.55212027", "0.5510789", "0.55097455", "0.54595757", "0.5459439", "0.5444192", "0.54260784", "0.54260784", "0.54001236", "0.5394581", "0.5392694", "0.53674453", "0.5360784", "0.53557354", "0.53210515", "0.5307907", "0.5303807", "0.5298858", "0.52943003", "0.5293724", "0.52821666", "0.52821666", "0.52821666", "0.5280198", "0.5238366", "0.52340686", "0.52278167", "0.522579", "0.52250534", "0.52170986", "0.5216949", "0.5204941", "0.5202836", "0.5200738", "0.5195465", "0.51802576", "0.516689", "0.5165258", "0.51643467", "0.51532817", "0.5147843", "0.5146018", "0.51319295", "0.51260066", "0.51238227", "0.51237935", "0.51139396", "0.5101609", "0.5099049", "0.50975937", "0.5077124", "0.50747126", "0.5073842", "0.50712603", "0.5069763", "0.5064589", "0.50495565", "0.50487006", "0.5042182", "0.50416124", "0.50342476", "0.50306475", "0.50306475", "0.5030199", "0.50279945", "0.50264215", "0.5001175", "0.49934185", "0.49931183", "0.49904954" ]
0.59871656
6
Give actions for AtomSiteL.
def action_inversed_hessian(obj: InversedHessian, thread: QtCore.QThread): w_actions = [] if obj.is_defined(): qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Show correlation") qtb_1.clicked.connect(lambda: run_function( obj.report, (), thread)) w_actions.append(qtb_1) return w_actions
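Both documents above follow the same pattern: build a list of QToolButton widgets whose clicked signals pass a report-style method of obj, an empty argument tuple, and a worker thread to a run_function helper. A stripped-down sketch of that pattern; the PyQt5 calls are as in the snippets, while run_function is replaced by a stub because its real implementation is not shown here:

from PyQt5 import QtWidgets


def run_function(func, args, thread):
    # Stand-in for the run_function helper used above (assumed): the real one
    # presumably schedules func(*args) on the given worker thread.
    func(*args)


def report_actions(obj, thread):
    # Same shape as action_atom_site_l and action_inversed_hessian above:
    # create tool buttons, wire each one to a report method, return the list.
    w_actions = []
    qtb = QtWidgets.QToolButton()
    qtb.setText("Show report")
    qtb.clicked.connect(lambda: run_function(obj.report, (), thread))
    w_actions.append(qtb)
    return w_actions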
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def actions():\n pass", "def setupActions(obj):\n at = getToolByName(obj, 'portal_actions')\n ait = getToolByName(obj, 'portal_actionicons')\n for action in at.listActions():\n if action.getId() == 'atom':\n break\n else:\n at.addAction(id = 'atom',\n name = \"Atom feed of this folder's contents\",\n action = \"string:$object_url/ATOM\",\n condition = \"python:portal.portal_syndication.isSyndicationAllowed(object)\",\n permission = CMFCorePermissions.View,\n category = 'document_actions',\n visible = True)\n for ai in ait.listActionIcons():\n if ai.getActionId() == 'atom':\n break\n else:\n ait.addActionIcon(category = 'plone',\n action_id = 'atom',\n icon_expr = 'atom.gif',\n title = 'Atom Syndication')", "def actions(self):\n raise NotImplementedError", "def actions() -> None:\n pass", "def actions(self):\n from moztrap.view.lists.actions import actions\n return actions", "def send_actions(self, actions):\n pass", "def action_atom_site_l(obj: AtomSiteL, thread: QtCore.QThread):\n w_actions = []\n if obj.is_attribute(\"type_symbol\"):\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Show b_scat\")\n qtb_1.clicked.connect(lambda: run_function(\n obj.report, (), thread))\n w_actions.append(qtb_1)\n return w_actions", "def action(self):\n pass", "def action(self):\n pass", "def get_actions(self, request):\n return super(OrganizationAdmin, self).get_actions(request)", "def _run_actions(self):\n\n if \"install-bento\" in self.actions:\n self._do_action_bento_setup()\n\n if \"create-tables\" in self.actions:\n self._do_action_tables_create()\n\n if \"import-ratings\" in self.actions:\n self._do_action_import_ratings()\n\n if \"import-user-info\" in self.actions:\n self._do_action_import_user_info()\n\n if \"import-movie-info\" in self.actions:\n self._do_action_import_movie_info()\n\n if \"train-item-item-cf\" in self.actions:\n self._do_action_train()\n\n if \"register-freshener\" in self.actions:\n self._do_action_register_freshener()", "def actions(self, agent_state):\n raise NotImplementedError(\"Don't know what actions are available\")", "def execute_actions(self, actions):\n execute_actions(self.board, self.agent_locs, actions)", "def apply_action(self, cmd_name, *args):\n\n action = Action(self.tahoma_device.url)\n action.add_command(cmd_name, *args)\n self.controller.apply_actions(\"HomeAssistant\", [action])", "def actions(self, state):\n\t\traise NotImplementedError", "def _action(self):\n pass", "def define_actions(self):\n ListView.define_actions(self)\n\n self.all_action = Gtk.ActionGroup(name=self.title + \"/CitationAll\")\n self.edit_action = Gtk.ActionGroup(name=self.title + \"/CitationEdit\")\n\n self._add_action('FilterEdit', None, _('Citation Filter Editor'),\n callback=self.filter_editor,)\n self._add_action('QuickReport', None, _(\"Quick View\"), None, None, None)\n\n self._add_action_group(self.edit_action)\n self._add_action_group(self.all_action)", "def initActions(self):\n self.hgFetchAct = E5Action(\n self.tr('Fetch changes'),\n UI.PixmapCache.getIcon(\"vcsUpdate.png\"),\n self.tr('Fetch changes'),\n 0, 0, self, 'mercurial_fetch')\n self.hgFetchAct.setStatusTip(self.tr(\n 'Fetch changes from a remote repository'\n ))\n self.hgFetchAct.setWhatsThis(self.tr(\n \"\"\"<b>Fetch changes</b>\"\"\"\n \"\"\"<p>This pulls changes from a remote repository into the \"\"\"\n \"\"\"local repository. If the pulled changes add a new branch\"\"\"\n \"\"\" head, the head is automatically merged, and the result of\"\"\"\n \"\"\" the merge is committed. 
Otherwise, the working directory\"\"\"\n \"\"\" is updated to include the new changes.</p>\"\"\"\n ))\n self.hgFetchAct.triggered.connect(self.__hgFetch)\n self.actions.append(self.hgFetchAct)", "def onAction(*args):", "def onAction(*args):", "def onAction(*args):", "def onAction(*args):", "def _generate_actions(self) -> list:\n pass", "def legal_actions(self):\n raise NotImplementedError", "def define_actions(self):\n ListView.define_actions(self)\n\n self._add_action('FilterEdit', None, _('Media Filter Editor'), \n callback=self.filter_editor)\n self._add_action('OpenMedia', 'gramps-viewmedia', _('View'), \n tip=_(\"View in the default viewer\"), \n callback=self.view_media)\n self._add_action('OpenContainingFolder', None, \n _('Open Containing _Folder'), \n tip=_(\"Open the folder containing the media file\"), \n callback=self.open_containing_folder)\n\n self._add_action('QuickReport', None, _(\"Quick View\"), None, None, None)", "def actions(self):\n self._actions = {}\n self._actions['getItems'] = ('FileCrawler', None)\n #self._actions['getContents'] = ('ParseContents', ('path'))\n return self._actions", "def actions(self, actions):\n\n self._actions = actions", "def actions(self, actions):\n\n self._actions = actions", "def get_content(self):\r\n for action in self._actions:\r\n action.execute()", "def actions(self, state):\n raise NotImplementedError # Override this!", "def action(self,item):\r\n pass", "def action_run(self):\n pass", "def custom_actions(self, form_entry, request=None):", "def action(self, gstate, actions=None):\n raise NotImplementedError", "def __actions__(self, obj):\n primary_fields = self.__provider__.get_primary_fields(self.__entity__)\n pklist = '/'.join(map(lambda x: str(getattr(obj, x)), primary_fields))\n #if has_permission('manage'):############\n \n historial = DBSession.query(Item.nrohistorial).filter_by(id=pklist).first()\n idlineabase = DBSession.query(Item.idLineaBase).filter_by(nrohistorial=historial, ultimaversion=1).first()\n lineabase = DBSession.query(LineaBase).filter_by(id=idlineabase).first()\n \n value = '<div></div>'\n \n if lineabase != None:\n if str(lineabase.estado).__eq__('abierta'):\n value = '<div><a class=\"loginlogout\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">Revertir</a></div>'\n else:\n value = '<div><a class=\"loginlogout\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">Revertir</a></div>'\n \n return value", "def actions(self):\r\n return Actions(self)", "def demonstrate(self,**kwargs):\n\n members = self.bl.getAllSavedActions()\n entries={}\n\n for param in members:\n entries[str(param)] = self.executeAction # save param names in entries\n\n# entries['search colour for position'] = self.search_menu\n entries['move block to position'] = self.move_block_menu\n entries['move arm to position'] = self.move_menu\n self.mm.addGenericMenu(\"actionMenu\",self.mm.cur_page,\"Select the action to demonstrate\", entries)\n self.mm.loadMenu(\"actionMenu\")", "def _do_action(self):\n pass", "def _do_action(self):\n pass", "def getActions(self, state): \n util.raiseNotDefined()", "def actions(self, states, agent_indices):\n return NotImplementedError()", "def actions(self):\r\n return actions.Actions(self)", "def get_actions(self):\n return []", "def actions(self, request, action_list, group):\n return action_list", "def actionURL(self):\n raise NotImplementedError()", "async def generic_action(self, request):\n pass", "def actions(self):\n return self._action_list", "def windowMenuActions( self, action ):\n\tif 
(action.text() == 'Weeklies'):\n if not hasattr(sharedDB, 'myWeekliesWidget'):\n\t\tsharedDB.myWeekliesWidget = weeklieswidget.WeekliesWidget(sharedDB.mainWindow)\n\t\t\n\t #sharedDB.myWeekliesWidget.CalculateWeeklies()\n\t sharedDB.myWeekliesWidget.dockWidget.show()\n\tif (action.text() == 'Assignments'):\t\t\n\t sharedDB.myAssignmentsWidget.dockWidget.show()\n\tif (action.text() == 'Attribute Editor'):\t\t\n\t sharedDB.myAttributeEditorWidget.dockWidget.show()", "def get_action(self, context):\n pass", "def _apply_action(self, action):\n current_site = self.design.first_unassigned_site\n paired_site = self.target.get_paired_site(current_site) # None for unpaired sites\n self.design.assign_sites(action, current_site, paired_site)", "def _after_serve_actions(self):\n pass", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def take_action(self, *args, **kwargs):\r\n pass", "def __init__(self):\n self.actions = []", "def get_legal_actions(self):\n pass", "def execute(self):\n for action in self.actions:\n self._logger.info('[~] Executing %s.', action)\n self._execute_action(action)", "def create_actions(self):\n self.minimizeAction.triggered.connect(self.hide)\n self.maximizeAction.triggered.connect(self.showMaximized)\n self.settingsAction.triggered.connect(self.showNormal)\n self.aboutAction.triggered.connect(self.show_about)\n self.quitAction.triggered.connect(self.quit)", "def name(self):\n return \"action_news_search\"", "def menu_python_for_artists(self, event=None):\n self.link('http://spe.pycs.net/stories/6.html')", "def action_type(self):", "def perform_action(self, action_data):\n pass", "def __call__(self):\n action = self.args.get('action', None)\n if not hasattr(self, 'action_%s' % (action,)):\n action = 'plugin_root'\n\n action_method = getattr(self, 'action_%s' % (action, ))\n return action_method()", "def actions(self, state, player):\r\n raise NotImplementedError", "def manage_actions(self):\n # selelect item actions\n self.action_item_read.setDisabled(not self.selected_item)\n self.action_item_shared.setDisabled(not self.selected_item)\n self.action_item_starred.setDisabled(not self.selected_item)\n if self.selected_item:\n self.action_item_read.setChecked(not self.selected_item.unread)\n self.action_item_read.setDisabled(not self.selected_item.can_unread)\n self.action_item_shared.setChecked(self.selected_item.shared)\n self.action_item_starred.setChecked(self.selected_item.starred)\n \n # current feed actions\n self.action_mark_all_read.setDisabled(not (self.current_feed and self.current_feed.unread and not self.current_feed.is_loading))\n self.action_fetch_more.setDisabled(not (self.current_feed and self.can_fetch_more and not self.current_feed.is_loading))\n self.action_refresh.setDisabled(not (self.current_feed and not self.current_feed.is_loading))\n \n # display show mode\n if self.show_unread_only:\n self.action_show_unread_only.setChecked(True)\n else:\n self.action_show_all.setChecked(True)", "def execute_action(self, agent, action):\n abstract", "def get_list_of_actions(self):\n return self.actions", "def SetActions(self, window):\n window.SetName(\"actions\")\n window.SetBackgroundColour(wx.GetApp().settings.action_bg_color)\n window.SetForegroundColour(wx.GetApp().settings.action_fg_color)\n window.SetFont(wx.GetApp().settings.action_text_font)", "def name(self):\n return \"action_news_bbc\"", "def createActions(self, window):\n menu_bar = 
window.qwindow().menuBar()\n tools_menu = find_menu(menu_bar, \"tools\")\n experimental_menu = find_menu(tools_menu, \"experimental\")\n if experimental_menu is None:\n experimental_menu = create_menu(\"experimental\", i18n(\"Experimental\"), parent=tools_menu)\n tools_menu.addAction(experimental_menu.menuAction())\n\n # add action \"instance\"\n experimental_menu.addAction(self._activate_arc_welding_action)", "def obtain_action(self):\r\n\t\treturn", "def get_actions(self):\n return self.agent.get_actions()", "def actions(self):\n return self._actions", "def custom_actions(self, form_wizard_entry, request=None):", "def name(self):\n return \"action_news_headline_india\"", "def test_actions(self, actions):\n try:\n for action in actions:\n self.get_action(action['type'])(**action)\n except Exception as e:\n print('Exception: {}'.format(str(e)))", "def do_action(self):\n func = self._get_action_func()\n func(self)", "def on_actions_list(self, e):\n self.PopupMenu(self.popup_menu())", "def cluster_actions():\n request_debug(r, logger)\n action = request_get(r, \"action\")\n logger.info(\"cluster_op with action={}\".format(action))\n if action == \"apply\":\n return cluster_apply(r)\n elif action == \"release\":\n return cluster_release(r)\n elif action == \"start\":\n return cluster_start(r)\n elif action == \"stop\":\n return cluster_stop(r)\n elif action == \"restart\":\n return cluster_restart(r)\n else:\n return make_fail_response(\"Unknown action type\")", "def call_action(self, action):\n pass", "def addSiteHandler(site, event):\n make_objectmanager_site(site)", "def actions(self):\r\n return self.puzzle.actions", "def action(self):\n current_action = self.get_script_entry()\n if current_action[\"type\"] == \"request\":\n self._handle_request(current_action)\n elif current_action[\"type\"] == \"event\":\n self._handle_event(current_action)\n elif current_action[\"type\"] == \"response\":\n self._handle_response(current_action)\n else:\n raise AttributeError(\"Wrong action type!\" +\n \" Scenario: \" + str(self._loaded_sc[\"name\"]) +\n \" Action: \" + str(self._scenario_script_cur))", "def name(self):\n return \"action_news_headline_us\"", "def name(self):\n return \"action_news_abc\"", "def apply_rl_actions(self, rl_actions):\n pass", "def act(self):\n pass", "def act(self, infoset):\n assert self.action in infoset.legal_actions\n return self.action", "def _get_legal_actions(self):\n raise NotImplementedError", "def publish_action(self, action):\n raise NotImplementedError", "def name(self):\n return \"action_news_headline_au\"", "def execute_action(self, agent, action):\n raise NotImplementedError", "def execute_action(self, agent, action):\n raise NotImplementedError", "def name(self):\n return \"action_news_cnn\"", "def getLegalActions(self):\n return ['BOT', 'SLD']", "def run(self):\n\n self._action.execute()", "def get_actions(self):\r\n return -4,4", "def _do_action(self, handler: 'Handler') -> CanDo:\n pass", "def post_activities():\n pass", "def actions(self, state):\n myActionList= (1,2);\n return myActionList" ]
[ "0.6464216", "0.63749534", "0.6212826", "0.6211583", "0.6051947", "0.5988117", "0.59871656", "0.5883288", "0.5883288", "0.58782697", "0.5835679", "0.5777518", "0.5745189", "0.573512", "0.5696905", "0.5692459", "0.5652118", "0.56314516", "0.56207705", "0.56207705", "0.56207705", "0.56207705", "0.5600126", "0.5599576", "0.55789876", "0.5569427", "0.5555652", "0.5555652", "0.5553671", "0.55324763", "0.55252695", "0.55212027", "0.5510789", "0.55097455", "0.54595757", "0.5459439", "0.5444192", "0.54260784", "0.54260784", "0.54001236", "0.5394581", "0.5392694", "0.53674453", "0.5360784", "0.53557354", "0.53210515", "0.5307907", "0.5303807", "0.5298858", "0.52943003", "0.5293724", "0.52821666", "0.52821666", "0.52821666", "0.5280198", "0.5238366", "0.52340686", "0.52278167", "0.522579", "0.52250534", "0.52170986", "0.5216949", "0.5204941", "0.5202836", "0.5200738", "0.5195465", "0.51802576", "0.516689", "0.5165258", "0.51643467", "0.51532817", "0.5147843", "0.5146018", "0.51319295", "0.51260066", "0.51238227", "0.51237935", "0.51139396", "0.5101609", "0.5099049", "0.50975937", "0.5077124", "0.50747126", "0.5073842", "0.50712603", "0.5069763", "0.5064589", "0.50495565", "0.50487006", "0.5042182", "0.50416124", "0.50342476", "0.50306475", "0.50306475", "0.5030199", "0.50279945", "0.50264215", "0.5001175", "0.49934185", "0.49931183", "0.49904954" ]
0.0
-1
Dock for RhoChi object.
def action_rhochi(obj: RhoChi, thread: QtCore.QThread): w_actions = [] crystals = obj.crystals() experiments = obj.experiments() flag_crystals = len(crystals) != 0 flag_experiments = len(experiments) != 0 flag_diffrn = any([isinstance(exp, Diffrn) for exp in experiments]) # for experiment in experiments: # if isinstance(experiment, Pd): # w_actions_t = action_pd(experiment, thread) # w_actions.extend(w_actions_t) # elif isinstance(experiment, Pd2d): # w_actions_t = action_pd2d(experiment, thread) # w_actions.extend(w_actions_t) # elif isinstance(experiment, Diffrn): # w_actions_t = action_diffrn(experiment, thread) # w_actions.extend(w_actions_t) # Action doc if (flag_crystals & flag_experiments & obj.is_defined()): qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Calc. Chi square") qtb_1.clicked.connect(lambda: run_function( obj.calc_chi_sq, (True, ), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Refine") qtb_1.clicked.connect(lambda: run_function( obj.refine, (False, "BFGS",), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Estimate Inversed Hessian") qtb_1.clicked.connect(lambda: run_function( obj.estimate_inversed_hessian, (), thread)) w_actions.append(qtb_1) elif not(flag_crystals & flag_experiments): if not flag_crystals: qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Add crystal") qtb_1.clicked.connect(lambda: add_items(obj, [ Crystal(data_name="phase")], thread)) w_actions.append(qtb_1) if not flag_experiments: qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Add diffrn") qtb_1.clicked.connect(lambda: add_items(obj, [ Diffrn(data_name="mono")], thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Add pd") qtb_1.clicked.connect(lambda: add_items(obj, [ Pd(data_name="powder1d")], thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Add pd2d") qtb_1.clicked.connect(lambda: add_items(obj, [ Pd2d(data_name="powder2d")], thread)) w_actions.append(qtb_1) else: qlabel = QtWidgets.QLabel( "To run calculations all items should be defined.") w_actions.append(qlabel) # layout_actions.addWidget(qlabel) if (flag_diffrn & flag_crystals): qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Estimate F_M") qtb_1.clicked.connect(lambda: run_function( obj.estimate_f_mag_for_diffrn, (), thread)) w_actions.append(qtb_1) if obj.is_attribute("inversed_hessian"): w_actions_t = action_inversed_hessian(obj.inversed_hessian, thread) w_actions.extend(w_actions_t) return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createDockArea(self):\n self.centralDock = CentralDockArea(self.globalSession)\n self.setCentralWidget(self.centralDock)", "def init_layout(self):\n super(WxDockPane, self).init_layout()\n self.widget.SetDockWidget(self.dock_widget())", "def create(self, verbose=False):\r\n # delete the window if its handle exists\r\n if cmds.window(self.window, exists=True):\r\n cmds.deleteUI(self.window)\r\n # initialize the window as a pane for docking\r\n self.window = cmds.loadUI(uiFile=self.uiFile, verbose=verbose)\r\n #layoutWin = cmds.paneLayout(configuration='single')\r\n # create a dockControl and parent the control to layoutWin\r\n cmds.dockControl(allowedArea='all', area='right', floating=False, \r\n height=cmds.window(self.window, query=True, height=True), \r\n content=self.window, label='Docked Cone Pointer Window')\r\n cmds.showWindow(self.window)", "def open(self):\n super(Nodzgraph, self).open(dockable=self.configuration.maya.docked,\n area=self.configuration.maya.dock_area,\n allowedArea=self.configuration.maya.allowed_dock_areas,\n floating=self.configuration.maya.floating,\n width=self.configuration.maya.width,\n height=self.configuration.maya.height\n )", "def __init__(self):\r\n\r\n object.__init__(self)\r\n \r\n self.dock_direction = 0\r\n self.dock_layer = 0\r\n self.dock_row = 0\r\n self.size = 0\r\n self.min_size = 0\r\n self.resizable = True\r\n self.fixed = False\r\n self.toolbar = False\r\n self.rect = wx.Rect()\r\n self.panes = []", "def Top(self):\r\n\r\n self.dock_direction = AUI_DOCK_TOP\r\n return self", "def Top(self):\r\n\r\n self.dock_direction = AUI_DOCK_TOP\r\n return self", "def __init__(self, other=None):\r\n\r\n if other:\r\n self.Assign(other)\r\n else:\r\n # window representing the docking target\r\n self.host = None\r\n # dock direction (top, bottom, left, right, center)\r\n self.dock_direction = AUI_DOCK_NONE", "def create_dockable(self, dockable_name, widget):\n pass", "def GetDock(self):\n return self.dock", "def Setup(dock, parent, name = NAME, color = DEFAULT_COLOR):\n object = Tab(parent, dock, name, color)\n return object", "def __init__(self, parent):\r\n\r\n AuiDockingGuide.__init__(self, parent, style=wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP |\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER | wx.FRAME_SHAPED,\r\n name=\"auiCenterDockTarget\")\r\n\r\n self.Hide()\r\n\r\n self.CreateShapesWithStyle()\r\n self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)\r\n \r\n if wx.Platform == \"__WXGTK__\":\r\n self.Bind(wx.EVT_WINDOW_CREATE, self.SetGuideShape)\r\n else:\r\n self.SetGuideShape()\r\n \r\n self.SetSize(self.region.GetBox().GetSize())\r\n\r\n self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)\r\n self.Bind(wx.EVT_PAINT, self.OnPaint)", "def __init__(self, size, class_to_use, master, row, column, report=None):\n try:\n master.master.geometry(size)\n except AttributeError:\n pass\n self.window = class_to_use(master=master, borderwidth=0, relief=tk.GROOVE)\n self.window.grid(row=row, column=column, padx=10, pady=20)", "def create_main_enviroment(self):\n # self.layout=QGridLayout()\n self.resize(900, 900)\n self.centralWidget = CentralWidget(self) # CentralWidget(self)\n self.setCentralWidget(self.centralWidget)\n\n # self.toolbar = QToolBar(self)\n # self.addToolBar(self.toolbar)\n\n # self.setLayout(self.layout)\n self.setWindowTitle(\"Fitting elastic constants\")", "def __init__(self, parent=None):\n QtGui.QWidget.__init__(self, parent)\n \n self.setWindowTitle('The Visual Climate Data Analysis Tools - (VCDAT)')\n layout = QtGui.QVBoxLayout()\n 
self.setLayout(layout)\n\n # Init Menu Widget\n self.menuWidget = QMenuWidget(self)\n\n # Init File Widget\n vsplitter = QtGui.QSplitter(QtCore.Qt.Vertical) \n fileWidget = QLabeledWidgetContainer(QCDATFileWidget(),\n 'FILE VARIABLES')\n vsplitter.addWidget(fileWidget)\n\n # Init Defined Variables Widget\n definedVar = QLabeledWidgetContainer(QDefinedVariable(),\n 'DEFINED VARIABLES')\n vsplitter.addWidget(definedVar)\n hsplitter = QtGui.QSplitter(QtCore.Qt.Horizontal)\n hsplitter.addWidget(vsplitter)\n\n # Init Var Plotting Widget\n varView = QLabeledWidgetContainer(QVariableView(),\n 'PLOTTING')\n hsplitter.addWidget(varView)\n hsplitter.setStretchFactor(1, 1)\n layout.addWidget(hsplitter)\n\n # Init guiController\n guiController = GuiController(fileWidget.getWidget(),\n definedVar.getWidget(),\n varView.getWidget())\n guiController.initTeachingCommands()\n self.guiController = guiController # So guicontroller doesn't get garbage collected\n\n # Connect signals between self & GuiController\n self.connect(self, QtCore.SIGNAL('setRecordCommands'),\n guiController.setRecordCommands)\n self.connect(self, QtCore.SIGNAL('viewTeachingCommands'),\n guiController.viewTeachingCommands)\n self.connect(self, QtCore.SIGNAL('closeTeachingCommands'),\n guiController.closeTeachingCommands) \n\n # Connect Signals between QVariableView & QDefinedVariable\n varView.connect(definedVar.getWidget(), QtCore.SIGNAL('selectDefinedVariableEvent'),\n varView.getWidget().selectDefinedVariableEvent)\n varView.connect(definedVar.getWidget(), QtCore.SIGNAL('setupDefinedVariableAxes'),\n varView.getWidget().setupDefinedVariableAxes)\n definedVar.connect(varView.getWidget(), QtCore.SIGNAL('plotPressed'),\n definedVar.getWidget().defineQuickplot)\n definedVar.connect(varView.getWidget(), QtCore.SIGNAL('defineVariable'),\n definedVar.getWidget().defineVariable)\n\n # Connect Signals between QFileWidget & QVariableView\n varView.connect(fileWidget.getWidget(), QtCore.SIGNAL('variableChanged'),\n varView.getWidget().setupDefinedVariableAxes)\n varView.connect(fileWidget.getWidget(), QtCore.SIGNAL('defineVariableEvent'),\n varView.getWidget().defineVariableEvent)", "def initDocks(self):\n # Define the grid of widgets\n gridLayout = QtWidgets.QGridLayout()\n gridLayout.setOriginCorner(QtCore.Qt.TopLeftCorner)\n\n # Set QWidget object as main window in order to develop the appropriate functions\n widget = QtWidgets.QWidget(self)\n widget.setLayout(gridLayout)\n self.setCentralWidget(widget)\n\n # Set the text font\n font = QtGui.QFont()\n font.setPointSize(14)\n font.setBold(True)\n\n # Add figure widget scenes\n self.lineFigureScene = GraphicsLineScene(self)\n self.barFigureScene = GraphicsBarScene(self)\n\n # Init view windows\n self.displayLineFigure = QtWidgets.QGraphicsView()\n self.displayBarFigure = QtWidgets.QGraphicsView()\n\n self.displayLineFigure.setScene(self.lineFigureScene)\n self.displayBarFigure.setScene(self.barFigureScene)\n\n # Initialize the classification scenes\n self.lineFigures = LineFigures(self)\n self.barFigures = BarFigures(self)\n self.displayLineFigures = QtWidgets.QGraphicsView(self.lineFigures)\n self.displayBarFigures = QtWidgets.QGraphicsView(self.barFigures)\n\n # Set item index method\n self.lineFigures.setItemIndexMethod(QtWidgets.QGraphicsScene.BspTreeIndex)\n self.barFigures.setItemIndexMethod(QtWidgets.QGraphicsScene.BspTreeIndex)\n\n # Define text widgets\n lineText = QtWidgets.QLabel()\n lineText.setFont(font)\n lineText.setText('Line Figures Classification')\n #\n barText = 
QtWidgets.QLabel()\n barText.setFont(font)\n barText.setText('Bar Figures Classification')\n\n # Add widgets to grid layout\n gridLayout.addWidget(lineText, 1, 0, 1, -1, QtCore.Qt.AlignHCenter)\n gridLayout.addWidget(barText, 3, 0, 1, -1, QtCore.Qt.AlignHCenter)\n gridLayout.addWidget(self.displayLineFigure, 2, 0, QtCore.Qt.AlignLeft)\n gridLayout.addWidget(self.displayBarFigure, 4, 0, QtCore.Qt.AlignLeft)\n gridLayout.addWidget(self.displayLineFigures, 2, 1, 1, -1, QtCore.Qt.AlignLeft)\n gridLayout.addWidget(self.displayBarFigures, 4, 1, 1, -1, QtCore.Qt.AlignLeft)\n\n gridLayout.setHorizontalSpacing(70)\n gridLayout.setVerticalSpacing(15)\n\n self.screenWidth = QtWidgets.QDesktopWidget().width()\n self.screenHeight = QtWidgets.QDesktopWidget().height()\n\n # Create slots to update slider initial position\n self.displayBarFigures.horizontalScrollBar().rangeChanged.connect( self.barFigures.changeSliderPos)\n self.displayLineFigures.horizontalScrollBar().rangeChanged.connect( self.lineFigures.changeSliderPos)\n\n # Overlay loading widget\n self.overlay = Overlay(self)\n self.overlay.hide()", "def createWidgets(self):\n self.tab = WorkspaceTab( self )\n self.tab.setMinimumWidth(500)\n self.tab.setDocumentMode( False )\n self.tab.setMovable( True )\n\n # self.dockToolbar = QToolBar(self)\n # self.dockToolbar.setOrientation(Qt.Vertical)\n\n self.findWidget = FindReplace(self)\n self.findWidget.setDisabled(True)\n self.findWidget.hide()\n\n layout = QVBoxLayout()\n layout.addWidget( self.tab )\n layout.setContentsMargins(0,0,0,0)\n\n \n self.setLayout(layout)", "def init_widget(self):\n super(WxDockPane, self).init_widget()\n d = self.declaration\n self.set_title(d.title)\n self.set_title_bar_visible(d.title_bar_visible)\n self.set_title_bar_orientation(d.title_bar_orientation)\n self.set_closable(d.closable)\n self.set_movable(d.movable)\n self.set_floatable(d.floatable)\n self.set_floating(d.floating)\n self.set_dock_area(d.dock_area)\n self.set_allowed_dock_areas(d.allowed_dock_areas)\n widget = self.widget\n widget.Bind(EVT_DOCK_PANE_FLOATED, self.on_floated)\n widget.Bind(EVT_DOCK_PANE_DOCKED, self.on_docked)\n widget.Bind(EVT_DOCK_PANE_CLOSED, self.on_closed)", "def createWidgets(self):\r\n top = self.winfo_toplevel()\r\n top.rowconfigure(0, weight=1)\r\n top.columnconfigure(0, weight=1)\r\n self.rowconfigure(0, weight=1)\r\n self.columnconfigure(0, weight=1) \r\n\r\n self.button_quit = tk.Button(self, text='Quit', command=self.quit)\r\n self.button_quit.grid(row=0, column=0, sticky=tk.N+tk.S+tk.E+tk.W)", "def inicialUI(self):\r\n\r\n self.setGeometry(500, 500, 500, 500)\r\n self.setWindownTitle(\"Pesquisa\")\r\n self.displayWidgets()\r\n\r\n self.show()", "def _AuiDockingGuide_init(self, *args, **kwargs):\n\n if 'style' in kwargs:\n style = kwargs['style']\n\n # This is the default style, as defined\n # in the AuiDockingGuide constructor\n else:\n style = (wx.FRAME_TOOL_WINDOW |\n wx.FRAME_STAY_ON_TOP |\n wx.FRAME_NO_TASKBAR |\n wx.NO_BORDER)\n\n if fwidgets.inSSHSession():\n style &= ~wx.FRAME_TOOL_WINDOW\n\n kwargs['style'] = style\n\n _AuiDockingGuide_real_init(self, *args, **kwargs)", "def workspaceControl(*args, checksPlugins: bool=True, close: bool=True, closeCommand:\n Script=None, collapse: bool=True, defineTemplate: AnyStr=\"\",\n dockToControl: List[AnyStr, AnyStr]=None, dockToMainWindow: List[AnyStr,\n bool]=None, dockToPanel: List[AnyStr, AnyStr, bool]=None, duplicatable:\n bool=True, exists: bool=True, floating: bool=True, height: bool=True,\n heightProperty: Union[AnyStr, 
bool]=\"\", horizontal: bool=True,\n initCallback: Union[AnyStr, bool]=\"\", initialHeight: int=0, initialWidth:\n int=0, label: Union[AnyStr, bool]=\"\", loadImmediately: bool=True,\n minimumHeight: Union[int, bool]=0, minimumWidth: Union[int, bool]=0, r:\n bool=True, requiredControl: Union[AnyStr, List[AnyStr]]=\"\",\n requiredPlugin: Union[AnyStr, List[AnyStr]]=\"\", resizeHeight: int=0,\n resizeWidth: int=0, restore: bool=True, retain: bool=True, stateString:\n Union[AnyStr, bool]=\"\", tabPosition: List[AnyStr, bool]=None,\n tabToControl: List[AnyStr, int]=None, uiScript: Script=None, useTemplate:\n AnyStr=\"\", visible: bool=True, visibleChangeCommand: Script=None, width:\n bool=True, widthProperty: Union[AnyStr, bool]=\"\", q=True, query=True,\n e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def open_window(self,size):\n # Window\n self.root = Tk()\n self.root.geometry(size)\n self.root.resizable(0, 0)\n\n\n # Tree\n self.tree = ttk.Treeview(self.root, heigh=20)\n self.tree.grid(row=4, column=0, padx=20)\n self.tree.grid(columnspan=5)\n\n hsb = ttk.Scrollbar(self.root, orient=\"horizontal\")\n hsb.configure(command=self.tree.xview)\n self.tree.configure(xscrollcommand=hsb.set)\n hsb.grid(row=5, column=0, padx=20, pady=20, columnspan=5, sticky=(W + E))", "def __init__(self, parent, id=wx.ID_ANY, title=\"\", pos=wx.DefaultPosition,\r\n size=wx.DefaultSize, style=wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP |\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER, name=\"AuiDockingGuide\"):\r\n\r\n wx.Frame.__init__(self, parent, id, title, pos, size, style, name=name)", "def __init__(self, parent, direction=0):\r\n\r\n self._direction = direction\r\n\r\n style = wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP | \\\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER\r\n\r\n # Use of FRAME_SHAPED on wxMac causes the frame to be visible\r\n # breaking the docking hints.\r\n if wx.Platform != '__WXMAC__':\r\n style |= wx.FRAME_SHAPED\r\n\r\n AuiDockingGuide.__init__(self, parent, style=style, name=\"auiSingleDockTarget\")\r\n \r\n self.Hide()\r\n\r\n useAero = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_AERO_DOCKING_GUIDES\r\n useWhidbey = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_WHIDBEY_DOCKING_GUIDES\r\n \r\n self._useAero = useAero or useWhidbey\r\n self._valid = True\r\n \r\n if useAero:\r\n sizeX, sizeY = aeroguideSizeX, aeroguideSizeY\r\n elif useWhidbey:\r\n sizeX, sizeY = whidbeySizeX, whidbeySizeY\r\n else:\r\n sizeX, sizeY = guideSizeX, guideSizeY\r\n\r\n if direction not in [wx.TOP, wx.BOTTOM]:\r\n sizeX, sizeY = sizeY, sizeX\r\n\r\n if self._useAero:\r\n self.CreateShapesWithStyle(useWhidbey)\r\n \r\n if wx.Platform == \"__WXGTK__\":\r\n self.Bind(wx.EVT_WINDOW_CREATE, self.SetGuideShape)\r\n else:\r\n self.SetGuideShape()\r\n \r\n self.SetSize(self.region.GetBox().GetSize())\r\n else:\r\n self.SetSize((sizeX, sizeY))\r\n \r\n self.rect = wx.Rect(0, 0, sizeX, sizeY)\r\n\r\n if self._useAero:\r\n useAero = (useWhidbey and [2] or [1])[0]\r\n else:\r\n useAero = 0\r\n \r\n self.target = AuiDockingGuideWindow(self, self.rect, direction, False, useAero)", "def dockControl(*args, allowedArea: Union[AnyStr, List[AnyStr], bool]=\"all\", annotation:\n Union[AnyStr, bool]=\"\", area: Union[AnyStr, bool]=\"\", backgroundColor:\n Union[List[float, float, float], bool]=None, closeCommand: Script=None,\n content: Union[AnyStr, bool]=\"\", defineTemplate: AnyStr=\"\", docTag:\n Union[AnyStr, bool]=\"\", dockStation: AnyStr=\"\", dragCallback: Script=None,\n dropCallback: Script=None, enable: bool=True, 
enableBackground: bool=True,\n enableKeyboardFocus: bool=True, enablePopupOption: bool=True, exists: bool=True,\n fixedHeight: bool=True, fixedWidth: bool=True, floatChangeCommand: Script=None,\n floating: bool=True, fullPathName: bool=True, height: Union[int, bool]=0,\n highlightColor: Union[List[float, float, float], bool]=None, isObscured:\n bool=True, label: Union[AnyStr, bool]=\"\", manage: bool=True, moveable:\n bool=True, noBackground: bool=True, numberOfPopupMenus: bool=True, parent:\n Union[AnyStr, bool]=\"\", popupMenuArray: bool=True, preventOverride: bool=True,\n r: bool=True, retain: bool=True, sizeable: bool=True, splitLayout: AnyStr=\"\",\n state: Union[AnyStr, bool]=\"\", statusBarMessage: AnyStr=\"\", useTemplate:\n AnyStr=\"\", visible: bool=True, visibleChangeCommand: Union[Script, bool]=None,\n width: Union[int, bool]=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def initView(self):\n #Draw the Session View\n self._sessionView = SessionView(self._app)\n leftDockWidget = QtGui.QDockWidget(\"Session\", self)\n leftDockWidget.setAllowedAreas(QtCore.Qt.LeftDockWidgetArea | QtCore.Qt.RightDockWidgetArea)\n leftDockWidget.setWidget(self._sessionView)\n leftDockWidget.setFeatures(QtGui.QDockWidget.DockWidgetMovable | QtGui.QDockWidget.DockWidgetClosable)\n\n #temporary !\n titleBar = QtGui.QWidget()\n leftDockWidget.setTitleBarWidget(titleBar)\n\n self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, leftDockWidget)\n\n #Draw the central widget\n self.mdiArea = QtGui.QMdiArea()\n self.setCentralWidget(self.mdiArea)\n\n #Draw the Player View\n #rightDockWidget = QtGui.QDockWidget(\"Player\", self)\n #rightDockWidget.setAllowedAreas(QtCore.Qt.LeftDockWidgetArea | QtCore.Qt.RightDockWidgetArea)\n ##rightDockWidget.setWidget(self.player)\n #rightDockWidget.setFeatures(QtGui.QDockWidget.DockWidgetMovable | QtGui.QDockWidget.DockWidgetClosable)\n #self.addDockWidget(QtCore.Qt.RightDockWidgetArea, rightDockWidget)", "def __init__(self, master):\n self.ports = list(serial.tools.list_ports.comports())\n self.master = master # initialize Root window\n self.master.title('Zeng.ltd Dashboard') # set title for Root window\n self.master.geometry(\"1000x700\") # set size and location for Root window\n self.master.configure(background=\"lightgrey\")\n self.mainframe = ttk.Notebook(self.master,\n padding=\"0 0 0 0\") # left top right bottem create mainframe in Root winow\n self.mainframe.grid(column=0, row=0, sticky=(N, W, S, E)) # set mainframe to root windows size\n self.master.protocol(\"WM_DELETE_WINDOW\", lambda: self.quit())\n\n menubar = Menu(self.master) # create a menubar\n\n # display the menu\n self.master.config(menu=menubar)\n\n # List of all our views\n self.views = {} # Empty dict\n\n # The back-end process\n self.pyctrl = PyCtrl(self)\n self.pyctrl.start()", "def widgetSetup(self):\n self.master.resizable(0, 0)\n self.master.iconbitmap('logo.ico')\n self.master.title(\"Ejercicio POO\")\n\n self.master.bind(\"<Return>\", lambda e: self.create())\n self.master.bind(\"<Delete>\", lambda e: self.delete())", "def __init__(self, windowId, windowTitle, windowDimensions):\n # check if another copy of the window exists\n if cmds.window(windowId, exists=True):\n cmds.deleteUI(windowId)\n\n # load furtility\n mel.eval(\"ta_loadFurtilityAndMPCSolver()\")\n\n # initialize main window features\n super(nHairToolset, self).__init__(getMayaMainPtr())\n self.setObjectName(windowId)\n self.setWindowTitle(windowTitle)\n 
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)\n self.setFixedSize(windowDimensions[0], windowDimensions[1])\n self.centerWindow()\n # style setup\n self.style = QtGui.QStyleFactory.create('plastique')\n self.setStyle(self.style)\n\n # add layout and widgets\n centerWidget = QtGui.QWidget(self)\n # setup central widget to hold all elements\n centerWidget.setLayout(QtGui.QVBoxLayout(centerWidget))\n self.setCentralWidget(centerWidget)\n\n # create the tab widget to hold the tabs\n innerTabs = QtGui.QTabWidget()\n innerTabs.setFocusPolicy(QtCore.Qt.NoFocus)\n centerWidget.layout().addWidget(innerTabs)\n\n # first Tab\n mainControlsTab = QtGui.QWidget()\n mainControlsTab.setLayout(QtGui.QHBoxLayout())\n mainControlsTab.layout().setAlignment(QtCore.Qt.AlignLeft)\n mainControlsTab.layout()\n #----------------------------------------------------------------------------------------------------------#\n # first column for the main tab\n mainTabColumn1_Layout = QtGui.QVBoxLayout()\n mainTabColumn1_Layout.setAlignment(QtCore.Qt.AlignTop)\n innerTabs.addTab(mainControlsTab, 'Main Functions')\n\n # rigFix options group box\n rigFxGrpBox = customGroupBox('RigFx Options', (240,160), self.style.objectName(), 0, 0)\n rigFixNameField = self.createLabeledNameField('Name: ', 'enter a rigFx name...')\n buildRigFxBtn = QtGui.QPushButton('Build RigFx')\n updateSetsBtn = QtGui.QPushButton('Update Sets')\n motionMultBtn = QtGui.QPushButton('MotionMultiplier')\n # populate rigFx groupBox\n rigFxGrpBox.layout().addLayout(rigFixNameField)\n rigFxGrpBox.layout().addWidget(buildRigFxBtn)\n rigFxGrpBox.layout().addWidget(updateSetsBtn)\n rigFxGrpBox.layout().addWidget(motionMultBtn)\n rigFxGrpBox.layout().addStretch(True)\n\n # nHair groups options groupBox\n nHairGroupsGrpBox = customGroupBox('nHair Groups', (240,330), self.style.objectName(), 0, 0)\n nHairGroupList = QtGui.QListWidget()\n nHairGroupCreationField = self.createLabeledNameField('Group Name:', 'name your group...')\n nHairGroupCreateBtn = QtGui.QPushButton('Create')\n nHairGroupDeleteBtn = QtGui.QPushButton('Delete')\n nHairGroupControlsBtn = QtGui.QPushButton('Create Controls')\n nHairGroupWindControlsBtn = QtGui.QPushButton('Create Wind Controls')\n # add the nHair group elements to the corresponding groupbox\n nHairGroupsGrpBox.layout().addWidget(nHairGroupList)\n nHairGroupsGrpBox.layout().addLayout(nHairGroupCreationField)\n nHairGroupsGrpBox.layout().addWidget(nHairGroupCreateBtn)\n nHairGroupsGrpBox.layout().addWidget(nHairGroupDeleteBtn)\n nHairGroupsGrpBox.layout().addWidget(nHairGroupControlsBtn)\n nHairGroupsGrpBox.layout().addWidget(nHairGroupWindControlsBtn)\n\n # little nucleus groupboxer\n nucleusGroupBox = customGroupBox('Nucleus', (240, 48), self.style.objectName(), 0, 0, QtGui.QHBoxLayout())\n nucleusGroupBox.layout().setAlignment(QtCore.Qt.AlignCenter)\n nucleusSateLabel = QtGui.QLabel('State: ')\n nucleusOnRadioBtn = QtGui.QRadioButton('On')\n nucleusOffRadioBtn = QtGui.QRadioButton('Off')\n nucleusOnRadioBtn.setChecked(True)\n # add widgets to groupBox\n nucleusGroupBox.layout().addWidget(nucleusSateLabel)\n nucleusGroupBox.layout().addItem(QtGui.QSpacerItem(20,2))\n nucleusGroupBox.layout().addWidget(nucleusOnRadioBtn)\n nucleusGroupBox.layout().addItem(QtGui.QSpacerItem(20,2))\n nucleusGroupBox.layout().addWidget(nucleusOffRadioBtn)\n\n # nHair groupBox\n nHairToolBox = customGroupBox('nHair Tools', [240,200], self.style.objectName(), 0, 0, QtGui.QGridLayout())\n nHairToolBox.layout().setAlignment(QtCore.Qt.AlignTop)\n createHairBtn 
= customIconButton(':/hairCreate.png',(40,40), 'Create Hair')\n paintHairBtn = customIconButton(':/hairPaint.png',(40,40), 'Paint Hair Tool')\n mkDynCurvesBtn = customIconButton(':/hairDynamicCurves.png',(40,40), 'Make Selected Curves Dynamic')\n interactiveBtn = customIconButton(':/interactivePlayback.png',(40,40), 'Interactive Playback')\n currentPosDispBtn = customIconButton(':/hairDisplayCurrent.png',(40,40), 'Display Current Position')\n startPosDispBtn = customIconButton(':/hairDisplayStart.png',(40,40), 'Display Start Position')\n restPosDispBtn = customIconButton(':/hairDisplayRest.png',(40,40), 'Display Rest Position')\n nCacheCreateBtn = customIconButton(':/nClothCacheCreate.png',(40,40), 'Create nCache')\n nCacheDeleteBtn = customIconButton(':/nClothCacheDelete.png',(40,40), 'Delete nCache')\n passiveColliderBtn = customIconButton(':/nClothCreatePassive.png',(40,40), 'Delete nCache')\n # add them to groupBox\n nHairToolBox.layout().addWidget(createHairBtn, 0,0)\n nHairToolBox.layout().addWidget(paintHairBtn, 0,1)\n nHairToolBox.layout().addWidget(mkDynCurvesBtn, 0,2)\n nHairToolBox.layout().addWidget(interactiveBtn, 0,3)\n nHairToolBox.layout().addWidget(currentPosDispBtn, 0,4)\n nHairToolBox.layout().addWidget(startPosDispBtn, 1,0)\n nHairToolBox.layout().addWidget(restPosDispBtn, 1,1)\n nHairToolBox.layout().addWidget(nCacheCreateBtn, 1,2)\n nHairToolBox.layout().addWidget(nCacheDeleteBtn, 1,3)\n nHairToolBox.layout().addWidget(passiveColliderBtn, 1,4)\n\n # add the first column widgets to the corresponding layout\n mainTabColumn1_Layout.addWidget(rigFxGrpBox)\n mainTabColumn1_Layout.addWidget(nHairGroupsGrpBox)\n mainTabColumn1_Layout.addWidget(nucleusGroupBox)\n mainTabColumn1_Layout.addWidget(nHairToolBox)\n\n #----------------------------------------------------------------------------------------------------------#\n # second column for the main tab\n mainTabColumn2_Layout = QtGui.QVBoxLayout()\n mainTabColumn2_Layout.setAlignment(QtCore.Qt.AlignTop)\n\n # control list set\n controlsGroupBox = customGroupBox('Controls', (220, 160), self.style.objectName())\n controlList = QtGui.QListWidget()\n # add the control list to the groupbox\n controlsGroupBox.layout().addWidget(controlList)\n\n # dynamic node list\n dynamicNodesGroupBox = customGroupBox('Dynamic nodes', (220, 160), self.style.objectName())\n dynamicNodeList = QtGui.QListWidget()\n # add the dynamic node list to the groupbox\n dynamicNodesGroupBox.layout().addWidget(dynamicNodeList)\n\n # collider and constraint nodes list\n colConstGroupBox = customGroupBox('Colliders and Consraints', (220, 160), self.style.objectName())\n colConstNodeList = QtGui.QListWidget()\n # add the colliders and constraints list to the groupbox\n colConstGroupBox.layout().addWidget(colConstNodeList)\n\n ## force nodes list\n #forcesGroupBox = customGroupBox('Forces', (200, 140), self.style.objectName())\n #forcesNodeList = QtGui.QListWidget()\n ## add the force list to the groupbox\n #forcesGroupBox.layout().addWidget(forcesNodeList)\n\n # add the second column widgets to the second column\n mainTabColumn2_Layout.addWidget(controlsGroupBox)\n mainTabColumn2_Layout.addWidget(dynamicNodesGroupBox)\n mainTabColumn2_Layout.addWidget(colConstGroupBox)\n #----------------------------------------------------------------------------------------------------------#\n\n # add the main tab columns to the first tab\n mainControlsTab.layout().addLayout(mainTabColumn1_Layout)\n mainControlsTab.layout().addItem(QtGui.QSpacerItem(20,0))\n 
mainControlsTab.layout().addLayout(mainTabColumn2_Layout)\n\n # second tab\n secondaryControlsTab = QtGui.QWidget(innerTabs)\n innerTabs.addTab(secondaryControlsTab, 'Secondary')", "def set_dock_area(self, dock_area):\n self.widget.SetDockArea(_DOCK_AREA_MAP[dock_area])", "def Dock(self):\r\n\r\n if self.IsNotebookPage():\r\n self.notebook_id = -1\r\n self.dock_direction = AUI_DOCK_NONE\r\n \r\n return self.SetFlag(self.optionFloating, False)", "def create_widget(self):\n self.widget = wxDockPane(self.parent_widget())", "def NotebookControl(self, id):\r\n\r\n self.notebook_id = id\r\n self.window = None\r\n self.buttons = []\r\n \r\n if self.dock_direction == AUI_DOCK_NOTEBOOK_PAGE:\r\n self.dock_direction = AUI_DOCK_NONE\r\n \r\n return self", "def OnDocked(self, event):\n self._floating = False\n self._dock_area = event.GetPane().dock_direction\n wx.PostEvent(self, wxDockPaneDockedEvent())", "def test_vs_docking():\n vs = virtualscreening(n_cpu=-1)\n vs.load_ligands('sdf', os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'))\n vs.dock(engine='autodock_vina',\n protein=os.path.join(test_data_dir, 'data/dude/xiap/receptor_rdkit.pdb'),\n auto_ligand=os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'),\n exhaustiveness=1,\n seed=0)\n mols = list(vs.fetch())\n assert_equal(len(mols), 3)\n mol_data = mols[0].data\n assert_in('vina_affinity', mol_data)\n assert_in('vina_rmsd_lb', mol_data)\n assert_in('vina_rmsd_ub', mol_data)", "def Center(self):\r\n\r\n self.dock_direction = AUI_DOCK_CENTER\r\n return self", "def onDockClosed(self): # used when Dock dialog is closed\n self.profile_dock = None", "def __init__(self):\n self.stack = QWidget()", "def __init__(self):\n # Root window\n self.root = tk.Tk()\n self.root.title(\"Crossword\")\n # Padding frame\n self.frame = tk.Frame(self.root)\n self.frame.pack(fill=\"both\", padx=PAD, pady=PAD)\n # Initialize widget groups\n self.header = HeaderView(self)\n self.puzzle = PuzzleView(self)\n self.clues = CluesView(self)\n # Show widgets\n self.header.show()\n self.puzzle.show()\n self.clues.show()", "def Layer(self, layer):\r\n \r\n self.dock_layer = layer\r\n return self", "def Center(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTER\r\n return self", "def Centre(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTRE\r\n return self", "def Centre(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTRE\r\n return self", "def __init__(self, controller, target_configuration, title, width, height, force_focus = True):\n\n\t\tConfigurationWindow.__init__(self, controller, target_configuration, title, width, height, force_focus)\n\n\t\tself.controller = controller\n\n\t\t# Display on/off toggle variables\n\t\tself.show_title = BooleanVar()\n\t\tif self.controller.plot_configuration.get_property('show_title') == True: self.show_title.set(True)\n\t\telse: self.show_title.set(False)\n\n\t\t# Chart title\n\t\tself.title_label = Label(self, text = \"Title:\")\n\t\tself.title_label.grid(row = 0, column = 0)\n\n\t\tself.title_checkbox = Checkbutton(self, text = \"Display\", var = self.show_title, onvalue = True, offvalue = False)\n\t\tif self.show_title.get(): self.title_checkbox.select()\n\t\tself.title_checkbox.grid(row = 0, column = 1)\n\n\t\tself.title_field = Entry(self)\n\t\tself.title_field.insert(0, self.controller.plot_configuration.get_property('title'))\n\t\tself.title_field.grid(row = 0, column = 2)\n\n\t\t# Plot width\n\t\tself.plot_width_label = Label(self, text = \"Plot Width (200 - 
1000):\")\n\t\tself.plot_width_label.grid(row = 1, column = 0)\n\t\tself.plot_width_field = Entry(self)\n\t\tself.plot_width_field.insert(0, self.controller.plot_configuration.get_property('plot_width'))\n\t\tself.plot_width_field.grid(row = 1, column = 2)\n\n\t\t# Plot height\n\t\tself.plot_height_label = Label(self, text = \"Plot Height (200 - 1000):\")\n\t\tself.plot_height_label.grid(row = 2, column = 0)\n\t\tself.plot_height_field = Entry(self)\n\t\tself.plot_height_field.insert(0, self.controller.plot_configuration.get_property('plot_height'))\n\t\tself.plot_height_field.grid(row = 2, column = 2)\n\n\t\t# Margin top\n\t\tself.margin_top_label = Label(self, text = \"Top Margin (15 - 200):\")\n\t\tself.margin_top_label.grid(row = 3, column = 0)\n\t\tself.margin_top_field = Entry(self)\n\t\tself.margin_top_field.insert(0, self.controller.plot_configuration.get_property('margin_top'))\n\t\tself.margin_top_field.grid(row = 3, column = 2)\n\n\t\t# Margin left and right\n\t\tself.margin_left_label = Label(self, text = \"Left and Right Margins (15 - 200):\")\n\t\tself.margin_left_label.grid(row = 4, column = 0)\n\t\tself.margin_left_field = Entry(self)\n\t\tself.margin_left_field.insert(0, self.controller.plot_configuration.get_property('margin_left'))\n\t\tself.margin_left_field.grid(row = 4, column = 1)\n\t\tself.margin_right_field = Entry(self)\n\t\tself.margin_right_field.insert(0, self.controller.plot_configuration.get_property('margin_right'))\n\t\tself.margin_right_field.grid(row = 4, column = 3)\n\n\t\t# Margin bottom\n\t\tself.margin_bottom_label = Label(self, text = \"Bottom Margin (15 - 200):\")\n\t\tself.margin_bottom_label.grid(row = 5, column = 0)\n\t\tself.margin_bottom_field = Entry(self)\n\t\tself.margin_bottom_field.insert(0, self.controller.plot_configuration.get_property('margin_bottom'))\n\t\tself.margin_bottom_field.grid(row = 5, column = 2)\n\n\t\t# Save and cancel buttons\n\t\tsave_button = Button(self, text = \"Save and Update\", command = self.validate)\n\t\tsave_button.grid(row = 6, column = 1)\n\t\tcancel_button = Button(self, text = \"Cancel\", command = self.destroy)\n\t\tcancel_button.grid(row = 6, column = 2)", "def __init__(self, parent=None):\n super(MainWindow, self).__init__(parent)\n self.setupUi(self)\n \n self.setWindowTitle(\"RPI HMI - pH Debug\") # Title creation", "def Bottom(self):\r\n\r\n self.dock_direction = AUI_DOCK_BOTTOM\r\n return self", "def Bottom(self):\r\n\r\n self.dock_direction = AUI_DOCK_BOTTOM\r\n return self", "def SetDockPos(self, source):\r\n \r\n self.dock_direction = source.dock_direction\r\n self.dock_layer = source.dock_layer\r\n self.dock_row = source.dock_row\r\n self.dock_pos = source.dock_pos\r\n self.dock_proportion = source.dock_proportion\r\n self.floating_pos = wx.Point(*source.floating_pos)\r\n self.floating_size = wx.Size(*source.floating_size)\r\n self.rect = wx.Rect(*source.rect)\r\n \r\n return self", "def onInvoke():\n if dock.isVisible():\n dock.toggleViewAction().trigger()\n else:\n dock.setFloating(True)\n pos = QtGui.QCursor.pos()\n dock.move(pos.x() - dock.size().width() / 2,\n pos.y() - dock.size().height() / 2)\n dock.setVisible(True)", "def createMirrorOfModule_UI(self):\n\n # copy the settings of the module\n self.copySettings()\n\n # get basename and classname\n networkNode = self.returnNetworkNode\n baseName = cmds.getAttr(networkNode + \".baseName\")\n className = cmds.getAttr(networkNode + \".moduleType\")\n\n # launch a UI to get the name information\n self.mirrorWindow = QtWidgets.QMainWindow()\n\n 
# load stylesheet\n styleSheetFile = utils.returnNicePath(self.toolsPath, \"Core/Scripts/Interfaces/StyleSheets/mainScheme.qss\")\n f = open(styleSheetFile, \"r\")\n style = f.read()\n f.close()\n\n self.mirrorWindow.setStyleSheet(style)\n\n # size policies\n mainSizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n\n # create the main widget\n self.mainWidget = QtWidgets.QWidget()\n self.mirrorWindow.setCentralWidget(self.mainWidget)\n\n # set qt object name\n self.mirrorWindow.setObjectName(\"ART_createMirrorModuleUI\")\n self.mirrorWindow.setWindowTitle(\"Create Mirror Module\")\n\n # create the mainLayout for the rig creator UI\n self.mainLayout = QtWidgets.QVBoxLayout(self.mainWidget)\n self.mainLayout.setContentsMargins(0, 0, 0, 0)\n\n self.mirrorWindow.resize(300, 150)\n self.mirrorWindow.setSizePolicy(mainSizePolicy)\n self.mirrorWindow.setMinimumSize(QtCore.QSize(300, 150))\n self.mirrorWindow.setMaximumSize(QtCore.QSize(300, 150))\n\n # create the background image\n self.frame = QtWidgets.QFrame()\n self.mainLayout.addWidget(self.frame)\n\n # create the layout for the widgets\n self.widgetLayout = QtWidgets.QVBoxLayout(self.frame)\n\n # create the prefix pair of fields\n self.prefixForm = QtWidgets.QFormLayout()\n self.widgetLayout.addLayout(self.prefixForm)\n\n self.prefixLabel = QtWidgets.QLabel(\"Prefix: \")\n self.prefixForm.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.prefixLabel)\n\n self.prefix = QtWidgets.QLineEdit()\n self.prefixForm.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.prefix)\n\n # hookup signal/slot connection\n self.prefix.textChanged.connect(partial(self.updatePreview, baseName))\n\n # create the suffix pair of fields\n self.suffixForm = QtWidgets.QFormLayout()\n self.widgetLayout.addLayout(self.suffixForm)\n\n self.suffixLabel = QtWidgets.QLabel(\"Suffix: \")\n self.suffixForm.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.suffixLabel)\n\n self.suffix = QtWidgets.QLineEdit()\n self.suffixForm.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.suffix)\n\n # hookup signal/slot connection\n self.suffix.textChanged.connect(partial(self.updatePreview, baseName))\n\n # spacer\n spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)\n self.widgetLayout.addItem(spacerItem)\n\n # realtime preview of final module name\n self.previewForm = QtWidgets.QFormLayout()\n self.widgetLayout.addLayout(self.previewForm)\n self.previewLabel = QtWidgets.QLabel(\"Preview: \")\n self.previewName = QtWidgets.QLabel(baseName)\n self.previewName.setMinimumSize(QtCore.QSize(200, 20))\n self.previewName.setMaximumSize(QtCore.QSize(200, 20))\n self.previewName.setAlignment(QtCore.Qt.AlignHCenter)\n self.previewForm.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.previewLabel)\n self.previewForm.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.previewName)\n\n # set preview font\n font = QtGui.QFont()\n font.setPointSize(12)\n self.previewName.setFont(font)\n\n spacerItem1 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)\n self.widgetLayout.addItem(spacerItem1)\n\n # create button\n self.createButton = QtWidgets.QPushButton(\"CREATE\")\n self.createButton.setObjectName(\"blueButton\")\n self.widgetLayout.addWidget(self.createButton)\n self.createButton.setMinimumSize(QtCore.QSize(285, 40))\n self.createButton.setMaximumSize(QtCore.QSize(285, 40))\n self.createButton.setSizePolicy(mainSizePolicy)\n font = QtGui.QFont()\n 
font.setPointSize(12)\n self.createButton.setFont(font)\n\n # hookup signal/slot on create button\n self.createButton.clicked.connect(self.createMirrorModule)\n\n # show the window\n self.mirrorWindow.show()", "def dock(self):\n if not self.dockingClient.gh or not self.dockingClient.get_state() in (GoalStatus.SUCCEEDED, GoalStatus.PENDING, GoalStatus.ACTIVE):\n self.dockingClient.send_goal(AutoDockingGoal()) #TODO test if parameter is required\n rospy.loginfo(self.name + \": docking\") \n if self.dockingClient.get_state() == GoalStatus.SUCCEEDED: \n self.dockingClient.stop_tracking_goal()\n rospy.loginfo(self.name + \": docking succeeded\")\n self.docked = True \n return True\n return False", "def main():\r\n root = tk.Tk()\r\n app = Home(root)\r\n root.geometry(app.resize())\r\n root.configure(background = jt.color_background)\r\n root.mainloop()", "def gui(self):\n return gui", "def __create_ui(self):\n vbox = gtk.VBox()\n\n # Create the viewable area of the file browser\n self.__view_port = gtk.ScrolledWindow()\n self.__view_port.set_policy(gtk.POLICY_AUTOMATIC,\n gtk.POLICY_AUTOMATIC)\n # Create the tree view and add it to the viewable area\n self.__tree_view = ProjectTreeView()\n self.__project_explorer = ProjectExplorer(self.window, self.__tree_view)\n self.__tree_view.connect('button_press_event',\n self.__on_treeview_button_press_event)\n self.__project_explorer.set_repository()\n self.__view_port.add(self.__tree_view)\n # Create the toolbar\n hbox = gtk.HBox()\n toolbar = gtk.Toolbar()\n toolbar.set_style(gtk.TOOLBAR_ICONS)\n toolbar.set_icon_size(gtk.ICON_SIZE_MENU)\n back = gtk.ToolButton(gtk.STOCK_GO_UP)\n back.connect('clicked', self.__on_back_clicked)\n toolbar.insert(back, 0)\n toolbar.insert(gtk.SeparatorToolItem(), 1)\n refresh = gtk.ToolButton(gtk.STOCK_REFRESH)\n refresh.connect('clicked', self.__on_refresh_clicked)\n toolbar.insert(refresh, 2)\n hbox.pack_start(toolbar, True, True, 0)\n vbox.pack_start(hbox, False, False, 0)\n vbox.pack_start(self.__view_port, True, True, 0)\n\n # Setup the create the buttons for:\n # New File, New Folder\n # ----------------------------------------------------------------------\n hbox1 = gtk.VBox()\n toolbar_actions = gtk.Toolbar()\n toolbar_actions.set_style(gtk.TOOLBAR_ICONS)\n toolbar_actions.set_icon_size(gtk.ICON_SIZE_MENU)\n new_file = gtk.ToolButton(gtk.STOCK_NEW)\n new_file.connect('clicked', self.__on_new_file_clicked_cb)\n toolbar_actions.insert(new_file, 0)\n new_dir = gtk.ToolButton(gtk.STOCK_OPEN) # TODO: use a custom icon\n new_dir.connect('clicked', self.__on_new_dir_clicked_cb)\n toolbar_actions.insert(new_dir, 1)\n hbox1.pack_start(gtk.HSeparator(), True, True, 0)\n hbox1.pack_start(toolbar_actions, True, True, 0)\n vbox.pack_end(hbox1, False, False, 0)\n # ----------------------------------------------------------------------\n vbox.show_all()\n # Attach the project explorer to GMate's side panel\n self.__side_panel = self.window.get_side_panel()\n self.__side_panel.add_tab(vbox, msg0005, gtk.STOCK_HARDDISK)", "def SetDockArea(self, dock_area):\n if self._dock_area != dock_area:\n self._dock_area = dock_area\n def closure(pane):\n pane.Direction(dock_area)\n self._PaneInfoOperation(closure)", "def create_risk_analysis_page(self, notebook):\r\n\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n # Build-up the containers for the tab. 
#\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n _hpaned = gtk.HPaned()\r\n self.pack1(_hpaned, resize=True, shrink=True)\r\n\r\n # Create the organizational risk pane.\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Organization\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack1(_frame, True, True)\r\n\r\n _labels = [_(u\"1. There are separate design and coding \"\r\n u\"organizations.\"),\r\n _(u\"2. There is an independent software test \"\r\n u\"organization.\"),\r\n _(u\"3. There is an independent software quality \"\r\n u\"assurance organization.\"),\r\n _(u\"4. There is an independent software configuration \"\r\n u\"management organization.\"),\r\n _(u\"5. There is an independent software verification \"\r\n u\"and validation organization.\"),\r\n _(u\"6. A structured programming team will develop the \"\r\n u\"software.\"),\r\n _(u\"7. The educational level of the software team members \"\r\n u\"is above average.\"),\r\n _(u\"8. The experience level of the software team members \"\r\n u\"is above average.\")]\r\n (_x_pos,\r\n _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n _x_pos += 125\r\n\r\n _fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])\r\n\r\n # Create the methods risk pane.\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Methods\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack2(_frame, True, True)\r\n\r\n _labels = [_(u\"1. Standards are defined and will be enforced.\"),\r\n _(u\"2. Software will be developed using a higher order \"\r\n u\"language.\"),\r\n _(u\"3. The development process will include formal \"\r\n u\"reviews (PDR, CDR, etc.).\"),\r\n _(u\"4. The development process will include frequent \"\r\n u\"walkthroughs.\"),\r\n _(u\"5. Development will take a top-down and \"\r\n u\"structured approach.\"),\r\n _(u\"6. Unit development folders will be used.\"),\r\n _(u\"7. A software development library will be used.\"),\r\n _(u\"8. A formal change and error reporting process \"\r\n u\"will be used.\"),\r\n _(u\"9. 
Progress and status will routinely be \"\r\n u\"reported.\")]\r\n (__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n\r\n _fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])\r\n _fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])\r\n\r\n # Create the documentation risk pane.\r\n _hpaned = gtk.HPaned()\r\n self.pack2(_hpaned, resize=True, shrink=True)\r\n\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Documentation\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack1(_frame, True, True)\r\n\r\n _labels = [_(u\" 1. System requirements specifications will be \"\r\n u\"documented.\"),\r\n _(u\" 2. Software requirements specifications will be \"\r\n u\"documented.\"),\r\n _(u\" 3. Interface design specifications will be \"\r\n u\"documented.\"),\r\n _(u\" 4. Software design specification will be \"\r\n u\"documented.\"),\r\n _(u\" 5. Test plans, procedures, and reports will be \"\r\n u\"documented.\"),\r\n _(u\" 6. The software development plan will be \"\r\n u\"documented.\"),\r\n _(u\" 7. The software quality assurance plan will be \"\r\n u\"documented.\"),\r\n _(u\" 8. The software configuration management plan will \"\r\n u\"be documented.\"),\r\n _(u\" 9. A requirements traceability matrix will be \"\r\n u\"used.\"),\r\n _(u\"10. The software version description will be \"\r\n u\"documented.\"),\r\n _(u\"11. All software discrepancies will be \"\r\n u\"documented.\")]\r\n (__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n\r\n _fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])\r\n _fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])\r\n _fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])\r\n _fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])\r\n\r\n # Create the tools and test techniques risk pane.\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Tools &amp; Test Techniques\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack2(_frame, True, True)\r\n\r\n _labels = [_(u\" 1. The software language requirements will be \"\r\n u\"specified.\"),\r\n _(u\" 2. Formal program design language will be used.\"),\r\n _(u\" 3. Program design graphical techniques \"\r\n u\"(flowcharts, HIPO, etc.) will be used.\"),\r\n _(u\" 4. Simulation/emulation tools will be used.\"),\r\n _(u\" 5. Configuration management tools will be used.\"),\r\n _(u\" 6. 
A code auditing tool will be used.\"),\r\n _(u\" 7. A data flow analyzer will be used.\"),\r\n _(u\" 8. A programmer's workbench will be used.\"),\r\n _(u\" 9. Measurement tools will be used.\"),\r\n _(u\"10. Software code reviews will be used.\"),\r\n _(u\"11. Software branch testing will be used.\"),\r\n _(u\"12. Random testing will be used.\"),\r\n _(u\"13. Functional testing will be used.\"),\r\n _(u\"14. Error and anomaly detection testing will be \"\r\n u\"used.\"),\r\n _(u\"15. Structure analysis will be used.\")]\r\n (__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n\r\n _fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])\r\n _fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])\r\n _fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])\r\n _fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])\r\n _fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])\r\n _fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])\r\n _fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])\r\n _fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])\r\n\r\n _label = gtk.Label()\r\n _label.set_markup(\"<span weight='bold'>\" +\r\n _(u\"Development\\nEnvironment\") +\r\n \"</span>\")\r\n _label.set_alignment(xalign=0.5, yalign=0.5)\r\n _label.set_justify(gtk.JUSTIFY_CENTER)\r\n _label.set_angle(0)\r\n _label.show_all()\r\n _label.set_tooltip_text(_(u\"Assesses risk due to the development \"\r\n u\"environment.\"))\r\n notebook.insert_page(self, tab_label=_label, position=-1)\r\n\r\n return False", "def __init__(self, simulator, display, control=None, **kwargs):\n super(ZasimMainWindow, self).__init__(**kwargs)\n\n self.setAttribute(Qt.WA_DeleteOnClose)\n\n self.simulator = simulator\n self.display = display\n self.control = control\n\n central_widget = QWidget(self)\n\n if self.control is None:\n self.control = ControlWidget(self.simulator, parent=central_widget)\n\n layout = QVBoxLayout(central_widget)\n\n sim_name = QLabel(str(self.simulator), self)\n # make text selectable and links (if any) clickable\n sim_name.setTextInteractionFlags(Qt.TextBrowserInteraction)\n # there are some nasty long names if base gets bigger than 2.\n sim_name.setWordWrap(True)\n\n layout.addWidget(sim_name)\n\n scroller = QScrollArea()\n scroller.setWidget(self.display)\n\n layout.addWidget(scroller)\n layout.addWidget(self.control)\n self.control.setObjectName(\"control\")\n\n self.setCentralWidget(central_widget)\n\n self.setup_menu()\n\n self.elementary_tool = None\n #self.comp_dlg = None\n self.new_dlg = None\n\n self.resetter = ResetDocklet(self)\n self.addDockWidget(Qt.RightDockWidgetArea, self.resetter)", "def SetDockWidget(self, widget):\n old_widget = self._dock_widget\n if old_widget:\n old_widget.Hide()\n self._dock_widget = widget\n self.GetSizer().Add(widget)\n self.UpdateSizing()", "def init_ui(self):\n self.parent.title(\"Roku Player Controller\")\n self.style.theme_use(\"default\")", "def Show(self, show=True):\r\n \r\n super(AuiDockingHintWindow, self).Show(show)\r\n if wx.Platform == '__WXMAC__':\r\n # Need to manually do layout since its a borderless frame.\r\n self.Layout()", "def __init__(self, master, **kwargs):\n GenericWindow.__init__(self, master, 
**kwargs)\n self.states = None\n self.master = master\n self.display()", "def _update_proxy(self, change):\n # The superclass implementation is sufficient.\n super(DockArea, self)._update_proxy(change)", "def onWorkbench():\n workbench = Gui.activeWorkbench().__class__.__name__\n\n if layout:\n while not layout.isEmpty():\n item = layout.takeAt(0)\n del item\n\n buttons = cpcmd.workbenchButtons(workbench)\n\n if p.GetString(\"Layout\") == \"Grid\":\n columns = p.GetInt(\"ColumnNumber\", 1) - 1\n x = 0\n y = 0\n for btn in buttons:\n if y > columns:\n y = 0\n x += 1\n layout.addWidget(btn, x, y)\n y += 1\n else:\n for btn in buttons:\n layout.addWidget(btn)", "def createWindow(self):\r\n\t\t# give the window a title\r\n\t\tself.parent.title( 'Acrobat Data Acquisition')\r\n\t\t# set the style\r\n\t\tself.style = ttk.Style()\r\n\t\tself.style.theme_use('default')\r\n\t\tself.pack(fill= tk.BOTH, expand=1)", "def build_workspace_control_ui(shotgun_panel_name):\n\n from maya.OpenMayaUI import MQtUtil\n\n # In the context of this function, we know that we are running in Maya 2017 and later\n # with the newer versions of PySide and shiboken.\n from PySide2 import QtWidgets\n from shiboken2 import wrapInstance\n\n import sgtk.platform\n\n # Retrieve the Maya engine.\n engine = sgtk.platform.current_engine()\n\n # Retrieve the calling Maya workspace control.\n ptr = MQtUtil.getCurrentParent()\n workspace_control = wrapInstance(long(ptr), QtWidgets.QWidget)\n\n # Search for the Shotgun app panel widget.\n for widget in QtWidgets.QApplication.allWidgets():\n if widget.objectName() == shotgun_panel_name:\n\n maya_panel_name = workspace_control.objectName()\n\n engine.logger.debug(\n \"Reparenting Shotgun app panel %s under Maya workspace panel %s.\",\n shotgun_panel_name,\n maya_panel_name,\n )\n\n # When possible, give a minimum width to the workspace control;\n # otherwise, it will use the width of the currently displayed tab.\n # Note that we did not use the workspace control \"initialWidth\" and \"minimumWidth\"\n # to set the minimum width to the initial width since these values are not\n # properly saved by Maya 2017 in its layout preference files.\n # This minimum width behaviour is consistent with Maya standard panels.\n size_hint = widget.sizeHint()\n if size_hint.isValid():\n # Use the widget recommended width as the workspace control minimum width.\n minimum_width = size_hint.width()\n engine.logger.debug(\n \"Setting Maya workspace panel %s minimum width to %s.\",\n maya_panel_name,\n minimum_width,\n )\n workspace_control.setMinimumWidth(minimum_width)\n else:\n # The widget has no recommended size.\n engine.logger.debug(\n \"Cannot set Maya workspace panel %s minimum width.\", maya_panel_name\n )\n\n # Reparent the Shotgun app panel widget under Maya workspace control.\n widget.setParent(workspace_control)\n\n # Add the Shotgun app panel widget to the Maya workspace control layout.\n workspace_control.layout().addWidget(widget)\n\n # Install an event filter on Maya workspace control to monitor\n # its close event in order to reparent the Shotgun app panel widget\n # under Maya main window for later use.\n engine.logger.debug(\n \"Installing a close event filter on Maya workspace panel %s.\",\n maya_panel_name,\n )\n panel_util.install_event_filter_by_widget(\n workspace_control, shotgun_panel_name\n )\n\n # Delete any leftover workspace control state to avoid a spurious deletion\n # of our workspace control when the user switches to another workspace and back.\n if 
cmds.workspaceControlState(maya_panel_name, exists=True):\n # Once Maya will have completed its UI update and be idle,\n # delete the leftover workspace control state.\n engine.logger.debug(\n \"Deleting leftover Maya workspace control state %s.\",\n maya_panel_name,\n )\n maya.utils.executeDeferred(\n cmds.workspaceControlState, maya_panel_name, remove=True\n )\n\n break\n else:\n # The Shotgun app panel widget was not found and needs to be recreated.\n\n # Search for the Shotgun app panel that needs to be restored\n # among the panels registered with the engine.\n for panel_id in engine.panels:\n\n # The name of the Qt widget at the root of the Shotgun app panel\n # was constructed by prepending to the panel unique identifier.\n if shotgun_panel_name.endswith(panel_id):\n\n # Once Maya will have completed its UI update and be idle,\n # recreate and dock the Shotgun app panel.\n maya.utils.executeDeferred(engine.panels[panel_id][\"callback\"])\n\n break\n else:\n # The Shotgun app panel that needs to be restored is not in the context configuration.\n engine.logger.error(\n \"Cannot restore %s: Shotgun app panel not found. \"\n \"Make sure the app is in the context configuration. \",\n shotgun_panel_name,\n )", "def DefaultPane(self):\r\n \r\n state = self.state \r\n state |= self.optionTopDockable | self.optionBottomDockable | \\\r\n self.optionLeftDockable | self.optionRightDockable | \\\r\n self.optionNotebookDockable | \\\r\n self.optionFloatable | self.optionMovable | self.optionResizable | \\\r\n self.optionCaption | self.optionPaneBorder | self.buttonClose\r\n\r\n self.state = state\r\n \r\n return self", "def __init__(self):\n self.view = GuiView(self)\n return", "def create(self, parent):\n self.widget = _QMainWindow(parent)", "def show(self, target=None, position=0):\n\n # if there is no Qt (eg, our UI framework...) then there is no UI\n if not QT_AVAILABLE:\n return\n\n # the UI has already been created, and is also visible. 
nothing to do\n if (self.dockable and self.dockable.visible):\n return\n\n #\n # if the UI has not yet been created, or has been previously closed\n # then we are free to create new UI elements to take the place of\n # anything that once was\n\n self.view = HexView(self, self.model)\n new_dockable = DockableWindow(self._title, self.view)\n\n #\n # if there is a reference to a left over dockable window (e.g, from a\n # previous close of this window type) steal its dock positon so we can\n # hopefully take the same place as the old one\n #\n\n if self.dockable:\n new_dockable.copy_dock_position(self.dockable)\n elif (target or position):\n new_dockable.set_dock_position(target, position)\n\n # make the dockable/widget visible\n self.dockable = new_dockable\n self.dockable.show()", "def main():\r\n root = tk.Tk()\r\n root.config(bg='gray40')\r\n root.geometry('800x400')\r\n # OpenUser('Christof', lambda: print('Hallo'), 'german')\r\n inter = User_Interface(root, 'Christof')\r\n inter.UserInterface.place(x=10, y=10)\r\n\r\n root.mainloop()", "def __init__(self, master=None, cnf={}, **kw):\n if kw:\n cnf = _cnfmerge((cnf, kw))\n extra = ()\n for wmkey in ['screen', 'class_', 'class', 'visual',\n 'colormap']:\n if wmkey in cnf:\n val = cnf[wmkey]\n # TBD: a hack needed because some keys\n # are not valid as keyword arguments\n if wmkey[-1] == '_': opt = '-'+wmkey[:-1]\n else: opt = '-'+wmkey\n extra = extra + (opt, val)\n del cnf[wmkey]\n BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)\n root = self._root()\n self.iconname(root.iconname())\n self.title(root.title())\n self.protocol(\"WM_DELETE_WINDOW\", self.destroy)", "def __init__(self, iface=None):\n parent = None if iface is None else iface.mainWindow()\n super(DockWidget, self).__init__(parent)\n # Set up the user interface from Designer.\n # After setupUI you can access any designer object by doing\n # self.<objectname>, and you can use autoconnect slots - see\n # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html\n # #widgets-and-dialogs-with-auto-connect\n\n self.config = QSettings()\n self.readconfig()\n\n self.setupUi(self)\n\n self.iface = iface", "def __init__(self, gui, rank, category, name, *args):\n CompWorkspace.__init__(self, gui, rank, category, 'lhc', name,\n [LHCWorkspace.CooldownView()])\n gui._addJSFragment(\"%s/javascript/Overview/Example.js\" % gui.contentpath)", "def widgets(self):\r\n self.setWindowTitle(\"PyCrypt\")\r\n self.setMinimumSize(QSize(500, 500))\r\n self.setMaximumSize(QSize(500, 500))\r\n# Adding the sub def for widgets etc\r\n self.add_menus_and_status()\r\n self.add_buttons()", "def dock_widget(self):\n d = self.declaration.dock_widget()\n if d is not None:\n return d.proxy.widget", "def auto_flexdock(self, binding_residues, radius, ligand_path=None, force_rerun=False):\n log.debug('\\n{}: running DOCK6...\\n'\n '\\tBinding residues: {}\\n'\n '\\tBinding residues radius: {}\\n'\n '\\tLigand to dock: {}\\n'.format(self.id, binding_residues, radius, op.basename(ligand_path)))\n\n self.dockprep(force_rerun=force_rerun)\n self.protein_only_and_noH(force_rerun=force_rerun)\n self.dms_maker(force_rerun=force_rerun)\n self.sphgen(force_rerun=force_rerun)\n self.binding_site_mol2(residues=binding_residues, force_rerun=force_rerun)\n self.sphere_selector_using_residues(radius=radius, force_rerun=force_rerun)\n self.showbox(force_rerun=force_rerun)\n self.grid(force_rerun=force_rerun)\n\n if ligand_path:\n self.do_dock6_flexible(ligand_path=ligand_path, force_rerun=force_rerun)", "def 
main():\n main = tk.Tk()\n\n # Add Title\n main.title(\"Space Craft Creator\")\n\n # Disable Resizing\n main.resizable(False, False)\n\n data = Data()\n\n spacecraft = Spacecraft(data)\n\n notebook = Notebook(main)\n\n spacecraft_tab = spacecraft.make_tab(notebook)\n\n notebook.add(spacecraft_tab, text=\"Spacecraft\")\n for key, subsystem in spacecraft.subsections.items():\n notebook.add(subsystem.make_tab(notebook), text=key)\n notebook.add(spacecraft.sizes.make_tab(notebook), text=\"Sizes\")\n notebook.add(spacecraft.velocities.make_tab(notebook), text=\"Velocity Profiles\")\n\n notebook.grid(column=0, row=0)\n notebook.enable_traversal()\n\n button = Button(main, text=\"Caclulate\", command=spacecraft.calculate)\n button.grid(column=0, row=1)\n\n main.mainloop()", "def __init__(self, inWindowTitleStr):\n super(MainWindow, self).__init__()\n self._mainWorkspace = None\n\n self.setWindowTitle(inWindowTitleStr)\n self.setGeometry(500, 100, 700, 900)\n\n self.mainWorkspace = workspace.WorkSpace(parent=self)", "def openCmdWindow(self): \n panel = Toplevel(self.root)\n panel.wm_title('Command Panel')\n\n # create text input entry\n text0 = tki.Label(panel,\n text='This Controller map keyboard inputs to Tello control commands\\n'\n 'Adjust the trackbar to reset distance and degree parameter',\n font='Helvetica 10 bold'\n )\n text0.pack(side='top')\n\n text1 = tki.Label(panel, text=\n 'W - Move Tello Up\\t\\t\\tArrow Up - Move Tello Forward\\n'\n 'S - Move Tello Down\\t\\t\\tArrow Down - Move Tello Backward\\n'\n 'A - Rotate Tello Counter-Clockwise\\tArrow Left - Move Tello Left\\n'\n 'D - Rotate Tello Clockwise\\t\\tArrow Right - Move Tello Right',\n justify='left')\n text1.pack(side='top')\n\n self.btn_landing = tki.Button(\n panel, text='Land', relief='raised', command=self.telloLanding)\n self.btn_landing.pack(side='bottom', fill='both',\n expand='yes', padx=10, pady=5)\n\n self.btn_takeoff = tki.Button(\n panel, text='Takeoff', relief='raised', command=self.telloTakeOff)\n self.btn_takeoff.pack(side='bottom', fill='both',\n expand='yes', padx=10, pady=5)\n\n # binding arrow keys to drone control\n self.tmp_f = tki.Frame(panel, width=100, height=2)\n self.tmp_f.bind('<KeyPress-w>', self.on_keypress_w)\n self.tmp_f.bind('<KeyPress-s>', self.on_keypress_s)\n self.tmp_f.bind('<KeyPress-a>', self.on_keypress_a)\n self.tmp_f.bind('<KeyPress-d>', self.on_keypress_d)\n self.tmp_f.bind('<KeyPress-Up>', self.on_keypress_up)\n self.tmp_f.bind('<KeyPress-Down>', self.on_keypress_down)\n self.tmp_f.bind('<KeyPress-Left>', self.on_keypress_left)\n self.tmp_f.bind('<KeyPress-Right>', self.on_keypress_right)\n self.tmp_f.pack(side='bottom')\n self.tmp_f.focus_set()\n\n self.btn_landing = tki.Button(\n panel, text='Flip', relief='raised', command=self.openFlipWindow)\n self.btn_landing.pack(side='bottom', fill='both',\n expand='yes', padx=10, pady=5)\n\n self.distance_bar = Scale(panel, from_=0.02, to=5, tickinterval=0.01, \n digits=3, label='Distance(m)',\n resolution=0.01)\n self.distance_bar.set(0.2)\n self.distance_bar.pack(side='left')\n\n self.btn_distance = tki.Button(panel, text='Reset Distance', relief='raised',\n command=self.updateDistancebar,\n )\n self.btn_distance.pack(side='left', fill='both',\n expand='yes', padx=10, pady=5)\n\n self.degree_bar = Scale(panel, from_=1, to=360, tickinterval=10, label='Degree')\n self.degree_bar.set(30)\n self.degree_bar.pack(side='right')\n\n self.btn_distance = tki.Button(panel, text='Reset Degree', relief='raised', \n command=self.updateDegreebar)\n 
self.btn_distance.pack(side='right', fill='both',\n expand='yes', padx=10, pady=5)", "def ui(self):\n return ui", "def __init__(self, master, **kwargs):\n GenericWindow.__init__(self, master, **kwargs)\n self.master = master\n self.all_players_created = False\n self.display()", "def __init__(self, master):\n\t\tFrame.__init__(self,master)\n\t\t\"\"\"Set the Window Title\"\"\"\n\t\tself.master.title(\"RXF Data Fit\")\n\t\tself.configure(height=200,width=200)\n\t\t\"\"\"Display the main window with a little bit of padding\"\"\"\n\t\tself.grid(padx=15, pady=15,sticky=N+S+E+W) \n\t\t#Create the Menu base\n\t\tself.menu = Menu(self)\n\t\t#Add the Menu\n\t\tself.master.config(menu=self.menu)\n\t\tself.menu.add_command(label=\"Open\", command=self.fileOpen)\n\t\tself.menu.add_command(label=\"Help\", command=self.Simple)\n\t\tself.menu.add_command(label=\"Quit\", command=self.exitProgram)\n\t\tself.pack()\n\t\tf = Figure(figsize=(5,4), dpi=100)\n\t\tcanvas=FigureCanvasTkAgg(f,master=root)\n\t\tcanvas.show()\n\t\tcanvas.get_tk_widget().pack(side=\"top\", fill=\"both\", expand=1)\n\t\ttoolbar = NavigationToolbar2TkAgg( canvas, root )\n\t\ttoolbar.update()\n\t\tcanvas._tkcanvas.pack(side=\"top\", fill=\"both\", expand=1)\t\t\n\n\n\t\txRangeLabel=Label(root,text=\"X Range\")\n\t\txRangeLabel.pack()\t\t\n\t\n\t\treplotButton=Button(root, text=\"Replot\", command=self.replot)\n\t\treplotButton.pack()\n\t\n\t\tclearButton=Button(root,text=\"Clear Plot\", command=self.clearPlot)\n\t\tclearButton.pack(padx=20,pady=5)", "def __init__(self, name, object):\n self.object = object\n self.scrolled_window = gtk.ScrolledWindow()\n self.scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n self.model = gtk.ListStore(gobject.TYPE_INT,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING,\n gobject.TYPE_STRING)\n self.model_sort = gtk.TreeModelSort(self.model)\n # self.model_sort.set_sort_column_id(0, gtk.SORT_ASCENDING)\n self.tree_view = gtk.TreeView(self.model_sort)\n self.add_column()\n self.scrolled_window.add(self.tree_view)\n self.row_reordered_signal = self.model_sort.connect(\"rows-reordered\", self.on_reordered)\n self.tree_view.connect('cursor-changed', self.on_cursor_changed)\n\n self.button = gtk.Button(\"X\")\n self.button.set_size_request(22, 15)\n self.button.connect(\"clicked\", self.on_tab_close)\n\n self.hbox = gtk.HBox()\n self.hbox.pack_start(gtk.Label(name), True, True, 0)\n self.hbox.pack_end(self.button, False, False, 0)\n self.hbox.show_all()", "def set_ui(self):\n\n self.setLayout(self.horizon_layout)\n self.setWindowTitle(\"数据采集\")\n self.setWindowIcon(self.Icon)\n self.setWindowState(Qt.WindowMaximized)\n # self.resize(self._size_of_x, self._size_of_y)\n\n # //-set left\n self.horizon_left_layout1.addWidget(self.ECG)\n self.horizon_left_layout1.addWidget(self.ECGWin)\n self.horizon_left_layout2.addWidget(self.Respiration)\n self.horizon_left_layout2.addWidget(self.RespirationWin)\n self.horizon_left_layout3.addWidget(self.PulseWave)\n self.horizon_left_layout3.addWidget(self.PulseWaveWin)\n # self.horizon_left_layout4.addWidget(self.SpO2)\n # self.horizon_left_layout4.addWidget(self.SpO2Win)\n\n # self.vertical_left_layout.addStretch(1)\n self.vertical_left_layout.addLayout(self.horizon_left_layout1)\n # self.vertical_left_layout.addStretch(1)\n self.vertical_left_layout.addLayout(self.horizon_left_layout2)\n # 
self.vertical_left_layout.addStretch(1)\n self.vertical_left_layout.addLayout(self.horizon_left_layout3)\n # self.vertical_left_layout.addStretch(1)\n # self.vertical_left_layout.addLayout(self.horizon_left_layout4)\n # self.vertical_left_layout.addStretch(1)\n\n # //-set right\n # self.vertical_right_layout.addStretch(1)\n self.vertical_right_layout.addWidget(self.save)\n self.vertical_right_layout.addWidget(self.clear)\n self.vertical_right_layout.addWidget(self.receive)\n self.vertical_right_layout.addStretch(1)\n self.vertical_right_layout.addWidget(self.exit)\n # self.vertical_right_layout.addStretch(1)\n\n # //-set layout\n # self.horizon_layout.addStretch(0)\n self.horizon_layout.addLayout(self.vertical_left_layout)\n # self.horizon_layout.addStretch(0)\n # self.horizon_layout.addWidget(self.dataWin)\n self.horizon_layout.addLayout(self.vertical_right_layout)", "def openRocConfig(self):\n self.rocConfig_Window = QtWidgets.QDialog()\n self.rocConfig_ui = Ui_rocConfigure()\n self.rocConfig_ui.setupUi(self.rocConfig_Window)\n self.rocConfig_Window.show()", "def build_frames(self):\n self.cntrl_frame = tk.PanedWindow(self.root)\n self.cntrl_frame.pack(side = tk.TOP, padx = 1, pady = 1, fill = tk.Y)\n self.info_frame_1 = tk.PanedWindow(self.root)\n self.info_frame_1.pack(side = tk.TOP, padx = 1, pady = 2, fill = tk.Y)", "def __init__(self, parent=None):\n self._window = None\n\n self.setup_ui()", "def createUI(self):\n\n q.getQItem(windowID, QtWidgets.QWidget)\n cmds.setParent(q.fullPath)\n\n # ################################################\n # Active Render Layer\n\n # cmds.separator(height=12, style='none')\n addFrameLayout(\n '%s_frameLayoutLayers' % windowID,\n 'Visible Render Layer', collapsable=False,\n labelVisible=False,\n marginHeight=0\n )\n\n addRowLayout(\n '%s_rowLayoutActiveRenderLayer' % windowID,\n 4,\n columnAlign4=('left', 'left', 'right', 'right'),\n columnAttach4=('left', 'both', 'right', 'right'),\n columnWidth4=(\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.775,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075\n )\n )\n\n\n addButton('%s_addNewLayer' % windowID, 'New', rsAddNewLayer,\n image='RS_create_layer', size=(21, 21))\n addOptionMenu('%s_selectActiveLayer' % windowID,\n 'Active Layer ', (), rsSelectActiveLayer)\n addButton('rsOpenRenderSetupWindow', 'Render Setup',\n rsOpenRenderSetupWindow, image='render_setup.png',\n size=(21, 21))\n addButton('rsOpenUnifiedRenderGlobals', 'Render Globals',\n rsOpenUnifiedRenderGlobals, image='render_setup.png',\n size=(21, 21))\n\n # ################################################\n # Work Render Layers\n\n cmds.setParent(q.fullPath)\n addFrameLayout('%s_frameLayoutLayersB' % windowID,\n 'Work Render Layer', collapsable=False,\n labelVisible=False, marginHeight=0)\n addRowLayout('%s_rowLayoutVisibleRenderLayer' % windowID, 3,\n columnAlign3=('left', 'left', 'right'),\n columnAttach3=('left', 'both', 'right'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.075, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.85,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075))\n\n cmds.separator()\n addOptionMenu('%s_selectVisibleLayer' % windowID,\n 'Visible Layer ', (), rsSelectVisibleLayer)\n cmds.separator()\n\n cmds.setParent(q.fullPath)\n cmds.separator(height=12, style='none')\n\n # ################################################\n # Collections\n\n addFrameLayout('%s_frameLayout02' % windowID, 'Collections',\n labelVisible=False, marginHeight=0)\n\n 
addRowLayout(\n '%s_rowLayout02' % windowID,\n 6,\n columnAlign6=('left', 'left', 'left', 'left', 'left', 'left'),\n columnAttach6=('both', 'both', 'right', 'right', 'right', 'right'),\n columnWidth6=(\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.18,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.18,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.415,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n )\n )\n\n addButton('rsAddCollection', 'Add', rsAddCollection)\n addButton('rsRemoveCollection', 'Remove', rsRemoveCollection)\n addButton('rsSelectShapes', 'Select Shapes', rsSelectShapes,\n image='selectObject.png', size=(21, 21))\n addButton('rsRenameShader', 'Rename Shader', rsRenameShader,\n size=(21, 21), image='QR_rename.png')\n addButton('rsDuplicateShader', 'Duplicate Shader',\n duplicateShader, size=(21, 21), image='newPreset.png')\n addButton('rsRefreshUI', 'Refresh', rsRefreshUI, size=(21, 21),\n image='QR_refresh.png')\n\n # ###########################\n # Filter List\n\n cmds.setParent('%s_frameLayout02' % windowID)\n addRowLayout('%s_rowLayout03' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'both'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.6, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.42))\n\n addTextField('%s_filterShaderList' % windowID, 'Search',\n rsFilterShaderList_off, rsFilterShaderList_off,\n window.updateUI)\n addOptionMenu('rsShaderGroups', '|', (), rsShaderGroups)\n\n # ###########################\n # The shaders scroll list\n\n cmds.setParent('%s_frameLayout02' % windowID)\n addRowLayout('%s_rowLayout04' % windowID, 1, columnAlign1='both', columnAttach1='both', columnWidth1=WINDOW_WIDTH\n + 12)\n addTextScrollList('%s_ShaderScrollList' % windowID, (),\n rsShaderScrollList_doubleClick,\n rsShaderScrollList_onSelect,\n rsShaderScrollList_deleteKey)\n\n # Add popup menu:\n\n cmds.popupMenu('rsShaderScrollListPopupMenu',\n parent='%s_ShaderScrollList' % windowID,\n allowOptionBoxes=False, markingMenu=True,\n postMenuCommand=postMenuCommand)\n cmds.menuItem('%s_popupMenuItem02' % windowID,\n label='Duplicate Shader', command=duplicateShader)\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem04' % windowID,\n label='Graph Shader')\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem03' % windowID,\n label='Select Shader')\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem05' % windowID,\n label='Select Assigned Shapes')\n cmds.menuItem('%s_popupMenuItem06' % windowID,\n label='Select Assigned Transforms')\n\n # ##################################################\n # Arnold Property Overrides\n\n cmds.setParent('%s_frameLayout02' % windowID)\n cmds.columnLayout(\n '%s_columnLayout20' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n\n cmds.separator(parent='%s_columnLayout20' % windowID, height=4,\n style='none')\n\n addRowLayout('%s_rowLayout05' % windowID, 2,\n columnAlign2=('left', 'both'),\n columnAttach2=('left', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.75, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.25))\n addText('%s_textArnoldPropertyOverridesLabel' % windowID,\n 'Apply Arnold Property Overrides', 'plainLabelFont')\n addCheckBox('rsArnoldPropertyOverridesCheckBox', '',\n rsArnoldPropertyOverridesCheckBox,\n rsArnoldPropertyOverridesCheckBox)\n cmds.separator(parent='%s_columnLayout20' % windowID, height=4,\n 
style='none')\n\n # Column Layout to toggle\n\n cmds.setParent('%s_columnLayout20' % windowID)\n cmds.columnLayout(\n '%s_columnLayout02' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n\n addCheckboxes('%s_columnLayout02' % windowID)\n cmds.columnLayout('%s_columnLayout02' % windowID, edit=True,\n visible=False)\n\n # #################################################\n # Shader Override\n\n cmds.setParent('%s_frameLayout02' % windowID)\n cmds.columnLayout(\n '%s_columnLayout21' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n cmds.separator(parent='%s_columnLayout21' % windowID, height=4,\n style='none')\n addRowLayout('%s_rowLayout06' % windowID, 2,\n columnAlign2=('left', 'right'),\n columnAttach2=('left', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.75, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.25))\n addText('%s_shaderOverrideLabel' % windowID, 'Shader Override',\n 'plainLabelFont')\n addCheckBox('%s_shaderOverrideCheckbox' % windowID, '',\n rsShaderOverrideCheckbox, rsShaderOverrideCheckbox)\n cmds.separator(parent='%s_columnLayout21' % windowID, height=4,\n style='none')\n\n cmds.setParent('%s_columnLayout21' % windowID)\n cmds.columnLayout(\n '%s_columnLayout03' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('both', 4),\n adjustableColumn=True,\n rowSpacing=0,\n )\n cmds.setParent('%s_columnLayout03' % windowID)\n addOptionMenu('%s_optionMenu02' % windowID, 'Select: ', (),\n rsShaderOverridesMenu)\n\n global selectedShaderOverride\n\n # default selection\n\n selectedShaderOverride = SHADER_OVERRIDE_OPTIONS[0]['ui']\n cmds.columnLayout('%s_columnLayout03' % windowID, edit=True,\n visible=False)\n\n # #################################################\n\n cmds.setParent(q.fullPath)\n cmds.separator(height=10, style='none')\n\n # #################################################\n # Extras\n\n addFrameLayout('%s_frameLayout50' % windowID, 'Extras',\n collapsable=True, marginHeight=0,\n labelVisible=False)\n\n # #################################################\n # Add & Assign Shader Groups\n\n addFrameLayout(\n '%s_frameLayout05' % windowID,\n 'Add & Assign Shader Groups',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=False,\n labelVisible=True,\n )\n\n # Add the renamer window\n\n self.gwCustomRenamer = CustomRenamer()\n self.gwCustomRenamer.createUI()\n\n # #################################################\n # AutoConnect\n\n cmds.setParent('%s_frameLayout50' % windowID)\n\n addFrameLayout(\n '%s_frameLayout03' % windowID,\n 'Adobe Connector',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=True,\n labelVisible=True,\n )\n addRowLayout('%s_rowLayout07', 3, columnAlign3=('left', 'left',\n 'left'), columnAttach3=('both', 'both', 'both'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.4, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3))\n addButton('updateConnections', '> Update Connections <',\n updateConnections)\n addButton('uvSnapshot', 'UV Snapshot', uvSnapshot)\n addButton('editTexture', 'Edit Texture', editTexture)\n\n # After Effects\n\n cmds.setParent('%s_frameLayout03' % windowID)\n addRowLayout('%s_rowLayout11' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'both'),\n columnWidth2=((WINDOW_WIDTH - 
FRAME_MARGIN * 2)\n * 0.4, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.6))\n addText('%s_text90' % windowID, 'Send to After Effects:')\n addButton('makeCompButton', 'Send to After Effects', rsMakeComp)\n\n # #################################################\n # Render Setup /\n # Output settings\n\n cmds.setParent('%s_frameLayout50' % windowID)\n addFrameLayout(\n '%s_frameLayout04' % windowID,\n 'Output Settings',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=True,\n labelVisible=True,\n )\n addRowLayout('%s_rowLayout08' % windowID, 1,\n columnAlign1='center', columnAttach1='both',\n columnWidth1=WINDOW_WIDTH - FRAME_MARGIN * 2)\n addButton('%s_revealOutputDirectory' % windowID,\n 'Output path not set yet', rsRevealOutputDirectory)\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout09' % windowID, 3,\n columnAlign3=('left', 'right', 'right'),\n columnAttach3=('left', 'right', 'right'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.8, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.14,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.06))\n\n addOptionMenu('%s_optionMenu05' % windowID, '', (),\n rsSelectOutputTemplate)\n addOptionMenu('%s_outputVersionMenu' % windowID, '', (),\n rsSelectOutputVersion)\n cmds.menuItem(label='v001')\n\n cmds.setParent('%s_rowLayout09' % windowID)\n addButton('%s_incrementOutputVersionButton' % windowID, '+1',\n rsIncrementOutputVersion, size=(21, 21))\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout10' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.7, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3))\n addOptionMenu('%s_optionMenu03' % windowID, 'Format:', (),\n rsOutputTemplatesMenu)\n addOptionMenu('%s_optionMenu06' % windowID, '', (),\n rsSetFPSMenu)\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout12' % windowID, 4,\n columnAlign4=('right', 'left', 'right', 'left'),\n columnAttach4=('both', 'both', 'both', 'both'),\n columnWidth4=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.50, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.15,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.20,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.15))\n\n addText('%s_setInFrameLabel' % windowID, 'In Frame ')\n addTextField('%s_setInFrame' % windowID, '', setInFrame,\n setInFrame, setInFrame)\n\n addText('%s_setOutFrameLabel' % windowID, 'Out Frame ')\n addTextField('%s_setOutFrame' % windowID, '', setOutFrame,\n setOutFrame, setOutFrame)", "def mainWidget(self):\n raise RuntimeError('Not implemented')", "def __init__(self, parent):\r\n Frame.__init__(self, parent) \r\n \r\n self.parent = parent\r\n self.initUI()", "def create(self, parent):\n self.widget = QFrame(parent)", "def do_startup(self):\n \n import json\n\n GLib.set_application_name(\"Deity\")\n Gtk.Application.do_startup(self)\n \n settings = self.get_settings()\n\n menub = Gtk.MenuButton(name=\"input-menu_button\",\n use_popover=True)\n\n headerbar = Gtk.HeaderBar(name=\"input-headerbar\",\n show_close_button=True,\n title=\"Deity\")\n\n main_grid = Gtk.Grid(name=\"input-main_grid\")\n\n statusbar = Gtk.Box(name=\"input-statusbar\",\n orientation=0,\n spacing=2)\n statusbar.pack_start(self.statuslabel, 1, 1, 1)\n\n self.connector.connect(\"query-status\", self.show_output)\n self.connector.connect(\"query-waiting\",\n lambda wid, count: self.statuslabel.set_text(\n f\"Queries on hold : {count}\"))\n self.connector.connect(\"request\", print)\n\n 
headerbar.pack_end(menub)\n\n main_grid.attach(self.iogrid.get_widget(), 0, 0, 1, 1)\n main_grid.attach(statusbar, 0, 1, 1, 1)\n\n self.output_window.add(self.get_placeholder_image())\n\n self.window.set_titlebar(headerbar)\n self.window.set_default_icon_from_file(\"artwork/Logo.png\")\n self.window.add(main_grid)\n\n self.window.connect(\"key-press-event\", self.parse_keypress)\n self.window.connect(\"delete-event\", self.request_quit)\n \n self.other[\"connector\"] = self.connector\n self.other[\"headerbar\"] = headerbar\n self.other[\"history\"] = self.history\n self.other[\"input-window\"] = self.window\n self.other[\"iogrid\"] = self.iogrid\n self.other[\"plugins\"] = self.get_plugins(settings[\"enabled-plugins\"])\n self.other[\"statusbar\"] = statusbar\n self.other[\"statuslabel\"] = self.statuslabel\n self.other[\"output-notebook\"] = self.notebook\n self.other[\"output-window\"] = self.output_window\n self.other[\"main-grid\"] = main_grid\n self.other[\"menu_button\"] = menub\n \n self.apply_settings(settings)\n self.current_prompt = self.iogrid.add_prompt()\n\n self.window.set_application(self)\n self.output_window.set_application(self)\n\n self.output_window.move(800, 150)\n self.window.move(75, 160)", "def run(self):\n self.root.title(\"Etymology relations\")\n self.root.geometry(\"1080x600\")\n self.root.deiconify()\n self.root.mainloop()", "def draw_window_pane():\n houseturtle.begin_fill()\n for y in range(4):\n houseturtle.pendown()\n houseturtle.forward(35)\n houseturtle.left(90)\n houseturtle.penup()\n houseturtle.end_fill()", "def dock(rec_outpath, reorder_outpath, init='dock_init'):\n init = eval(init)\n receptor = os.path.basename(rec_outpath).split('_')[0]\n dock_dir = os.path.join(init.data_dir, init.dock_folder) \n rec_path = os.path.join(init.data_dir, rec_outpath)\n reorder_path = os.path.join(init.data_dir, reorder_outpath)\n\n dock_name = os.path.basename(rec_path).replace('receptor','dock')\n out_path = os.path.join(dock_dir, receptor, dock_name)\n\n\n\n if not os.path.exists(os.path.dirname(out_path)):\n os.makedirs(os.path.dirname(out_path))\n\n kw = {\n 'receptor': rec_path,\n 'ligand': reorder_path,\n 'autobox_ligand':reorder_path,\n 'out':out_path\n }\n\n cmd = init._make_command(**kw)\n cl = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n cl.wait()\n\n return [[rec_outpath, reorder_outpath, os.path.join(init.dock_folder, receptor, dock_name)]]", "def __init__(self, parent, controller):\n super().__init__(parent, width=\"10m\")\n\n self.parent = parent\n self.controller = controller\n\n self.grid_propagate(0)\n\n self.rowconfigure(0, weight=1)\n self.rowconfigure(1, weight=20)\n self.rowconfigure(2, weight=20)\n self.columnconfigure(0, weight=1)\n self.columnconfigure(1, weight=1)\n\n if (self.controller.game.color_queried and\n self.controller.game.in_turn == 0):\n self.create_title()\n self.create_buttons()", "def TopDockable(self, b=True):\r\n \r\n return self.SetFlag(self.optionTopDockable, b)", "def __init__(self, master):\n self.window = tk.Canvas(master, width=500, height=300)\n self.reset_button = tk.Button(master, text=\"Reset\", command=self.reset_window)\n self.start_button = tk.Button(master, text=\"Start\", command=self.start_sorting)\n self.window.pack()\n self.reset_button.pack()\n self.start_button.pack()\n self.reset_window()", "def showUI(cls):\r\n win = cls()\r\n win.create()\r\n return win", "def layout(self):\n menu = self.menuBar()\n menu.setNativeMenuBar(False)\n\n file_menu = menu.addMenu(\"File\")\n fitting_commands = 
menu.addMenu(\"Fitting\")\n help_menu = menu.addMenu(\"Help\")\n\n prog_info = QAction(\"About\", self)\n prog_info.triggered.connect(self.version)\n help_menu.addAction(prog_info)\n\n doc_info = QAction(\"Documentation\", self)\n doc_info.triggered.connect(self.docs)\n help_menu.addAction(doc_info)\n\n fit_exp = QAction(\"Fit Experiments\", self)\n fit_exp.setShortcut(\"Ctrl+F\")\n fit_exp.triggered.connect(self.fit_exp)\n fitting_commands.addAction(fit_exp)\n\n add_exp = QAction(\"Add Experiment\", self)\n add_exp.setShortcut(\"Ctrl+Shift+N\")\n add_exp.triggered.connect(self.add_file)\n file_menu.addAction(add_exp)\n\n save_exp = QAction(\"Export Results\", self)\n save_exp.setShortcut(\"Ctrl+S\")\n save_exp.triggered.connect(self.save_file)\n file_menu.addAction(save_exp)\n\n file_menu.addSeparator()\n\n new_exp = QAction(\"New Session\", self)\n new_exp.setShortcut(\"Ctrl+N\")\n new_exp.triggered.connect(self.new_exp)\n file_menu.addAction(new_exp)\n\n close_window = QAction(\"Close Window\", self)\n close_window.setShortcut(\"Ctrl+W\")\n close_window.triggered.connect(self.close_program)\n file_menu.addAction(close_window)\n\n # add shortcut actions to main window, for qt5 bug\n self.addAction(add_exp)\n self.addAction(fit_exp)\n self.addAction(save_exp)\n self.addAction(new_exp)\n self.addAction(close_window)\n self.addAction(doc_info)\n self.addAction(prog_info)\n\n self._exp = Splitter(self)\n self.setCentralWidget(self._exp)\n\n self.resize(1000, 600)\n self.move(QApplication.desktop().screen().rect().center()-self.rect().center())\n self.setWindowTitle('pytc')\n self.show()" ]
[ "0.6622695", "0.5954567", "0.5893806", "0.57845724", "0.57674456", "0.5709266", "0.5709266", "0.5704553", "0.56602496", "0.5624555", "0.5622377", "0.55571485", "0.5540067", "0.5499297", "0.54939884", "0.5486771", "0.54789144", "0.54647344", "0.5457878", "0.5430313", "0.54281336", "0.54207337", "0.54079336", "0.5405601", "0.54030937", "0.539713", "0.5369709", "0.5364974", "0.5356353", "0.5344065", "0.5327576", "0.5321478", "0.53179467", "0.53043985", "0.53008735", "0.5289377", "0.52747035", "0.5271803", "0.5263004", "0.5256001", "0.5248328", "0.5244499", "0.52410275", "0.52410275", "0.5236421", "0.52222115", "0.52187175", "0.52187175", "0.5214655", "0.52133954", "0.5212397", "0.52097964", "0.51973665", "0.51957583", "0.5195021", "0.5161608", "0.51523834", "0.51470244", "0.51463634", "0.5122206", "0.51191735", "0.5109188", "0.5097577", "0.50781935", "0.5074098", "0.5071374", "0.5069202", "0.50654763", "0.5056912", "0.5053178", "0.5045603", "0.50440854", "0.5042633", "0.50417453", "0.5033589", "0.5029622", "0.50271726", "0.5020114", "0.50200933", "0.50128335", "0.5010629", "0.5009045", "0.50043494", "0.500119", "0.4998962", "0.49912325", "0.49866554", "0.49830276", "0.49813178", "0.49786624", "0.49759617", "0.49728456", "0.49659315", "0.49619132", "0.49519458", "0.49510834", "0.4948913", "0.49488747", "0.49470803", "0.4935869", "0.49323857" ]
0.0
-1
Dock for MEM object.
def action_mem(obj: MEM, thread: QtCore.QThread): w_actions = [] crystals = obj.crystals() experiments = obj.experiments() flag_crystals = len(crystals) != 0 flag_experiments = len(experiments) != 0 # for experiment in experiments: # w_actions_temp = action_diffrn(experiment, thread) # w_actions.extend(w_actions_temp) # Action doc if (flag_crystals & flag_experiments & obj.is_defined()): if flag_crystals: crystal = crystals[0] if not(crystal.is_attribute("atom_electron_configuration")): qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Create AtomElectronConfiguration") qtb_1.clicked.connect(lambda: crystal.add_items([ AtomElectronConfigurationL()])) qtb_1.clicked.connect(lambda: run_function(pass_func, (), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Create prior density") qtb_1.clicked.connect(lambda: run_function( obj.create_prior_density, (), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Calculate FR") qtb_1.clicked.connect(lambda: run_function(obj.calc_fr, (), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Maximize entropy") qtb_1.clicked.connect(lambda: run_function(obj.maximize_entropy, (), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Chi refinement") qtb_1.clicked.connect(lambda: run_function(obj.refine_susceptibility, (), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Run cycle") qtb_1.clicked.connect(lambda: run_function(obj.make_cycle, (), thread)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Save to '.den' files") qtb_1.clicked.connect(lambda: run_function(obj.save_to_file_den, (), thread)) w_actions.append(qtb_1) if obj.is_attribute("section"): qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Plot sections") def func_plot(obj): crystal = obj.crystals()[0] space_group = crystal.space_group f_s_g_s = space_group.full_space_group_symop r_11 = numpy.array(f_s_g_s.r_11, dtype=float) r_12 = numpy.array(f_s_g_s.r_12, dtype=float) r_13 = numpy.array(f_s_g_s.r_13, dtype=float) r_21 = numpy.array(f_s_g_s.r_21, dtype=float) r_22 = numpy.array(f_s_g_s.r_22, dtype=float) r_23 = numpy.array(f_s_g_s.r_23, dtype=float) r_31 = numpy.array(f_s_g_s.r_31, dtype=float) r_32 = numpy.array(f_s_g_s.r_32, dtype=float) r_33 = numpy.array(f_s_g_s.r_33, dtype=float) r_ij = (r_11, r_12, r_13, r_21, r_22, r_23, r_31, r_32, r_33) b_1 = numpy.array(f_s_g_s.b_1, dtype=float) b_2 = numpy.array(f_s_g_s.b_2, dtype=float) b_3 = numpy.array(f_s_g_s.b_3, dtype=float) b_i = (b_1, b_2, b_3) atom_site = crystal.atom_site fract_x = atom_site.numpy_fract_x fract_y = atom_site.numpy_fract_y fract_z = atom_site.numpy_fract_z fract_xyz = (fract_x, fract_y, fract_z) atom_label = atom_site.numpy_label fract_uc_x, fract_uc_y, fract_uc_z, label_uc = \ calc_atoms_in_unit_cell(r_ij, b_i, fract_xyz, atom_label) cell = crystal.cell atom_site_susceptibility = crystal.atom_site_susceptibility section = obj.section[0] density_point = obj.density_point mem_parameters = obj.mem_parameters atom_x, atom_y, atom_label = section.calc_atoms( cell, atom_site, f_s_g_s, distance_min=0.3) den_chi_section, den_b_section = \ calc_section_from_density_point( section, density_point, mem_parameters, cell, f_s_g_s, atom_site, atom_site_susceptibility) fract_atom_xyz = numpy.array(fract_xyz, dtype=float ).transpose() fract_sec_xyz = section.calc_fractions(cell, atom_site) fract_sec_xyz = numpy.transpose(numpy.array(fract_sec_xyz, dtype=float)) n_atom_index, 
n_symmetry, distance = \ calc_index_atom_symmetry_closest_to_fract_xyz( fract_sec_xyz, fract_atom_xyz, r_ij, b_i, cell) n_at_2d = numpy.transpose(n_atom_index.reshape( section.points_x, section.points_y)) fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(4.2, 4.2), dpi=300) plt.set_cmap('Accent') ax1.imshow(n_at_2d, interpolation='bilinear', extent=(-0.5*section.size_x, 0.5*section.size_x, -0.5*section.size_y, 0.5*section.size_y), alpha=0.1, origin="lower") den_x = numpy.linspace(-0.5*section.size_x, 0.5*section.size_x, section.points_x) den_y = numpy.linspace(-0.5*section.size_y, 0.5*section.size_y, section.points_y) blk = '#000000' ax1.contour(den_x, den_y, den_chi_section.transpose(), levels=[0.1, 0.5, 1., 5., 10., 50.], colors=[blk, blk, blk, blk, blk, blk], linewidths=0.5) ax1.plot(atom_x, atom_y, 'ko', ms=3) for _1, _2, _3 in zip(atom_x, atom_y, atom_label): ax1.text(_1, _2, _3) ax1.set_title( f"Tensor. Max is {den_chi_section.max():.1f}") # plt.set_cmap('RdBu') ax2.imshow(n_at_2d, interpolation='bilinear', extent=(-0.5*section.size_x, 0.5*section.size_x, -0.5*section.size_y, 0.5*section.size_y), alpha=0.1, origin="lower") hh = numpy.abs(den_b_section).max() rd = '#FF0000' ax2.contour(den_x, den_y, den_b_section.transpose(), levels=[-50., -10., -5., -1., -0.5, -0.1, 0.1, 0.5, 1., 5., 10., 50.], colors=[rd, rd, rd, rd, rd, rd, blk, blk, blk, blk, blk, blk], linewidths=0.5) # ax2.imshow(den_b_section, interpolation='bilinear', # extent=(-0.5*section.size_x, 0.5*section.size_x, # -0.5*section.size_y, 0.5*section.size_y), # vmin=-hh, vmax=hh, # alpha=1., origin="lower") ax2.set_title(f"2channel. Max is {hh:.1f}") ax2.plot(atom_x, atom_y, 'ko', ms=3) for _1, _2, _3 in zip(atom_x, atom_y, atom_label): ax2.text(_1, _2, _3) plt.show() return qtb_1.clicked.connect(lambda: func_plot(obj)) w_actions.append(qtb_1) elif not(flag_crystals & flag_experiments): if not flag_crystals: qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Add crystal") qtb_1.clicked.connect(lambda: add_items(obj, [ Crystal(data_name="phase")], thread)) w_actions.append(qtb_1) if not flag_experiments: qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Add diffrn") qtb_1.clicked.connect(lambda: add_items(obj, [ Diffrn(data_name="mono")], thread)) w_actions.append(qtb_1) else: qlabel = QtWidgets.QLabel( "To run calculations all items should be defined.") qlabel.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) w_actions.append(qlabel) return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createDockArea(self):\n self.centralDock = CentralDockArea(self.globalSession)\n self.setCentralWidget(self.centralDock)", "def __init__(self):\r\n\r\n object.__init__(self)\r\n \r\n self.dock_direction = 0\r\n self.dock_layer = 0\r\n self.dock_row = 0\r\n self.size = 0\r\n self.min_size = 0\r\n self.resizable = True\r\n self.fixed = False\r\n self.toolbar = False\r\n self.rect = wx.Rect()\r\n self.panes = []", "def GetDock(self):\n return self.dock", "def create(self, verbose=False):\r\n # delete the window if its handle exists\r\n if cmds.window(self.window, exists=True):\r\n cmds.deleteUI(self.window)\r\n # initialize the window as a pane for docking\r\n self.window = cmds.loadUI(uiFile=self.uiFile, verbose=verbose)\r\n #layoutWin = cmds.paneLayout(configuration='single')\r\n # create a dockControl and parent the control to layoutWin\r\n cmds.dockControl(allowedArea='all', area='right', floating=False, \r\n height=cmds.window(self.window, query=True, height=True), \r\n content=self.window, label='Docked Cone Pointer Window')\r\n cmds.showWindow(self.window)", "def create_dockable(self, dockable_name, widget):\n pass", "def test_vs_docking():\n vs = virtualscreening(n_cpu=-1)\n vs.load_ligands('sdf', os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'))\n vs.dock(engine='autodock_vina',\n protein=os.path.join(test_data_dir, 'data/dude/xiap/receptor_rdkit.pdb'),\n auto_ligand=os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'),\n exhaustiveness=1,\n seed=0)\n mols = list(vs.fetch())\n assert_equal(len(mols), 3)\n mol_data = mols[0].data\n assert_in('vina_affinity', mol_data)\n assert_in('vina_rmsd_lb', mol_data)\n assert_in('vina_rmsd_ub', mol_data)", "def LayoutAddDock(self, cont, dock, uiparts, spacer_only):\r\n \r\n sizer_item = wx.SizerItem()\r\n part = AuiDockUIPart()\r\n\r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n orientation = (dock.IsHorizontal() and [wx.HORIZONTAL] or [wx.VERTICAL])[0]\r\n\r\n # resizable bottom and right docks have a sash before them\r\n if not self._has_maximized and not dock.fixed and \\\r\n dock.dock_direction in [AUI_DOCK_BOTTOM, AUI_DOCK_RIGHT]:\r\n \r\n sizer_item = cont.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n\r\n part.type = AuiDockUIPart.typeDockSizer\r\n part.orientation = orientation\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n # create the sizer for the dock\r\n dock_sizer = wx.BoxSizer(orientation)\r\n\r\n # add each pane to the dock\r\n has_maximized_pane = False\r\n pane_count = len(dock.panes)\r\n\r\n if dock.fixed:\r\n \r\n # figure out the real pane positions we will\r\n # use, without modifying the each pane's pane_pos member\r\n pane_positions, pane_sizes = self.GetPanePositionsAndSizes(dock)\r\n\r\n offset = 0\r\n for pane_i in xrange(pane_count):\r\n \r\n pane = dock.panes[pane_i]\r\n pane_pos = pane_positions[pane_i]\r\n\r\n if pane.IsMaximized():\r\n has_maximized_pane = True\r\n\r\n amount = pane_pos - offset\r\n if amount > 0:\r\n \r\n if dock.IsVertical():\r\n sizer_item = dock_sizer.Add((1, amount), 0, wx.EXPAND)\r\n else:\r\n sizer_item = dock_sizer.Add((amount, 1), 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = (orientation==wx.HORIZONTAL and \\\r\n [wx.VERTICAL] or [wx.HORIZONTAL])[0]\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item 
= sizer_item\r\n uiparts.append(part)\r\n\r\n offset = offset + amount\r\n \r\n uiparts = self.LayoutAddPane(dock_sizer, dock, pane, uiparts, spacer_only)\r\n\r\n offset = offset + pane_sizes[pane_i]\r\n \r\n # at the end add a very small stretchable background area\r\n sizer_item = dock_sizer.Add((0, 0), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n else:\r\n \r\n for pane_i in xrange(pane_count):\r\n \r\n pane = dock.panes[pane_i]\r\n\r\n if pane.IsMaximized():\r\n has_maximized_pane = True\r\n\r\n # if this is not the first pane being added,\r\n # we need to add a pane sizer\r\n if not self._has_maximized and pane_i > 0:\r\n sizer_item = dock_sizer.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typePaneSizer\r\n part.dock = dock\r\n part.pane = dock.panes[pane_i-1]\r\n part.button = None\r\n part.orientation = (orientation==wx.HORIZONTAL and \\\r\n [wx.VERTICAL] or [wx.HORIZONTAL])[0]\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n uiparts = self.LayoutAddPane(dock_sizer, dock, pane, uiparts, spacer_only)\r\n \r\n if dock.dock_direction == AUI_DOCK_CENTER or has_maximized_pane:\r\n sizer_item = cont.Add(dock_sizer, 1, wx.EXPAND)\r\n else:\r\n sizer_item = cont.Add(dock_sizer, 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeDock\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n if dock.IsHorizontal():\r\n cont.SetItemMinSize(dock_sizer, (0, dock.size))\r\n else:\r\n cont.SetItemMinSize(dock_sizer, (dock.size, 0))\r\n\r\n # top and left docks have a sash after them\r\n if not self._has_maximized and not dock.fixed and \\\r\n dock.dock_direction in [AUI_DOCK_TOP, AUI_DOCK_LEFT]:\r\n \r\n sizer_item = cont.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeDockSizer\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n return uiparts", "def Dock(self):\r\n\r\n if self.IsNotebookPage():\r\n self.notebook_id = -1\r\n self.dock_direction = AUI_DOCK_NONE\r\n \r\n return self.SetFlag(self.optionFloating, False)", "def SetDockPos(self, source):\r\n \r\n self.dock_direction = source.dock_direction\r\n self.dock_layer = source.dock_layer\r\n self.dock_row = source.dock_row\r\n self.dock_pos = source.dock_pos\r\n self.dock_proportion = source.dock_proportion\r\n self.floating_pos = wx.Point(*source.floating_pos)\r\n self.floating_size = wx.Size(*source.floating_size)\r\n self.rect = wx.Rect(*source.rect)\r\n \r\n return self", "def CalculateDockSizerLimits(self, dock):\r\n\r\n docks, panes = CopyDocksAndPanes2(self._docks, self._panes)\r\n\r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n opposite_size = self.GetOppositeDockTotalSize(docks, dock.dock_direction)\r\n\r\n for tmpDock in docks:\r\n \r\n if tmpDock.dock_direction == dock.dock_direction and \\\r\n tmpDock.dock_layer == dock.dock_layer and \\\r\n tmpDock.dock_row 
== dock.dock_row:\r\n \r\n tmpDock.size = 1\r\n break\r\n \r\n sizer, panes, docks, uiparts = self.LayoutAll(panes, docks, [], True, False)\r\n client_size = self._frame.GetClientSize()\r\n sizer.SetDimension(0, 0, client_size.x, client_size.y)\r\n sizer.Layout()\r\n\r\n for part in uiparts:\r\n \r\n part.rect = wx.RectPS(part.sizer_item.GetPosition(), part.sizer_item.GetSize())\r\n if part.type == AuiDockUIPart.typeDock:\r\n part.dock.rect = part.rect\r\n \r\n sizer.Destroy()\r\n new_dock = None\r\n\r\n for tmpDock in docks:\r\n if tmpDock.dock_direction == dock.dock_direction and \\\r\n tmpDock.dock_layer == dock.dock_layer and \\\r\n tmpDock.dock_row == dock.dock_row:\r\n \r\n new_dock = tmpDock\r\n break\r\n \r\n partnerDock = self.GetPartnerDock(dock)\r\n\r\n if partnerDock:\r\n partnerRange = partnerDock.size - partnerDock.min_size\r\n if partnerDock.min_size == 0:\r\n partnerRange -= sash_size\r\n if dock.IsHorizontal():\r\n partnerRange -= caption_size\r\n \r\n direction = dock.dock_direction\r\n \r\n if direction == AUI_DOCK_LEFT:\r\n minPix = new_dock.rect.x + new_dock.rect.width\r\n maxPix = dock.rect.x + dock.rect.width\r\n maxPix += partnerRange\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n minPix = new_dock.rect.y + new_dock.rect.height\r\n maxPix = dock.rect.y + dock.rect.height\r\n maxPix += partnerRange\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n minPix = dock.rect.x - partnerRange - sash_size\r\n maxPix = new_dock.rect.x - sash_size\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n minPix = dock.rect.y - partnerRange - sash_size\r\n maxPix = new_dock.rect.y - sash_size\r\n\r\n return minPix, maxPix\r\n \r\n direction = new_dock.dock_direction\r\n \r\n if direction == AUI_DOCK_LEFT:\r\n minPix = new_dock.rect.x + new_dock.rect.width\r\n maxPix = client_size.x - opposite_size - sash_size\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n minPix = new_dock.rect.y + new_dock.rect.height\r\n maxPix = client_size.y - opposite_size - sash_size\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n minPix = opposite_size\r\n maxPix = new_dock.rect.x - sash_size\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n minPix = opposite_size\r\n maxPix = new_dock.rect.y - sash_size\r\n\r\n return minPix, maxPix", "def init_layout(self):\n super(WxDockPane, self).init_layout()\n self.widget.SetDockWidget(self.dock_widget())", "def dockControl(*args, allowedArea: Union[AnyStr, List[AnyStr], bool]=\"all\", annotation:\n Union[AnyStr, bool]=\"\", area: Union[AnyStr, bool]=\"\", backgroundColor:\n Union[List[float, float, float], bool]=None, closeCommand: Script=None,\n content: Union[AnyStr, bool]=\"\", defineTemplate: AnyStr=\"\", docTag:\n Union[AnyStr, bool]=\"\", dockStation: AnyStr=\"\", dragCallback: Script=None,\n dropCallback: Script=None, enable: bool=True, enableBackground: bool=True,\n enableKeyboardFocus: bool=True, enablePopupOption: bool=True, exists: bool=True,\n fixedHeight: bool=True, fixedWidth: bool=True, floatChangeCommand: Script=None,\n floating: bool=True, fullPathName: bool=True, height: Union[int, bool]=0,\n highlightColor: Union[List[float, float, float], bool]=None, isObscured:\n bool=True, label: Union[AnyStr, bool]=\"\", manage: bool=True, moveable:\n bool=True, noBackground: bool=True, numberOfPopupMenus: bool=True, parent:\n Union[AnyStr, bool]=\"\", popupMenuArray: bool=True, preventOverride: bool=True,\n r: bool=True, retain: bool=True, sizeable: bool=True, splitLayout: AnyStr=\"\",\n state: Union[AnyStr, bool]=\"\", statusBarMessage: AnyStr=\"\", useTemplate:\n AnyStr=\"\", 
visible: bool=True, visibleChangeCommand: Union[Script, bool]=None,\n width: Union[int, bool]=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def _placeDock(self, dock, pos=None, otherDock=None):\n if otherDock is not None and pos is not None:\n self.area.addDock(dock,pos,otherDock)\n elif pos is not None:\n self.area.addDock(dock,pos,otherDock)\n else:\n self.area.addDock(dock)\n return dock", "def __init__(self, size, class_to_use, master, row, column, report=None):\n try:\n master.master.geometry(size)\n except AttributeError:\n pass\n self.window = class_to_use(master=master, borderwidth=0, relief=tk.GROOVE)\n self.window.grid(row=row, column=column, padx=10, pady=20)", "def OnDocked(self, event):\n self._floating = False\n self._dock_area = event.GetPane().dock_direction\n wx.PostEvent(self, wxDockPaneDockedEvent())", "def __init__(self, other=None):\r\n\r\n if other:\r\n self.Assign(other)\r\n else:\r\n # window representing the docking target\r\n self.host = None\r\n # dock direction (top, bottom, left, right, center)\r\n self.dock_direction = AUI_DOCK_NONE", "def initDocks(self):\n # Define the grid of widgets\n gridLayout = QtWidgets.QGridLayout()\n gridLayout.setOriginCorner(QtCore.Qt.TopLeftCorner)\n\n # Set QWidget object as main window in order to develop the appropriate functions\n widget = QtWidgets.QWidget(self)\n widget.setLayout(gridLayout)\n self.setCentralWidget(widget)\n\n # Set the text font\n font = QtGui.QFont()\n font.setPointSize(14)\n font.setBold(True)\n\n # Add figure widget scenes\n self.lineFigureScene = GraphicsLineScene(self)\n self.barFigureScene = GraphicsBarScene(self)\n\n # Init view windows\n self.displayLineFigure = QtWidgets.QGraphicsView()\n self.displayBarFigure = QtWidgets.QGraphicsView()\n\n self.displayLineFigure.setScene(self.lineFigureScene)\n self.displayBarFigure.setScene(self.barFigureScene)\n\n # Initialize the classification scenes\n self.lineFigures = LineFigures(self)\n self.barFigures = BarFigures(self)\n self.displayLineFigures = QtWidgets.QGraphicsView(self.lineFigures)\n self.displayBarFigures = QtWidgets.QGraphicsView(self.barFigures)\n\n # Set item index method\n self.lineFigures.setItemIndexMethod(QtWidgets.QGraphicsScene.BspTreeIndex)\n self.barFigures.setItemIndexMethod(QtWidgets.QGraphicsScene.BspTreeIndex)\n\n # Define text widgets\n lineText = QtWidgets.QLabel()\n lineText.setFont(font)\n lineText.setText('Line Figures Classification')\n #\n barText = QtWidgets.QLabel()\n barText.setFont(font)\n barText.setText('Bar Figures Classification')\n\n # Add widgets to grid layout\n gridLayout.addWidget(lineText, 1, 0, 1, -1, QtCore.Qt.AlignHCenter)\n gridLayout.addWidget(barText, 3, 0, 1, -1, QtCore.Qt.AlignHCenter)\n gridLayout.addWidget(self.displayLineFigure, 2, 0, QtCore.Qt.AlignLeft)\n gridLayout.addWidget(self.displayBarFigure, 4, 0, QtCore.Qt.AlignLeft)\n gridLayout.addWidget(self.displayLineFigures, 2, 1, 1, -1, QtCore.Qt.AlignLeft)\n gridLayout.addWidget(self.displayBarFigures, 4, 1, 1, -1, QtCore.Qt.AlignLeft)\n\n gridLayout.setHorizontalSpacing(70)\n gridLayout.setVerticalSpacing(15)\n\n self.screenWidth = QtWidgets.QDesktopWidget().width()\n self.screenHeight = QtWidgets.QDesktopWidget().height()\n\n # Create slots to update slider initial position\n self.displayBarFigures.horizontalScrollBar().rangeChanged.connect( self.barFigures.changeSliderPos)\n self.displayLineFigures.horizontalScrollBar().rangeChanged.connect( self.lineFigures.changeSliderPos)\n\n # Overlay loading widget\n 
self.overlay = Overlay(self)\n self.overlay.hide()", "def __init__(self):\n self.stack = QWidget()", "def Position(self, pos):\r\n\r\n self.dock_pos = pos\r\n return self", "def UpdateDockGuide(self, pos):\r\n\r\n self.target.UpdateDockGuide(pos)", "def __init__(self, parent):\r\n\r\n AuiDockingGuide.__init__(self, parent, style=wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP |\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER | wx.FRAME_SHAPED,\r\n name=\"auiCenterDockTarget\")\r\n\r\n self.Hide()\r\n\r\n self.CreateShapesWithStyle()\r\n self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)\r\n \r\n if wx.Platform == \"__WXGTK__\":\r\n self.Bind(wx.EVT_WINDOW_CREATE, self.SetGuideShape)\r\n else:\r\n self.SetGuideShape()\r\n \r\n self.SetSize(self.region.GetBox().GetSize())\r\n\r\n self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)\r\n self.Bind(wx.EVT_PAINT, self.OnPaint)", "def __init__(self, master):\n self.ports = list(serial.tools.list_ports.comports())\n self.master = master # initialize Root window\n self.master.title('Zeng.ltd Dashboard') # set title for Root window\n self.master.geometry(\"1000x700\") # set size and location for Root window\n self.master.configure(background=\"lightgrey\")\n self.mainframe = ttk.Notebook(self.master,\n padding=\"0 0 0 0\") # left top right bottem create mainframe in Root winow\n self.mainframe.grid(column=0, row=0, sticky=(N, W, S, E)) # set mainframe to root windows size\n self.master.protocol(\"WM_DELETE_WINDOW\", lambda: self.quit())\n\n menubar = Menu(self.master) # create a menubar\n\n # display the menu\n self.master.config(menu=menubar)\n\n # List of all our views\n self.views = {} # Empty dict\n\n # The back-end process\n self.pyctrl = PyCtrl(self)\n self.pyctrl.start()", "def GetDockArea(self):\n return self._dock_area", "def BalloonInstanceMemory(self, instance, mem):\n # Currently chroots don't have memory limits\n pass", "def open(self):\n super(Nodzgraph, self).open(dockable=self.configuration.maya.docked,\n area=self.configuration.maya.dock_area,\n allowedArea=self.configuration.maya.allowed_dock_areas,\n floating=self.configuration.maya.floating,\n width=self.configuration.maya.width,\n height=self.configuration.maya.height\n )", "def LayoutAll(self, panes, docks, uiparts, spacer_only=False, oncheck=True):\r\n \r\n container = wx.BoxSizer(wx.VERTICAL)\r\n\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n cli_size = self._frame.GetClientSize()\r\n \r\n # empty all docks out\r\n for dock in docks:\r\n dock.panes = []\r\n if dock.fixed:\r\n # always reset fixed docks' sizes, because\r\n # the contained windows may have been resized\r\n dock.size = 0\r\n \r\n dock_count = len(docks)\r\n \r\n # iterate through all known panes, filing each\r\n # of them into the appropriate dock. 
If the\r\n # pane does not exist in the dock, add it\r\n for p in panes:\r\n\r\n # don't layout hidden panes.\r\n if p.IsShown():\r\n \r\n # find any docks with the same dock direction, dock layer, and\r\n # dock row as the pane we are working on\r\n arr = FindDocks(docks, p.dock_direction, p.dock_layer, p.dock_row)\r\n\r\n if arr:\r\n dock = arr[0]\r\n\r\n else:\r\n # dock was not found, so we need to create a new one\r\n d = AuiDockInfo()\r\n d.dock_direction = p.dock_direction\r\n d.dock_layer = p.dock_layer\r\n d.dock_row = p.dock_row\r\n docks.append(d)\r\n dock = docks[-1]\r\n\r\n if p.HasFlag(p.needsRestore) and not p.HasFlag(p.wasMaximized):\r\n \r\n isHor = dock.IsHorizontal()\r\n sashSize = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n\r\n # get the sizes of any docks that might \r\n # overlap with our restored dock\r\n\r\n # make list of widths or heights from the size in the dock rects\r\n sizes = [d.rect[2:][isHor] for \\\r\n d in docks if d.IsOk() and \\\r\n (d.IsHorizontal() == isHor) and \\\r\n not d.toolbar and \\\r\n d.dock_direction != AUI_DOCK_CENTER]\r\n \r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n\r\n # set max size allowing for sashes and absolute minimum\r\n maxsize = frameRect[2:][isHor] - sum(sizes) - (len(sizes)*10) - (sashSize*len(sizes))\r\n dock.size = min(p.previousDockSize,maxsize)\r\n\r\n else:\r\n dock.size = 0\r\n\r\n if p.HasFlag(p.wasMaximized):\r\n self.MaximizePane(p, savesizes=False)\r\n p.SetFlag(p.wasMaximized, False)\r\n\r\n if p.HasFlag(p.needsRestore):\r\n if p.previousDockPos is not None:\r\n DoInsertPane(dock.panes, dock.dock_direction, dock.dock_layer, dock.dock_row, p.previousDockPos)\r\n p.dock_pos = p.previousDockPos\r\n p.previousDockPos = None\r\n p.SetFlag(p.needsRestore, False)\r\n\r\n if p.IsDocked():\r\n # remove the pane from any existing docks except this one\r\n docks = RemovePaneFromDocks(docks, p, dock)\r\n\r\n # pane needs to be added to the dock,\r\n # if it doesn't already exist \r\n if not FindPaneInDock(dock, p.window):\r\n dock.panes.append(p)\r\n else:\r\n # remove the pane from any existing docks\r\n docks = RemovePaneFromDocks(docks, p)\r\n \r\n # remove any empty docks\r\n docks = [dock for dock in docks if dock.panes]\r\n\r\n dock_count = len(docks)\r\n # configure the docks further\r\n for ii, dock in enumerate(docks):\r\n # sort the dock pane array by the pane's\r\n # dock position (dock_pos), in ascending order\r\n dock.panes.sort(PaneSortFunc)\r\n dock_pane_count = len(dock.panes)\r\n \r\n # for newly created docks, set up their initial size\r\n if dock.size == 0:\r\n size = 0\r\n for pane in dock.panes:\r\n pane_size = pane.best_size\r\n if pane_size == wx.Size(-1, -1):\r\n pane_size = pane.min_size\r\n if pane_size == wx.Size(-1, -1) and pane.window:\r\n pane_size = pane.window.GetSize()\r\n if dock.IsHorizontal():\r\n size = max(pane_size.y, size)\r\n else:\r\n size = max(pane_size.x, size)\r\n \r\n # add space for the border (two times), but only\r\n # if at least one pane inside the dock has a pane border\r\n for pane in dock.panes:\r\n if pane.HasBorder():\r\n size = size + pane_border_size*2\r\n break\r\n \r\n # if pane is on the top or bottom, add the caption height,\r\n # but only if at least one pane inside the dock has a caption\r\n if dock.IsHorizontal():\r\n for pane in dock.panes:\r\n if pane.HasCaption() and not pane.HasCaptionLeft():\r\n size = size + caption_size\r\n break\r\n else:\r\n for pane in dock.panes:\r\n if pane.HasCaptionLeft() and not pane.HasCaption():\r\n size 
= size + caption_size\r\n break\r\n \r\n # new dock's size may not be more than the dock constraint\r\n # parameter specifies. See SetDockSizeConstraint()\r\n max_dock_x_size = int(self._dock_constraint_x*float(cli_size.x))\r\n max_dock_y_size = int(self._dock_constraint_y*float(cli_size.y))\r\n if cli_size <= wx.Size(20, 20):\r\n max_dock_x_size = 10000\r\n max_dock_y_size = 10000\r\n\r\n if dock.IsHorizontal():\r\n size = min(size, max_dock_y_size)\r\n else:\r\n size = min(size, max_dock_x_size)\r\n\r\n # absolute minimum size for a dock is 10 pixels\r\n if size < 10:\r\n size = 10\r\n\r\n dock.size = size\r\n\r\n # determine the dock's minimum size\r\n plus_border = False\r\n plus_caption = False\r\n plus_caption_left = False\r\n dock_min_size = 0\r\n for pane in dock.panes:\r\n if pane.min_size != wx.Size(-1, -1):\r\n if pane.HasBorder():\r\n plus_border = True\r\n if pane.HasCaption():\r\n plus_caption = True\r\n if pane.HasCaptionLeft():\r\n plus_caption_left = True\r\n if dock.IsHorizontal():\r\n if pane.min_size.y > dock_min_size:\r\n dock_min_size = pane.min_size.y\r\n else:\r\n if pane.min_size.x > dock_min_size:\r\n dock_min_size = pane.min_size.x\r\n \r\n if plus_border:\r\n dock_min_size += pane_border_size*2\r\n if plus_caption and dock.IsHorizontal():\r\n dock_min_size += caption_size\r\n if plus_caption_left and dock.IsVertical():\r\n dock_min_size += caption_size\r\n \r\n dock.min_size = dock_min_size\r\n \r\n # if the pane's current size is less than it's\r\n # minimum, increase the dock's size to it's minimum\r\n if dock.size < dock.min_size:\r\n dock.size = dock.min_size\r\n\r\n # determine the dock's mode (fixed or proportional)\r\n # determine whether the dock has only toolbars\r\n action_pane_marked = False\r\n dock.fixed = True\r\n dock.toolbar = True\r\n for pane in dock.panes:\r\n if not pane.IsFixed():\r\n dock.fixed = False\r\n if not pane.IsToolbar():\r\n dock.toolbar = False\r\n if pane.HasFlag(AuiPaneInfo.optionDockFixed):\r\n dock.fixed = True\r\n if pane.HasFlag(AuiPaneInfo.actionPane):\r\n action_pane_marked = True\r\n\r\n # if the dock mode is proportional and not fixed-pixel,\r\n # reassign the dock_pos to the sequential 0, 1, 2, 3\r\n # e.g. remove gaps like 1, 2, 30, 500\r\n if not dock.fixed:\r\n for jj in xrange(dock_pane_count):\r\n pane = dock.panes[jj]\r\n pane.dock_pos = jj\r\n \r\n # if the dock mode is fixed, and none of the panes\r\n # are being moved right now, make sure the panes\r\n # do not overlap each other. 
If they do, we will\r\n # adjust the panes' positions\r\n if dock.fixed and not action_pane_marked:\r\n pane_positions, pane_sizes = self.GetPanePositionsAndSizes(dock)\r\n offset = 0\r\n for jj in xrange(dock_pane_count):\r\n pane = dock.panes[jj]\r\n pane.dock_pos = pane_positions[jj]\r\n amount = pane.dock_pos - offset\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n pane.dock_pos += -amount\r\n\r\n offset += pane_sizes[jj]\r\n dock.panes[jj] = pane\r\n\r\n if oncheck:\r\n self._docks[ii] = dock \r\n\r\n # shrink docks if needed \r\n## docks = self.SmartShrink(docks, AUI_DOCK_TOP)\r\n## docks = self.SmartShrink(docks, AUI_DOCK_LEFT)\r\n\r\n if oncheck:\r\n self._docks = docks\r\n \r\n # discover the maximum dock layer\r\n max_layer = 0\r\n dock_count = len(docks)\r\n \r\n for ii in xrange(dock_count):\r\n max_layer = max(max_layer, docks[ii].dock_layer)\r\n\r\n # clear out uiparts\r\n uiparts = []\r\n\r\n # create a bunch of box sizers,\r\n # from the innermost level outwards.\r\n cont = None\r\n middle = None\r\n\r\n if oncheck:\r\n docks = self._docks\r\n \r\n for layer in xrange(max_layer+1):\r\n # find any docks in this layer\r\n arr = FindDocks(docks, -1, layer, -1)\r\n # if there aren't any, skip to the next layer\r\n if not arr:\r\n continue\r\n\r\n old_cont = cont\r\n\r\n # create a container which will hold this layer's\r\n # docks (top, bottom, left, right)\r\n cont = wx.BoxSizer(wx.VERTICAL)\r\n\r\n # find any top docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_TOP, layer, -1)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(cont, row, uiparts, spacer_only)\r\n \r\n # fill out the middle layer (which consists\r\n # of left docks, content area and right docks)\r\n \r\n middle = wx.BoxSizer(wx.HORIZONTAL)\r\n\r\n # find any left docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_LEFT, layer, -1)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n # add content dock (or previous layer's sizer\r\n # to the middle\r\n if not old_cont:\r\n # find any center docks\r\n arr = FindDocks(docks, AUI_DOCK_CENTER, -1, -1)\r\n if arr:\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n elif not self._has_maximized:\r\n # there are no center docks, add a background area\r\n sizer_item = middle.Add((1, 1), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.pane = None\r\n part.dock = None\r\n part.button = None\r\n part.cont_sizer = middle\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n else:\r\n middle.Add(old_cont, 1, wx.EXPAND)\r\n \r\n # find any right docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_RIGHT, layer, -1, reverse=True)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n if len(middle.GetChildren()) > 0:\r\n cont.Add(middle, 1, wx.EXPAND)\r\n\r\n # find any bottom docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_BOTTOM, layer, -1, reverse=True)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(cont, row, uiparts, spacer_only)\r\n\r\n if not cont:\r\n # no sizer available, because there are no docks,\r\n # therefore we will create a simple background area\r\n cont = wx.BoxSizer(wx.VERTICAL)\r\n sizer_item = cont.Add((1, 1), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.pane = None\r\n part.dock = None\r\n part.button = None\r\n part.cont_sizer = middle\r\n part.sizer_item = sizer_item\r\n 
uiparts.append(part)\r\n\r\n if oncheck:\r\n self._uiparts = uiparts\r\n self._docks = docks\r\n\r\n container.Add(cont, 1, wx.EXPAND)\r\n\r\n if oncheck:\r\n return container\r\n else:\r\n return container, panes, docks, uiparts", "def Bottom(self):\r\n\r\n self.dock_direction = AUI_DOCK_BOTTOM\r\n return self", "def Bottom(self):\r\n\r\n self.dock_direction = AUI_DOCK_BOTTOM\r\n return self", "def dock(rec_outpath, reorder_outpath, init='dock_init'):\n init = eval(init)\n receptor = os.path.basename(rec_outpath).split('_')[0]\n dock_dir = os.path.join(init.data_dir, init.dock_folder) \n rec_path = os.path.join(init.data_dir, rec_outpath)\n reorder_path = os.path.join(init.data_dir, reorder_outpath)\n\n dock_name = os.path.basename(rec_path).replace('receptor','dock')\n out_path = os.path.join(dock_dir, receptor, dock_name)\n\n\n\n if not os.path.exists(os.path.dirname(out_path)):\n os.makedirs(os.path.dirname(out_path))\n\n kw = {\n 'receptor': rec_path,\n 'ligand': reorder_path,\n 'autobox_ligand':reorder_path,\n 'out':out_path\n }\n\n cmd = init._make_command(**kw)\n cl = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n cl.wait()\n\n return [[rec_outpath, reorder_outpath, os.path.join(init.dock_folder, receptor, dock_name)]]", "def NotebookDockable(self, b=True):\r\n \r\n return self.SetFlag(self.optionNotebookDockable, b)", "def Dockable(self, b=True):\r\n\r\n return self.TopDockable(b).BottomDockable(b).LeftDockable(b).RightDockable(b)", "def __init__(self, parent, id=wx.ID_ANY, title=\"\", pos=wx.DefaultPosition,\r\n size=wx.DefaultSize, style=wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP |\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER, name=\"AuiDockingGuide\"):\r\n\r\n wx.Frame.__init__(self, parent, id, title, pos, size, style, name=name)", "def set_dock_area(self, dock_area):\n self.widget.SetDockArea(_DOCK_AREA_MAP[dock_area])", "def _update_proxy(self, change):\n # The superclass implementation is sufficient.\n super(DockArea, self)._update_proxy(change)", "def __init__(self):\n\t\tself.dataMemory = sysv_ipc.SharedMemory(65)\n\t\tself.statusMemory = sysv_ipc.SharedMemory(88)\n\t\treturn", "def create_main_enviroment(self):\n # self.layout=QGridLayout()\n self.resize(900, 900)\n self.centralWidget = CentralWidget(self) # CentralWidget(self)\n self.setCentralWidget(self.centralWidget)\n\n # self.toolbar = QToolBar(self)\n # self.addToolBar(self.toolbar)\n\n # self.setLayout(self.layout)\n self.setWindowTitle(\"Fitting elastic constants\")", "def SetDockArea(self, dock_area):\n if self._dock_area != dock_area:\n self._dock_area = dock_area\n def closure(pane):\n pane.Direction(dock_area)\n self._PaneInfoOperation(closure)", "def list():\n return [Dock.OMNI, Dock.LEFT, Dock.RIGHT]", "def Layer(self, layer):\r\n \r\n self.dock_layer = layer\r\n return self", "def __init__(self, sys, rcut, pad):\n self.sys = sys \n self.rcut = rcut \n self.pad = pad\n self.cell_list = CellList(self.sys.box, self.rcut + self.pad)", "def createWidgets(self):\n self.tab = WorkspaceTab( self )\n self.tab.setMinimumWidth(500)\n self.tab.setDocumentMode( False )\n self.tab.setMovable( True )\n\n # self.dockToolbar = QToolBar(self)\n # self.dockToolbar.setOrientation(Qt.Vertical)\n\n self.findWidget = FindReplace(self)\n self.findWidget.setDisabled(True)\n self.findWidget.hide()\n\n layout = QVBoxLayout()\n layout.addWidget( self.tab )\n layout.setContentsMargins(0,0,0,0)\n\n \n self.setLayout(layout)", "def Center(self):\r\n\r\n self.dock_direction = AUI_DOCK_CENTER\r\n return self", "def __init__(self, 
parent, populator):\n ListHeap.__init__(self, parent, populator, 6)\n self.propagate(0)\n self.config(width=10, height=100)", "def IsDockable(self):\r\n \r\n return self.IsTopDockable() or self.IsBottomDockable() or self.IsLeftDockable() or \\\r\n self.IsRightDockable() or self.IsNotebookDockable()", "def onInvoke():\n if dock.isVisible():\n dock.toggleViewAction().trigger()\n else:\n dock.setFloating(True)\n pos = QtGui.QCursor.pos()\n dock.move(pos.x() - dock.size().width() / 2,\n pos.y() - dock.size().height() / 2)\n dock.setVisible(True)", "def Center(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTER\r\n return self", "def main():\n main = tk.Tk()\n\n # Add Title\n main.title(\"Space Craft Creator\")\n\n # Disable Resizing\n main.resizable(False, False)\n\n data = Data()\n\n spacecraft = Spacecraft(data)\n\n notebook = Notebook(main)\n\n spacecraft_tab = spacecraft.make_tab(notebook)\n\n notebook.add(spacecraft_tab, text=\"Spacecraft\")\n for key, subsystem in spacecraft.subsections.items():\n notebook.add(subsystem.make_tab(notebook), text=key)\n notebook.add(spacecraft.sizes.make_tab(notebook), text=\"Sizes\")\n notebook.add(spacecraft.velocities.make_tab(notebook), text=\"Velocity Profiles\")\n\n notebook.grid(column=0, row=0)\n notebook.enable_traversal()\n\n button = Button(main, text=\"Caclulate\", command=spacecraft.calculate)\n button.grid(column=0, row=1)\n\n main.mainloop()", "def SetupToolBar( self ):\n tb = self.CreateToolBar( self.TBFLAGS )\n tsize = (24,24)\n tb.ToolBitmapSize = tsize\n open_bmp = wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN, wx.ART_TOOLBAR, tsize)\n tb.AddLabelTool(ID_OPEN, \"Open\", open_bmp, shortHelp=\"Open\", longHelp=\"Open a (c)Profile trace file\")\n tb.AddSeparator()\n# self.Bind(wx.EVT_TOOL, self.OnOpenFile, id=ID_OPEN)\n self.rootViewTool = tb.AddLabelTool(\n ID_ROOT_VIEW, _(\"Root View\"),\n wx.ArtProvider.GetBitmap(wx.ART_GO_HOME, wx.ART_TOOLBAR, tsize),\n shortHelp=_(\"Display the root of the current view tree (home view)\")\n )\n self.rootViewTool = tb.AddLabelTool(\n ID_BACK_VIEW, _(\"Back\"), \n wx.ArtProvider.GetBitmap(wx.ART_GO_BACK, wx.ART_TOOLBAR, tsize),\n shortHelp=_(\"Back to the previously activated node in the call tree\")\n )\n self.upViewTool = tb.AddLabelTool(\n ID_UP_VIEW, _(\"Up\"),\n wx.ArtProvider.GetBitmap(wx.ART_GO_UP, wx.ART_TOOLBAR, tsize),\n shortHelp=_(\"Go one level up the call tree (highest-percentage parent)\")\n )\n tb.AddSeparator()\n # TODO: figure out why the control is sizing the label incorrectly on Linux\n self.percentageViewTool = wx.CheckBox( tb, -1, _(\"Percent \") )\n self.percentageViewTool.SetToolTip( wx.ToolTip(_(\"Toggle display of percentages in list views\")) )\n tb.AddControl( self.percentageViewTool )\n wx.EVT_CHECKBOX( self.percentageViewTool, self.percentageViewTool.GetId(), self.OnPercentageView )\n \n self.packageViewTool = wx.CheckBox( tb, -1, _(\"File View \") )\n self.packageViewTool.SetToolTip( wx.ToolTip(_(\"Switch between call-hierarchy and package/module/function hierarchy\")) )\n tb.AddControl( self.packageViewTool )\n wx.EVT_CHECKBOX( self.packageViewTool, self.packageViewTool.GetId(), self.OnPackageView )\n tb.Realize()", "def __init__(self, parent, direction=0):\r\n\r\n self._direction = direction\r\n\r\n style = wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP | \\\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER\r\n\r\n # Use of FRAME_SHAPED on wxMac causes the frame to be visible\r\n # breaking the docking hints.\r\n if wx.Platform != '__WXMAC__':\r\n style |= wx.FRAME_SHAPED\r\n\r\n 
AuiDockingGuide.__init__(self, parent, style=style, name=\"auiSingleDockTarget\")\r\n \r\n self.Hide()\r\n\r\n useAero = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_AERO_DOCKING_GUIDES\r\n useWhidbey = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_WHIDBEY_DOCKING_GUIDES\r\n \r\n self._useAero = useAero or useWhidbey\r\n self._valid = True\r\n \r\n if useAero:\r\n sizeX, sizeY = aeroguideSizeX, aeroguideSizeY\r\n elif useWhidbey:\r\n sizeX, sizeY = whidbeySizeX, whidbeySizeY\r\n else:\r\n sizeX, sizeY = guideSizeX, guideSizeY\r\n\r\n if direction not in [wx.TOP, wx.BOTTOM]:\r\n sizeX, sizeY = sizeY, sizeX\r\n\r\n if self._useAero:\r\n self.CreateShapesWithStyle(useWhidbey)\r\n \r\n if wx.Platform == \"__WXGTK__\":\r\n self.Bind(wx.EVT_WINDOW_CREATE, self.SetGuideShape)\r\n else:\r\n self.SetGuideShape()\r\n \r\n self.SetSize(self.region.GetBox().GetSize())\r\n else:\r\n self.SetSize((sizeX, sizeY))\r\n \r\n self.rect = wx.Rect(0, 0, sizeX, sizeY)\r\n\r\n if self._useAero:\r\n useAero = (useWhidbey and [2] or [1])[0]\r\n else:\r\n useAero = 0\r\n \r\n self.target = AuiDockingGuideWindow(self, self.rect, direction, False, useAero)", "def create(self):\n self.panel = pg.rect.Rect(self.position, self.dimensions)", "def Setup(dock, parent, name = NAME, color = DEFAULT_COLOR):\n object = Tab(parent, dock, name, color)\n return object", "def build_frames(self):\n self.cntrl_frame = tk.PanedWindow(self.root)\n self.cntrl_frame.pack(side = tk.TOP, padx = 1, pady = 1, fill = tk.Y)\n self.info_frame_1 = tk.PanedWindow(self.root)\n self.info_frame_1.pack(side = tk.TOP, padx = 1, pady = 2, fill = tk.Y)", "def createToolBar(self):\n pass", "def borrowDocker(self, docker):\r\n\r\n # Does requested widget exist?\r\n if isinstance(docker, QDockWidget) and docker.widget():\r\n # Return any previous widget to its original docker\r\n self.returnDocker()\r\n \r\n self.widgetDocker = docker\r\n self.widget = docker.widget()\r\n\r\n # Because I'm forced to use \"setFixedSize\" to resize the tool options\r\n # it needs to be put in a container, otherwise it's going to break if/when\r\n # returned to its original docker. 
Manipulate the container; not the widget.\r\n self.container = ToolOptionsContainer()\r\n self.container.layout().addWidget(self.widget)\r\n\r\n self.layout().addWidget(self.container, 0, Qt.AlignRight) \r\n self.adjustToView() \r\n \r\n return True\r\n \r\n return False", "def SetDockSizeConstraint(self, width_pct, height_pct):\r\n\r\n self._dock_constraint_x = max(0.0, min(1.0, width_pct))\r\n self._dock_constraint_y = max(0.0, min(1.0, height_pct))", "def test_plants_docking(self):\n self.workdir = prepare_work_dir(__rootpath__, create=True)\n settings['workdir'] = self.workdir\n settings['bindingsite_center'] = [7.79934, 9.49666, 3.39229]\n settings['exec_path'] = exec_path\n\n plants = PlantsDocking(**settings)\n self.assertTrue(plants.run(self.protein, self.ligand))\n\n outputfiles = glob.glob('{0}/_entry_00001_conf_*.mol2'.format(self.workdir))\n self.assertEqual(len(outputfiles), plants.config['cluster_structures'])\n self.assertEqual(len(outputfiles), len(plants.results()))", "def ReDockPage(self, pane):\r\n\r\n root_manager = framemanager.GetManager(self) \r\n\r\n pane.window.__floating_size__ = wx.Size(*pane.floating_size)\r\n page_index = pane.window.__page_index__\r\n text_colour = pane.window.__text_colour__\r\n control = pane.window.__control__\r\n \r\n root_manager.DetachPane(pane.window)\r\n self.InsertPage(page_index, pane.window, pane.caption, True, pane.icon, control=control)\r\n\r\n self.SetPageTextColour(page_index, text_colour)\r\n self.GetActiveTabCtrl().DoShowHide()\r\n self.DoSizing()\r\n if control:\r\n self.UpdateTabCtrlHeight(force=True)\r\n \r\n self._mgr.Update()\r\n root_manager.Update()", "def create_layout( self ):", "def on_docked(self, event):\n area = self.widget.GetDockArea()\n if not self._guard & FLOATED_GUARD:\n self._guard |= FLOATED_GUARD\n try:\n self.declaration.floating = False\n self.declaration.dock_area = _DOCK_AREA_INV_MAP[area]\n finally:\n self._guard &= ~FLOATED_GUARD", "def Data_Frame( self ):\r\n #Create pane\r\n p = self.pane_widget.add( \"Data\", min = 0.1, max = 0.9)\r\n frame_sequence = Frame( p )\r\n #xscroll at the top\r\n self.xscroll = Scrollbar( frame_sequence, orient = HORIZONTAL )\r\n self.xscroll.pack(side = TOP, fill = X )\r\n #create the canvas where the data will be displayed\r\n self.canvas_two = Canvas( frame_sequence )\r\n #Make sure these values are consistent with self.canvas_one in Tree_Frame\r\n self.canvas_two.pack( side = TOP, fill = BOTH, expand = 1 )\r\n self.xscroll.config( command = self.canvas_two.xview )\r\n self.canvas_two.config( xscrollcommand = self.xscroll.set )\r\n frame_sequence.pack(side=LEFT, fill = BOTH)", "def Show(self, show=True):\r\n \r\n super(AuiDockingHintWindow, self).Show(show)\r\n if wx.Platform == '__WXMAC__':\r\n # Need to manually do layout since its a borderless frame.\r\n self.Layout()", "def CopyDocksAndPanes2(src_docks, src_panes):\r\n \r\n dest_docks = []\r\n\r\n for ii in xrange(len(src_docks)):\r\n dest_docks.append(AuiDockInfo())\r\n dest_docks[ii].dock_direction = src_docks[ii].dock_direction\r\n dest_docks[ii].dock_layer = src_docks[ii].dock_layer\r\n dest_docks[ii].dock_row = src_docks[ii].dock_row\r\n dest_docks[ii].size = src_docks[ii].size\r\n dest_docks[ii].min_size = src_docks[ii].min_size\r\n dest_docks[ii].resizable = src_docks[ii].resizable\r\n dest_docks[ii].fixed = src_docks[ii].fixed\r\n dest_docks[ii].toolbar = src_docks[ii].toolbar\r\n dest_docks[ii].panes = src_docks[ii].panes\r\n dest_docks[ii].rect = wx.Rect(*src_docks[ii].rect)\r\n\r\n dest_panes = 
[]\r\n\r\n for ii in xrange(len(src_panes)):\r\n dest_panes.append(AuiPaneInfo())\r\n dest_panes[ii].name = src_panes[ii].name\r\n dest_panes[ii].caption = src_panes[ii].caption\r\n dest_panes[ii].window = src_panes[ii].window\r\n dest_panes[ii].frame = src_panes[ii].frame\r\n dest_panes[ii].state = src_panes[ii].state\r\n dest_panes[ii].dock_direction = src_panes[ii].dock_direction\r\n dest_panes[ii].dock_layer = src_panes[ii].dock_layer\r\n dest_panes[ii].dock_row = src_panes[ii].dock_row\r\n dest_panes[ii].dock_pos = src_panes[ii].dock_pos\r\n dest_panes[ii].best_size = wx.Size(*src_panes[ii].best_size)\r\n dest_panes[ii].min_size = wx.Size(*src_panes[ii].min_size)\r\n dest_panes[ii].max_size = wx.Size(*src_panes[ii].max_size)\r\n dest_panes[ii].floating_pos = wx.Point(*src_panes[ii].floating_pos)\r\n dest_panes[ii].floating_size = wx.Size(*src_panes[ii].floating_size)\r\n dest_panes[ii].dock_proportion = src_panes[ii].dock_proportion\r\n dest_panes[ii].buttons = src_panes[ii].buttons\r\n dest_panes[ii].rect = wx.Rect(*src_panes[ii].rect)\r\n dest_panes[ii].icon = src_panes[ii].icon\r\n dest_panes[ii].notebook_id = src_panes[ii].notebook_id\r\n dest_panes[ii].transparent = src_panes[ii].transparent\r\n dest_panes[ii].snapped = src_panes[ii].snapped\r\n dest_panes[ii].minimize_mode = src_panes[ii].minimize_mode\r\n\r\n for ii in xrange(len(dest_docks)):\r\n dock = dest_docks[ii]\r\n for jj in xrange(len(dock.panes)):\r\n for kk in xrange(len(src_panes)):\r\n if dock.panes[jj] == src_panes[kk]:\r\n dock.panes[jj] = dest_panes[kk]\r\n\r\n dest_docks[ii] = dock\r\n \r\n return dest_docks, dest_panes", "def ValidateNotebookDocking(self, valid):\r\n \r\n return 0", "def create_mplframe(self):\r\n #create figure and axes objects\r\n self.fig = Figure()\r\n self.subplot = self.fig.add_subplot(111)\r\n #disable axis, because it will only show an image\r\n self.subplot.get_yaxis().set_visible(False)\r\n self.subplot.get_xaxis().set_visible(False)\r\n \r\n #create canvas and toolbar\r\n self.canvas = FigureCanvas(self.fig)\r\n self.toolbar = MyNavigationToolbar(self.canvas, None)\r\n\r\n #add the canvas and toolbar to the gui\r\n self.ui.imageLayout.addWidget(self.canvas)\r\n self.ui.imageLayout.addWidget(self.toolbar)\r\n\r\n #connect the toolbar selection to matploblib as a callback\r\n self.canvas.mpl_connect('selection_changed',self.toolbar_selection)", "def Top(self):\r\n\r\n self.dock_direction = AUI_DOCK_TOP\r\n return self", "def Top(self):\r\n\r\n self.dock_direction = AUI_DOCK_TOP\r\n return self", "def SetDockWidget(self, widget):\n old_widget = self._dock_widget\n if old_widget:\n old_widget.Hide()\n self._dock_widget = widget\n self.GetSizer().Add(widget)\n self.UpdateSizing()", "def build(self):\n self.icon = 'data/icon.png'\n return CalcGridLayout()", "def BottomDockable(self, b=True):\r\n \r\n return self.SetFlag(self.optionBottomDockable, b)", "def Centre(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTRE\r\n return self", "def Centre(self):\r\n \r\n self.dock_direction = AUI_DOCK_CENTRE\r\n return self", "def show(self, target=None, position=0):\n\n # if there is no Qt (eg, our UI framework...) then there is no UI\n if not QT_AVAILABLE:\n return\n\n # the UI has already been created, and is also visible. 
nothing to do\n if (self.dockable and self.dockable.visible):\n return\n\n #\n # if the UI has not yet been created, or has been previously closed\n # then we are free to create new UI elements to take the place of\n # anything that once was\n\n self.view = HexView(self, self.model)\n new_dockable = DockableWindow(self._title, self.view)\n\n #\n # if there is a reference to a left over dockable window (e.g, from a\n # previous close of this window type) steal its dock positon so we can\n # hopefully take the same place as the old one\n #\n\n if self.dockable:\n new_dockable.copy_dock_position(self.dockable)\n elif (target or position):\n new_dockable.set_dock_position(target, position)\n\n # make the dockable/widget visible\n self.dockable = new_dockable\n self.dockable.show()", "def IsNotebookDockable(self):\r\n\r\n return self.HasFlag(self.optionNotebookDockable)", "def createWidgets(self):\n # create top menus\n self.menu= Menu(self.master)\n self.master.config(menu=self.menu)\n self.file_menu = Menu(self.menu)\n self.menu.add_cascade(label=\"File\", menu=self.file_menu)\n self.file_menu.add_command(label=\"Quit\", command=self.quit)\n\n self.edit_menu = Menu(self.menu)\n self.edit_opts_menu = Menu(self.edit_menu)\n self.menu.add_cascade(label=\"Edit\", menu=self.edit_menu)\n self.edit_menu.add_cascade(label=\"Options\", menu=self.edit_opts_menu)\n self.edit_menu.add_command(label=\"Clear Terminal\", command=lambda: self.terminal.delete(1.0,END))\n\n # Options\n self.autoscroll_value = BooleanVar()\n self.edit_opts_menu.add_checkbutton(label=\"Autoscroll\", onvalue=True, offvalue=False, variable=self.autoscroll_value)\n\n\n #----------------------------------------\n # Create the Device entry\n\n self.device_value = StringVar()\n self.device_value.set(self.device)\n self.device_label = Label( self.master, text=\"Port:\" )\n self.device_label.grid(row=0, column = 0,sticky=E)\n self.device_menu = OptionMenu( self.master, self.device_value, *self.device_choices) \n self.device_menu.config(width=40)\n self.device_menu.grid(row=0, column = 1)\n\n #----------------------------------------\n # Create the Baud rate entry\n\n self.baudrate_value = IntVar()\n self.baudrate_value.set(self.baudrate) # loaded from default, args, or config\n self.baudrate_choices = [ 9600, 14400, 19200, 28800, 38400, 57600, 102400, 115200, 128000, 230400, 256000, 460800, 512000, 921600, 1843200, 2048000 ]\n self.baudrate_label = Label( self.master, text=\"Baud rate:\" )\n self.baudrate_label.grid(row=0, column = 2, sticky=E)\n self.baudrate_menu = OptionMenu( self.master, self.baudrate_value, *self.baudrate_choices)\n self.baudrate_menu.config(width=10)\n self.baudrate_menu.grid(row=0, column = 3)\n\n #----------------------------------------\n # Create the Log file entry\n\n self.log_value = StringVar()\n self.log_value.set(self.logfile)\n self.log_label = Label( self.master, text=\"Log file:\" )\n self.log_label.grid(row=1,column = 0, sticky=E)\n self.log_entry = Entry( self.master, width = 46, textvariable=self.log_value )\n self.log_entry.grid(row=1, column = 1)\n self.log_button = Button (self.master, text=\"Browse\", command=self.browseLogFile)\n self.log_button.grid(row=1, column = 2, sticky=W)\n\n #----------------------------------------\n # Create the connect/disconnect button\n\n self.connect_button = Button ( self.master, text=\"Connect\", command=self.connect,width=12)\n self.connect_button.grid(row=1,column=3)\n\n #----------------------------------------\n # Create the terminal window\n\n self.terminal 
= Text( self.master, width = 65, background='black', foreground='white' )\n self.terminal.grid(row=2, column = 0, columnspan=4, sticky=E+W)\n\n # scroll bar\n self.terminal_scroller = AutoScrollbar(self.master, command=self.terminal.yview)\n self.terminal_scroller.grid(row=2,column=4, sticky=N+S)\n self.terminal.config(yscrollcommand=self.terminal_scroller.set)\n self.terminal_scroller_lastpos = (0.0, 1.0)\n self.autoscroll_value.set(True)", "def __createLayout(self):\r\n self.__createCanvas()\r\n self.__createButton()\r\n self.__createInputFunction()\r\n self.__createLimits()\r\n self.__styleLayout()", "def create_containers(self):\r\n self.container_widgets.update({\r\n \"main_frame\": tk.Frame(master=self)})\r\n self.container_widgets.update({\r\n \"panel_frame\": tk.Frame(master=self.container_widgets[\"main_frame\"]),\r\n \"order_frame\": tk.Frame(master=self.container_widgets[\"main_frame\"],\r\n width=const.ORDER_FRAME_SIZE[\"width\"],\r\n height=const.ORDER_FRAME_SIZE[\"height\"])})\r\n self.container_widgets.update({\r\n \"order_canvas\": tk.Canvas(master=self.container_widgets[\"order_frame\"])})\r\n self.container_widgets.update({\r\n \"orders_container\": tk.Frame(master=self.container_widgets[\"order_canvas\"]),\r\n \"orders_scrollbar\": tk.Scrollbar(master=self.container_widgets[\"order_frame\"],\r\n orient=\"vertical\",\r\n command=self.container_widgets[\"order_canvas\"].yview)\r\n })", "def dock_widget(self):\n d = self.declaration.dock_widget()\n if d is not None:\n return d.proxy.widget", "def LayoutAddPane(self, cont, dock, pane, uiparts, spacer_only):\r\n \r\n sizer_item = wx.SizerItem()\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n gripper_size = self._art.GetMetric(AUI_DOCKART_GRIPPER_SIZE)\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n pane_button_size = self._art.GetMetric(AUI_DOCKART_PANE_BUTTON_SIZE)\r\n\r\n # find out the orientation of the item (orientation for panes\r\n # is the same as the dock's orientation)\r\n\r\n if dock.IsHorizontal():\r\n orientation = wx.HORIZONTAL\r\n else:\r\n orientation = wx.VERTICAL\r\n\r\n # this variable will store the proportion\r\n # value that the pane will receive\r\n pane_proportion = pane.dock_proportion\r\n\r\n horz_pane_sizer = wx.BoxSizer(wx.HORIZONTAL)\r\n vert_pane_sizer = wx.BoxSizer(wx.VERTICAL)\r\n\r\n if pane.HasGripper():\r\n \r\n part = AuiDockUIPart()\r\n if pane.HasGripperTop():\r\n sizer_item = vert_pane_sizer.Add((1, gripper_size), 0, wx.EXPAND)\r\n else:\r\n sizer_item = horz_pane_sizer.Add((gripper_size, 1), 0, wx.EXPAND)\r\n\r\n part.type = AuiDockUIPart.typeGripper\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = horz_pane_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n button_count = len(pane.buttons)\r\n button_width_total = button_count*pane_button_size\r\n if button_count >= 1:\r\n button_width_total += 3\r\n\r\n caption, captionLeft = pane.HasCaption(), pane.HasCaptionLeft()\r\n button_count = len(pane.buttons)\r\n\r\n if captionLeft:\r\n caption_sizer = wx.BoxSizer(wx.VERTICAL)\r\n\r\n # add pane buttons to the caption\r\n dummy_parts = []\r\n for btn_id in xrange(len(pane.buttons)-1, -1, -1):\r\n sizer_item = caption_sizer.Add((caption_size, pane_button_size), 0, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typePaneButton\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = pane.buttons[btn_id]\r\n part.orientation = 
orientation\r\n part.cont_sizer = caption_sizer\r\n part.sizer_item = sizer_item\r\n dummy_parts.append(part)\r\n \r\n sizer_item = caption_sizer.Add((caption_size, 1), 1, wx.EXPAND)\r\n vert_pane_sizer = wx.BoxSizer(wx.HORIZONTAL)\r\n\r\n # create the caption sizer\r\n part = AuiDockUIPart()\r\n\r\n part.type = AuiDockUIPart.typeCaption\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = vert_pane_sizer\r\n part.sizer_item = sizer_item\r\n caption_part_idx = len(uiparts)\r\n uiparts.append(part)\r\n uiparts.extend(dummy_parts)\r\n\r\n elif caption:\r\n\r\n caption_sizer = wx.BoxSizer(wx.HORIZONTAL)\r\n sizer_item = caption_sizer.Add((1, caption_size), 1, wx.EXPAND)\r\n\r\n # create the caption sizer\r\n part = AuiDockUIPart()\r\n\r\n part.type = AuiDockUIPart.typeCaption\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = vert_pane_sizer\r\n part.sizer_item = sizer_item\r\n caption_part_idx = len(uiparts)\r\n uiparts.append(part)\r\n\r\n # add pane buttons to the caption\r\n for button in pane.buttons:\r\n sizer_item = caption_sizer.Add((pane_button_size, caption_size), 0, wx.EXPAND) \r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typePaneButton\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = button\r\n part.orientation = orientation\r\n part.cont_sizer = caption_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n if caption or captionLeft:\r\n # if we have buttons, add a little space to the right\r\n # of them to ease visual crowding\r\n if button_count >= 1:\r\n if captionLeft:\r\n caption_sizer.Add((caption_size, 3), 0, wx.EXPAND)\r\n else:\r\n caption_sizer.Add((3, caption_size), 0, wx.EXPAND)\r\n\r\n # add the caption sizer\r\n sizer_item = vert_pane_sizer.Add(caption_sizer, 0, wx.EXPAND)\r\n uiparts[caption_part_idx].sizer_item = sizer_item\r\n \r\n # add the pane window itself\r\n if spacer_only or not pane.window:\r\n sizer_item = vert_pane_sizer.Add((1, 1), 1, wx.EXPAND)\r\n else:\r\n sizer_item = vert_pane_sizer.Add(pane.window, 1, wx.EXPAND)\r\n vert_pane_sizer.SetItemMinSize(pane.window, (1, 1))\r\n\r\n part = AuiDockUIPart() \r\n part.type = AuiDockUIPart.typePane\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = vert_pane_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n # determine if the pane should have a minimum size if the pane is\r\n # non-resizable (fixed) then we must set a minimum size. 
Alternatively,\r\n # if the pane.min_size is set, we must use that value as well\r\n \r\n min_size = pane.min_size\r\n if pane.IsFixed():\r\n if min_size == wx.Size(-1, -1):\r\n min_size = pane.best_size\r\n pane_proportion = 0\r\n\r\n if min_size != wx.Size(-1, -1):\r\n vert_pane_sizer.SetItemMinSize(len(vert_pane_sizer.GetChildren())-1, (min_size.x, min_size.y))\r\n \r\n # add the vertical/horizontal sizer (caption, pane window) to the\r\n # horizontal sizer (gripper, vertical sizer)\r\n horz_pane_sizer.Add(vert_pane_sizer, 1, wx.EXPAND)\r\n\r\n # finally, add the pane sizer to the dock sizer\r\n if pane.HasBorder():\r\n # allowing space for the pane's border\r\n sizer_item = cont.Add(horz_pane_sizer, pane_proportion,\r\n wx.EXPAND | wx.ALL, pane_border_size)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typePaneBorder\r\n part.dock = dock\r\n part.pane = pane\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n else:\r\n sizer_item = cont.Add(horz_pane_sizer, pane_proportion, wx.EXPAND)\r\n \r\n return uiparts", "def TopDockable(self, b=True):\r\n \r\n return self.SetFlag(self.optionTopDockable, b)", "def init_widget(self):\n super(WxDockPane, self).init_widget()\n d = self.declaration\n self.set_title(d.title)\n self.set_title_bar_visible(d.title_bar_visible)\n self.set_title_bar_orientation(d.title_bar_orientation)\n self.set_closable(d.closable)\n self.set_movable(d.movable)\n self.set_floatable(d.floatable)\n self.set_floating(d.floating)\n self.set_dock_area(d.dock_area)\n self.set_allowed_dock_areas(d.allowed_dock_areas)\n widget = self.widget\n widget.Bind(EVT_DOCK_PANE_FLOATED, self.on_floated)\n widget.Bind(EVT_DOCK_PANE_DOCKED, self.on_docked)\n widget.Bind(EVT_DOCK_PANE_CLOSED, self.on_closed)", "def __create__container(self):\n self.__used_containers.append(contenedor.Arena(self.__blocks_size))", "def widgetSetup(self):\n self.master.resizable(0, 0)\n self.master.iconbitmap('logo.ico')\n self.master.title(\"Ejercicio POO\")\n\n self.master.bind(\"<Return>\", lambda e: self.create())\n self.master.bind(\"<Delete>\", lambda e: self.delete())", "def __init__(self):\r\n self.__memory = []", "def __init__(self, structure_id, pdb_file, amb_file, flex1_file, flex2_file, root_dir=None):\n\n super(DOCK, self).__init__(id=structure_id, description='DOCK6 preparation')\n self._root_dir = None\n self.structure_path = pdb_file\n\n if root_dir:\n self.root_dir = root_dir\n else:\n self.root_dir = self.structure_dir\n\n self.dockprep_path = None\n self.receptormol2_path = None\n self.receptorpdb_path = None\n self.dms_path = None\n self.sphgen_path = None\n self.bindingsite_path = None\n self.sphsel_path = None\n self.box_path = None\n self.grid_path = None\n\n self.dock_flexible_outfile = None\n self.dock_flexible_scored_result = None\n self.dock_flexible_conformers_result = None\n\n self.amb_file = amb_file\n self.flex1_file = flex1_file\n self.flex2_file = flex2_file\n\n log.debug('{}: created DOCK6 project folder at {}'.format(structure_id, self.dock_dir))", "def AddToolbar(self, name):\n if name == \"digitMap\":\n self.toolbars[name] = RDigitMapToolbar(self)\n \n self._mgr.AddPane(self.toolbars[name],\n wx.aui.AuiPaneInfo().\n Name(name).Caption(_(\"Map Toolbar\")).\n ToolbarPane().Top().\n LeftDockable(False).RightDockable(False).\n BottomDockable(False).TopDockable(True).\n CloseButton(False).Layer(2).Row(1).\n BestSize((self.toolbars[name].GetBestSize())))\n \n elif name == 
\"rdigit\":\n self.toolbars[name] = RDigitToolbar(parent = self, MapWindow = self.MapWindow,\n digitClass = RDigit, layerTree = self.mapManager)\n \n self._mgr.AddPane(self.toolbars[name],\n wx.aui.AuiPaneInfo().\n Name(\"rdigittoolbar\").Caption(_(\"Raster Digitizer Toolbar\")).\n ToolbarPane().Top().Row(1).\n LeftDockable(False).RightDockable(False).\n BottomDockable(False).TopDockable(True).\n CloseButton(False).Layer(0).\n BestSize((self.toolbars['rdigit'].GetBestSize()))) \n self.MapWindow.SetToolbar(self.toolbars[name])\n #self._mgr.GetPane('rdigittoolbar').Hide()", "def SetPaneWindow(self, pane):\r\n\r\n self._is_toolbar = pane.IsToolbar()\r\n self._pane_window = pane.window\r\n\r\n if isinstance(pane.window, auibar.AuiToolBar):\r\n pane.window.SetAuiManager(self._mgr)\r\n \r\n self._pane_window.Reparent(self)\r\n \r\n contained_pane = self.CopyAttributes(pane)\r\n \r\n contained_pane.Dock().Center().Show(). \\\r\n CaptionVisible(False). \\\r\n PaneBorder(False). \\\r\n Layer(0).Row(0).Position(0)\r\n\r\n if not contained_pane.HasGripper() and not self._useNativeMiniframes:\r\n contained_pane.CaptionVisible(True)\r\n\r\n indx = self._owner_mgr._panes.index(pane)\r\n\r\n # Carry over the minimum size\r\n pane_min_size = pane.window.GetMinSize()\r\n\r\n # if the best size is smaller than the min size\r\n # then set the min size to the best size as well\r\n pane_best_size = contained_pane.best_size\r\n if pane_best_size.IsFullySpecified() and (pane_best_size.x < pane_min_size.x or \\\r\n pane_best_size.y < pane_min_size.y):\r\n\r\n pane_min_size = pane_best_size\r\n self._pane_window.SetMinSize(pane_min_size)\r\n \r\n # if the frame window's max size is greater than the min size\r\n # then set the max size to the min size as well\r\n cur_max_size = self.GetMaxSize()\r\n if cur_max_size.IsFullySpecified() and (cur_max_size.x < pane_min_size.x or \\\r\n cur_max_size.y < pane_min_size.y):\r\n self.SetMaxSize(pane_min_size)\r\n\r\n art_provider = self._mgr.GetArtProvider()\r\n caption_size = art_provider.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n button_size = art_provider.GetMetric(AUI_DOCKART_PANE_BUTTON_SIZE) + \\\r\n 4*art_provider.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n\r\n min_size = pane.window.GetMinSize()\r\n\r\n if min_size.y < caption_size or min_size.x < button_size:\r\n new_x, new_y = min_size.x, min_size.y\r\n if min_size.y < caption_size:\r\n new_y = (pane.IsResizeable() and [2*wx.SystemSettings.GetMetric(wx.SYS_EDGE_Y)+caption_size] or [1])[0]\r\n if min_size.x < button_size:\r\n new_x = (pane.IsResizeable() and [2*wx.SystemSettings.GetMetric(wx.SYS_EDGE_X)+button_size] or [1])[0]\r\n \r\n self.SetMinSize((new_x, new_y))\r\n else:\r\n self.SetMinSize(min_size)\r\n\r\n self._mgr.AddPane(self._pane_window, contained_pane)\r\n self._mgr.Update() \r\n\r\n if pane.min_size.IsFullySpecified():\r\n # because SetSizeHints() calls Fit() too (which sets the window\r\n # size to its minimum allowed), we keep the size before calling\r\n # SetSizeHints() and reset it afterwards...\r\n tmp = self.GetSize()\r\n self.GetSizer().SetSizeHints(self)\r\n self.SetSize(tmp)\r\n \r\n self.SetTitle(pane.caption)\r\n\r\n if pane.floating_size != wx.Size(-1, -1):\r\n self.SetSize(pane.floating_size)\r\n else:\r\n size = pane.best_size\r\n if size == wx.Size(-1, -1):\r\n size = pane.min_size\r\n if size == wx.Size(-1, -1):\r\n size = self._pane_window.GetSize()\r\n if self._owner_mgr and pane.HasGripper():\r\n if pane.HasGripperTop():\r\n size.y += 
self._owner_mgr._art.GetMetric(AUI_DOCKART_GRIPPER_SIZE)\r\n else:\r\n size.x += self._owner_mgr._art.GetMetric(AUI_DOCKART_GRIPPER_SIZE)\r\n\r\n if not self._useNativeMiniframes:\r\n size.y += self._owner_mgr._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n \r\n pane.floating_size = size\r\n \r\n self.SetClientSize(size)\r\n\r\n self._owner_mgr._panes[indx] = pane\r\n\r\n self._fly_step = abs(pane.floating_size.y - \\\r\n (caption_size + 2*wx.SystemSettings.GetMetric(wx.SYS_EDGE_Y)))/10\r\n\r\n self._floating_size = wx.Size(*self.GetSize())\r\n\r\n if pane.IsFlyOut():\r\n self._check_fly_timer.Start(50)", "def UpdateDockingGuides(self, paneInfo):\r\n\r\n if len(self._guides) == 0:\r\n self.CreateGuideWindows()\r\n\r\n captionSize = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n mousePos = wx.GetMousePosition()\r\n\r\n for indx, guide in enumerate(self._guides):\r\n \r\n pt = wx.Point()\r\n guide_size = guide.host.GetSize()\r\n if not guide.host:\r\n raise Exception(\"Invalid docking host\")\r\n\r\n direction = guide.dock_direction\r\n\r\n if direction == AUI_DOCK_LEFT:\r\n pt.x = frameRect.x + guide_size.x / 2 + 16\r\n pt.y = frameRect.y + frameRect.height / 2\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n pt.x = frameRect.x + frameRect.width / 2\r\n pt.y = frameRect.y + guide_size.y / 2 + 16\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n pt.x = frameRect.x + frameRect.width - guide_size.x / 2 - 16\r\n pt.y = frameRect.y + frameRect.height / 2\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n pt.x = frameRect.x + frameRect.width / 2\r\n pt.y = frameRect.y + frameRect.height - guide_size.y / 2 - 16\r\n\r\n elif direction == AUI_DOCK_CENTER:\r\n rc = paneInfo.window.GetScreenRect()\r\n pt.x = rc.x + rc.width / 2\r\n pt.y = rc.y + rc.height / 2\r\n if paneInfo.HasCaption():\r\n pt.y -= captionSize / 2\r\n elif paneInfo.HasCaptionLeft():\r\n pt.x -= captionSize / 2\r\n\r\n # guide will be centered around point 'pt'\r\n targetPosition = wx.Point(pt.x - guide_size.x / 2, pt.y - guide_size.y / 2)\r\n\r\n if guide.host.GetPosition() != targetPosition:\r\n guide.host.Move(targetPosition)\r\n \r\n guide.host.AeroMove(targetPosition)\r\n\r\n if guide.dock_direction == AUI_DOCK_CENTER:\r\n guide.host.ValidateNotebookDocking(paneInfo.IsNotebookDockable())\r\n\r\n guide.host.UpdateDockGuide(mousePos)\r\n \r\n paneInfo.window.Lower()", "def UpdateDockGuide(self, pos):\r\n\r\n inside = self.GetScreenRect().Contains(pos)\r\n \r\n if inside:\r\n image = self._bmp_focus\r\n else:\r\n image = self._bmp_unfocus\r\n\r\n if image != self._currentImage:\r\n self._currentImage = image\r\n self.Refresh()\r\n self.Update()", "def __init__(self, parent, rect, direction=0, center=False, useAero=False):\r\n\r\n wx.Window.__init__(self, parent, -1, rect.GetPosition(), rect.GetSize(), wx.NO_BORDER)\r\n\r\n self._direction = direction\r\n self._center = center\r\n self._valid = True\r\n self._useAero = useAero\r\n \r\n self._bmp_unfocus, self._bmp_focus = GetDockingImage(direction, useAero, center)\r\n \r\n self._currentImage = self._bmp_unfocus\r\n self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)\r\n \r\n self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)\r\n self.Bind(wx.EVT_PAINT, self.OnPaint)", "def _AuiDockingGuide_init(self, *args, **kwargs):\n\n if 'style' in kwargs:\n style = kwargs['style']\n\n # This is the default style, as defined\n # in the AuiDockingGuide constructor\n else:\n style = (wx.FRAME_TOOL_WINDOW |\n wx.FRAME_STAY_ON_TOP |\n 
wx.FRAME_NO_TASKBAR |\n wx.NO_BORDER)\n\n if fwidgets.inSSHSession():\n style &= ~wx.FRAME_TOOL_WINDOW\n\n kwargs['style'] = style\n\n _AuiDockingGuide_real_init(self, *args, **kwargs)", "def open_window(self,size):\n # Window\n self.root = Tk()\n self.root.geometry(size)\n self.root.resizable(0, 0)\n\n\n # Tree\n self.tree = ttk.Treeview(self.root, heigh=20)\n self.tree.grid(row=4, column=0, padx=20)\n self.tree.grid(columnspan=5)\n\n hsb = ttk.Scrollbar(self.root, orient=\"horizontal\")\n hsb.configure(command=self.tree.xview)\n self.tree.configure(xscrollcommand=hsb.set)\n hsb.grid(row=5, column=0, padx=20, pady=20, columnspan=5, sticky=(W + E))", "def init_layout(self):\n\t\tself.pack_start(self.edit, expand=True)\n\t\tself.pack_start(self.button, expand=False)\n\t\tself.show_all()", "def OnRender(self, event):\r\n\r\n # if the frame is about to be deleted, don't bother\r\n if not self._frame or self._frame.IsBeingDeleted():\r\n return\r\n \r\n if not self._frame.GetSizer():\r\n return\r\n\r\n mouse = wx.GetMouseState()\r\n mousePos = wx.Point(mouse.GetX(), mouse.GetY())\r\n point = self._frame.ScreenToClient(mousePos)\r\n art = self._art\r\n\r\n dc = event.GetDC()\r\n \r\n for part in self._uiparts:\r\n \r\n # don't draw hidden pane items or items that aren't windows\r\n if part.sizer_item and ((not part.sizer_item.IsWindow() and \\\r\n not part.sizer_item.IsSpacer() and \\\r\n not part.sizer_item.IsSizer()) or \\\r\n not part.sizer_item.IsShown()):\r\n \r\n continue\r\n \r\n ptype = part.type\r\n \r\n if ptype in [AuiDockUIPart.typeDockSizer, AuiDockUIPart.typePaneSizer]:\r\n art.DrawSash(dc, self._frame, part.orientation, part.rect)\r\n\r\n elif ptype == AuiDockUIPart.typeBackground:\r\n art.DrawBackground(dc, self._frame, part.orientation, part.rect)\r\n\r\n elif ptype == AuiDockUIPart.typeCaption:\r\n art.DrawCaption(dc, self._frame, part.pane.caption, part.rect, part.pane)\r\n\r\n elif ptype == AuiDockUIPart.typeGripper:\r\n art.DrawGripper(dc, self._frame, part.rect, part.pane)\r\n\r\n elif ptype == AuiDockUIPart.typePaneBorder:\r\n art.DrawBorder(dc, self._frame, part.rect, part.pane)\r\n\r\n elif ptype == AuiDockUIPart.typePaneButton: \r\n self.DrawPaneButton(dc, part, point)", "def create_widget(self):\n self.widget = wxDockPane(self.parent_widget())", "def build(self):\n self.title = 'Processamento Digital de Imagens'\n self.main_layout = MainLayout()\n return self.main_layout", "def init_shared_memory(self, mem_key):\n\n self.shm_input = ConqueSoleSharedMemory(CONQUE_SOLE_INPUT_SIZE, 'input', mem_key)\n self.shm_input.create('write')\n self.shm_input.clear()\n\n self.shm_output = ConqueSoleSharedMemory(CONQUE_SOLE_BUFFER_LENGTH * self.columns, 'output', mem_key, True)\n self.shm_output.create('write')\n\n if not CONQUE_FAST_MODE:\n self.shm_attributes = ConqueSoleSharedMemory(CONQUE_SOLE_BUFFER_LENGTH * self.columns, 'attributes', mem_key, True, encoding='latin-1')\n self.shm_attributes.create('write')\n\n self.shm_stats = ConqueSoleSharedMemory(CONQUE_SOLE_STATS_SIZE, 'stats', mem_key, serialize=True)\n self.shm_stats.create('write')\n self.shm_stats.clear()\n\n self.shm_command = ConqueSoleSharedMemory(CONQUE_SOLE_COMMANDS_SIZE, 'command', mem_key, serialize=True)\n self.shm_command.create('write')\n self.shm_command.clear()\n\n self.shm_resize = ConqueSoleSharedMemory(CONQUE_SOLE_RESIZE_SIZE, 'resize', mem_key, serialize=True)\n self.shm_resize.create('write')\n self.shm_resize.clear()\n\n self.shm_rescroll = ConqueSoleSharedMemory(CONQUE_SOLE_RESCROLL_SIZE, 'rescroll', 
mem_key, serialize=True)\n self.shm_rescroll.create('write')\n self.shm_rescroll.clear()\n\n return True", "def __init__(self, cam: DashboardCamera, window_shape=(80, 61), search_margin=200, max_frozen_dur=15):\n self.camera = cam\n\n # Create windows\n self.windows_left = []\n self.windows_right = []\n for level in range(cam.img_height // window_shape[0]):\n x_init_l = cam.img_width / 4\n x_init_r = cam.img_width / 4 * 3\n self.windows_left.append(Window(level, window_shape, cam.img_size, x_init_l, max_frozen_dur))\n self.windows_right.append(Window(level, window_shape, cam.img_size, x_init_r, max_frozen_dur))\n self.search_margin = search_margin\n\n # Initialize visuals\n VIZ_OPTIONS = ('dash_undistorted', 'overhead', 'lab_b', 'lab_b_binary', 'lightness', 'lightness_binary',\n 'value', 'value_binary', 'pixel_scores', 'windows_raw', 'windows_filtered', 'highlighted_lane',\n 'presentation')\n self.visuals = {name: None for name in VIZ_OPTIONS} # Storage location of visualization images\n self.__viz_desired = None # The visuals we want to save\n self.__viz_dependencies = {'windows_raw': ['pixel_scores'], # Dependencies of visuals on other visuals\n 'windows_filtered': ['pixel_scores'],\n 'presentation': ['highlighted_lane', 'overhead', 'windows_raw', 'windows_filtered',\n 'pixel_scores']}", "def layout(self):\n pass", "def __createWidgets(self):\n # Widget canvas, used to draw rubik's cube\n self.cv = Canvas(self.master)\n self.cv['bg'] = 'white' # Background color\n self.cv['height'] = '440' # Height of canvas\n self.cv['width'] = '560' # Width of canvas\n self.cv.place(x=0, y=0)\n self.__drawCube()", "def __init__(self, simulator, display, control=None, **kwargs):\n super(ZasimMainWindow, self).__init__(**kwargs)\n\n self.setAttribute(Qt.WA_DeleteOnClose)\n\n self.simulator = simulator\n self.display = display\n self.control = control\n\n central_widget = QWidget(self)\n\n if self.control is None:\n self.control = ControlWidget(self.simulator, parent=central_widget)\n\n layout = QVBoxLayout(central_widget)\n\n sim_name = QLabel(str(self.simulator), self)\n # make text selectable and links (if any) clickable\n sim_name.setTextInteractionFlags(Qt.TextBrowserInteraction)\n # there are some nasty long names if base gets bigger than 2.\n sim_name.setWordWrap(True)\n\n layout.addWidget(sim_name)\n\n scroller = QScrollArea()\n scroller.setWidget(self.display)\n\n layout.addWidget(scroller)\n layout.addWidget(self.control)\n self.control.setObjectName(\"control\")\n\n self.setCentralWidget(central_widget)\n\n self.setup_menu()\n\n self.elementary_tool = None\n #self.comp_dlg = None\n self.new_dlg = None\n\n self.resetter = ResetDocklet(self)\n self.addDockWidget(Qt.RightDockWidgetArea, self.resetter)", "def onDockClosed(self): # used when Dock dialog is closed\n self.profile_dock = None" ]
[ "0.666467", "0.61904645", "0.58657914", "0.57930666", "0.5759", "0.5710022", "0.5661305", "0.55973077", "0.55593544", "0.5494071", "0.5491788", "0.5429935", "0.54137725", "0.5384663", "0.5345779", "0.53450286", "0.53433484", "0.5328388", "0.5326785", "0.5324091", "0.53176713", "0.5303564", "0.53012633", "0.529001", "0.5265871", "0.51698196", "0.5167559", "0.5167559", "0.51538587", "0.515215", "0.5133198", "0.5114938", "0.5109718", "0.5103036", "0.5098631", "0.509334", "0.508161", "0.50757134", "0.5074396", "0.50673246", "0.5062235", "0.5049583", "0.5038958", "0.502752", "0.50195646", "0.5011353", "0.50103134", "0.5003905", "0.5003173", "0.5002678", "0.49973705", "0.49928105", "0.4991685", "0.49800637", "0.49782342", "0.49747443", "0.4968371", "0.49526003", "0.49512863", "0.49462938", "0.49425736", "0.49275064", "0.4923581", "0.49129015", "0.4911855", "0.4911855", "0.49082208", "0.49017504", "0.48985323", "0.48825952", "0.48825952", "0.48767686", "0.4857401", "0.48537874", "0.4853148", "0.48529556", "0.4851547", "0.48400944", "0.48392057", "0.48321754", "0.4828166", "0.48134252", "0.47926456", "0.47916257", "0.47837558", "0.47789687", "0.47755364", "0.47752044", "0.4774532", "0.47589493", "0.47579518", "0.47570595", "0.47493237", "0.4749006", "0.47420135", "0.47372466", "0.4732431", "0.47310498", "0.47234014", "0.47229436", "0.47219455" ]
0.0
-1
Form dock_pd. Based on dock_proc dock_meas dock_chi2 dock_refine_ls dock_peak
def action_pd(obj: Pd, thread: QtCore.QThread):
    w_actions = []
    f_meas = obj.is_attribute("pd_meas")
    f_chi2 = obj.is_attribute("chi2")
    f_phase = obj.is_attribute("phase")

    l_pd_peak = []
    if f_phase:
        phase = obj.phase
        for item in phase.items:
            try:
                pd_peak = getattr(obj, f"pd_peak_{item.label.lower():}")
                l_pd_peak.append(pd_peak)
            except AttributeError:
                pass

    f_setup = obj.is_attribute("setup")
    f_pd_instr_resolution = obj.is_attribute("pd_instr_resolution")
    f_pd_background = obj.is_attribute("pd_background")
    f_range = obj.is_attribute("range")

    if not(f_chi2 & f_meas & f_setup & f_pd_instr_resolution & f_phase &
           f_pd_background & f_range):
        if not f_chi2:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add chi2")
            qtb_1.clicked.connect(lambda: add_items(obj, [Chi2()], thread))
            w_actions.append(qtb_1)
        if not f_meas:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add pd_meas")
            qtb_1.clicked.connect(lambda: add_items(obj, [PdMeasL()], thread))
            w_actions.append(qtb_1)
        if not f_setup:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add setup")
            qtb_1.clicked.connect(lambda: add_items(obj, [Setup()], thread))
            w_actions.append(qtb_1)
        if not f_pd_instr_resolution:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add pd_instr_resolution")
            qtb_1.clicked.connect(lambda: add_items(
                obj, [PdInstrResolution()], thread))
            w_actions.append(qtb_1)
        if not f_phase:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add phase")
            vv = PhaseL()
            vv.items = [Phase(label="phase", igsize=0., scale=1.)]
            qtb_1.clicked.connect(lambda: add_items(obj, [vv], thread))
            w_actions.append(qtb_1)
        if not f_pd_background:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add pd_background")
            qtb_1.clicked.connect(lambda: add_items(
                obj, [PdBackgroundL()], thread))
            w_actions.append(qtb_1)
        if not f_range:
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add range")
            qtb_1.clicked.connect(lambda: add_items(obj, [Range(
                ttheta_min=2, ttheta_max=100.)], thread))
            w_actions.append(qtb_1)
    return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def CalculateDockSizerLimits(self, dock):\r\n\r\n docks, panes = CopyDocksAndPanes2(self._docks, self._panes)\r\n\r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n opposite_size = self.GetOppositeDockTotalSize(docks, dock.dock_direction)\r\n\r\n for tmpDock in docks:\r\n \r\n if tmpDock.dock_direction == dock.dock_direction and \\\r\n tmpDock.dock_layer == dock.dock_layer and \\\r\n tmpDock.dock_row == dock.dock_row:\r\n \r\n tmpDock.size = 1\r\n break\r\n \r\n sizer, panes, docks, uiparts = self.LayoutAll(panes, docks, [], True, False)\r\n client_size = self._frame.GetClientSize()\r\n sizer.SetDimension(0, 0, client_size.x, client_size.y)\r\n sizer.Layout()\r\n\r\n for part in uiparts:\r\n \r\n part.rect = wx.RectPS(part.sizer_item.GetPosition(), part.sizer_item.GetSize())\r\n if part.type == AuiDockUIPart.typeDock:\r\n part.dock.rect = part.rect\r\n \r\n sizer.Destroy()\r\n new_dock = None\r\n\r\n for tmpDock in docks:\r\n if tmpDock.dock_direction == dock.dock_direction and \\\r\n tmpDock.dock_layer == dock.dock_layer and \\\r\n tmpDock.dock_row == dock.dock_row:\r\n \r\n new_dock = tmpDock\r\n break\r\n \r\n partnerDock = self.GetPartnerDock(dock)\r\n\r\n if partnerDock:\r\n partnerRange = partnerDock.size - partnerDock.min_size\r\n if partnerDock.min_size == 0:\r\n partnerRange -= sash_size\r\n if dock.IsHorizontal():\r\n partnerRange -= caption_size\r\n \r\n direction = dock.dock_direction\r\n \r\n if direction == AUI_DOCK_LEFT:\r\n minPix = new_dock.rect.x + new_dock.rect.width\r\n maxPix = dock.rect.x + dock.rect.width\r\n maxPix += partnerRange\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n minPix = new_dock.rect.y + new_dock.rect.height\r\n maxPix = dock.rect.y + dock.rect.height\r\n maxPix += partnerRange\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n minPix = dock.rect.x - partnerRange - sash_size\r\n maxPix = new_dock.rect.x - sash_size\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n minPix = dock.rect.y - partnerRange - sash_size\r\n maxPix = new_dock.rect.y - sash_size\r\n\r\n return minPix, maxPix\r\n \r\n direction = new_dock.dock_direction\r\n \r\n if direction == AUI_DOCK_LEFT:\r\n minPix = new_dock.rect.x + new_dock.rect.width\r\n maxPix = client_size.x - opposite_size - sash_size\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n minPix = new_dock.rect.y + new_dock.rect.height\r\n maxPix = client_size.y - opposite_size - sash_size\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n minPix = opposite_size\r\n maxPix = new_dock.rect.x - sash_size\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n minPix = opposite_size\r\n maxPix = new_dock.rect.y - sash_size\r\n\r\n return minPix, maxPix", "def dock_complex(pose):\n #PyMOL observer assuming the initial call was already made prior to this line.\n AddPyMolObserver_to_energies(pose, True)\n # defining scoring functions (DNA + specific to structures of interests)\n fa_score = get_fa_scorefxn()\n dna_score = create_score_function('dna')\n dna_score.set_weight(fa_elec, 1)\n # movemap minimization / fast relax\n mm = MoveMap()\n mm.set_bb_true_range(\"enter beginning region\", \"enter ending region\")#min in this motif only\n relax = FastRelax()\n relax.set_scorefxn(scorefxn)\n relax.apply(pose)\n # defining specific complex docking protocol\n docking = DockMCMProtocol()\n docking.set_scorefxn(dna_score)\n docking.set_scorefxn_pack(fa_score)\n docking.set_partners(\"B_ACD\")\n # scoring pre and post docking\n dna_init = dna_score(pose)\n fa_init = fa_score(pose)\n # 
dockng occurs here\n docking.apply(pose)\n # scoring post docking.\n dna_final = dna_score(pose)\n fa_final = fa_score(pose)\n return [fa_init, fa_final, dna_init, dna_final]\n #raise Exception(\"Complex docking not implemented\")", "def dock(self, ligands, run_count=1, cpu=8,\n exhaustiveness=10, write_dir=os.getcwd()):\n\n long = '{0}{1}_{2}_out.csv'.format(\n write_dir, self.name, self.id)\n trimmed = '{0}{1}_{2}_trimmed.csv'.format(\n write_dir, self.name, self.id)\n short = '{0}{1}_{2}_summary.csv'.format(\n write_dir, self.name, self.id)\n if not os.path.exists(write_dir):\n os.makedirs(write_dir)\n os.chdir(write_dir)\n\n '''Long table format, all outputs'''\n if os.path.exists(long):\n long_df = pd.read_csv(long)\n else:\n long_df = pd.DataFrame(\n columns=['Date_time', 'Exhaustiveness', 'Run_number',\n 'Receptor', 'Ligand', 'Rank', 'Affinity',\n 'Dist_rmsd_l.b.', 'Dist_rmsd_u.b.', 'species',\n 'defaultLea', 'defaultLigand'\n ]\n )\n long_df.to_csv(long, index=False, mode='w')\n\n '''trimmed table is highest ranking binding from each dock iteration only'''\n if os.path.exists(trimmed):\n trim_df = pd.read_csv(trimmed)\n else:\n trim_df = long_df\n trim_df.to_csv(trimmed, index=False, mode='w')\n\n '''short table format with summary statistics of all ligand docks'''\n if os.path.exists(short):\n short_df = pd.read_csv(short, header=None,\n names=['', 'dGmean', 'dGsd', 'KDmean', 'KDsd'])\n short_df.to_csv(short, header=False, index=False, mode='w')\n else:\n short_df = pd.DataFrame(\n columns=['', 'dGmean', 'dGsd', 'KDmean', 'KDsd'],\n data=[['', self.id],\n ['type', self.type],\n ['species', self.species],\n ['lipids', ', '.join(self.lipid_patterns)],\n [],\n ['', 'dG mean', 'dG sd', 'KD mean', 'KD sd'],\n ['ref', np.nan, np.nan, np.nan, np.nan]],\n )\n\n for mol in ligands:\n\n '''if you need cores for other things while docks are running'''\n # cpu_lock = time.localtime(time.time()).tm_hour\n # if 21 > cpu_lock > 8:\n # cpu = 6\n\n '''vinaDock.py init and output long table'''\n dock_output = self.annotate_long(\n Docker(receptor=self.pdb,\n ligand=mol,\n log_path='{0}\\\\{1}_{2}_log.txt'.format(\n write_dir, self.id, mol.name),\n box=self.box, run_count=run_count,\n exhaustiveness=exhaustiveness,\n cpu=cpu\n ).run()\n )\n\n '''collecting output and doing math to write to short table'''\n long_df = long_df.append(dock_output)\n trim = dock_output.loc[dock_output['Rank'] == '1']\n trim_df = trim_df.append(trim)\n\n dg = np.mean(trim_df.loc[\n trim_df['Ligand'] == mol.name, 'Affinity'\n ].astype(float)), \\\n np.std(trim_df.loc[\n trim_df['Ligand'] == mol.name, 'Affinity'\n ].astype(float))\n kd = KD(dg[0]), \\\n KD(dg[0]) * (-dg[1] / dg[0])\n\n short_df.loc[6, 'dGmean'] = (\n np.mean(trim_df.loc[\n trim_df['defaultLigand'] == 1, 'Affinity'].astype(float))\n )\n short_df.loc[6, 'dGsd'] = (\n np.std(trim_df.loc[\n trim_df['defaultLigand'] == 1, 'Affinity'].astype(float))\n )\n short_df.loc[6, 'KDmean'] = KD(short_df.loc[6, 'dGmean'])\n short_df.loc[6, 'KDsd'] = short_df.loc[6, 'KDmean'] * \\\n -(short_df.loc[6, 'dGsd'] / short_df.loc[6, 'dGmean'])\n short_df = short_df.append(\n pd.DataFrame(\n columns=['', 'dGmean', 'dGsd', 'KDmean', 'KDsd'],\n data=[[mol.name, dg[0], dg[1], kd[0], kd[1]]]\n ), ignore_index=True\n )\n\n dock_output.to_csv(long, index=False, header=False, mode='a')\n trim.to_csv(trimmed, index=False, header=False, mode='a')\n short_df.to_csv(short, index=False, header=False, mode='w')\n\n return long_df, trim_df, short_df", "def GetDock(self):\n return self.dock", 
"def overviewCommand(self):\n plt.figure(11)\n plt.clf()\n ax = plt.subplot(211)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET'),\n color='r', label='FUOFFSET',\n linewidth=1, alpha=1) \n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='r', linewidth=3, alpha=0.5,\n label=self.DLtrack+'-PSP')\n plt.legend()\n plt.subplot(212, sharex=ax)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET')-\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='k', label='$\\Delta$',\n linewidth=1, alpha=1) \n \n signal = self.raw['OPDC'].data.field('FUOFFSET')\n plt.figure(12)\n plt.clf()\n ax2 = plt.subplot(111)\n Fs = 1e6/np.diff(self.raw['OPDC'].data.field('TIME')).mean()\n print Fs\n ax2.psd(signal[:50000], NFFT=5000, Fs=Fs, label='FUOFFSET',scale_by_freq=0)\n plt.legend()", "def test_vs_docking():\n vs = virtualscreening(n_cpu=-1)\n vs.load_ligands('sdf', os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'))\n vs.dock(engine='autodock_vina',\n protein=os.path.join(test_data_dir, 'data/dude/xiap/receptor_rdkit.pdb'),\n auto_ligand=os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'),\n exhaustiveness=1,\n seed=0)\n mols = list(vs.fetch())\n assert_equal(len(mols), 3)\n mol_data = mols[0].data\n assert_in('vina_affinity', mol_data)\n assert_in('vina_rmsd_lb', mol_data)\n assert_in('vina_rmsd_ub', mol_data)", "def dock(rec_outpath, reorder_outpath, init='dock_init'):\n init = eval(init)\n receptor = os.path.basename(rec_outpath).split('_')[0]\n dock_dir = os.path.join(init.data_dir, init.dock_folder) \n rec_path = os.path.join(init.data_dir, rec_outpath)\n reorder_path = os.path.join(init.data_dir, reorder_outpath)\n\n dock_name = os.path.basename(rec_path).replace('receptor','dock')\n out_path = os.path.join(dock_dir, receptor, dock_name)\n\n\n\n if not os.path.exists(os.path.dirname(out_path)):\n os.makedirs(os.path.dirname(out_path))\n\n kw = {\n 'receptor': rec_path,\n 'ligand': reorder_path,\n 'autobox_ligand':reorder_path,\n 'out':out_path\n }\n\n cmd = init._make_command(**kw)\n cl = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n cl.wait()\n\n return [[rec_outpath, reorder_outpath, os.path.join(init.dock_folder, receptor, dock_name)]]", "def dockControl(*args, allowedArea: Union[AnyStr, List[AnyStr], bool]=\"all\", annotation:\n Union[AnyStr, bool]=\"\", area: Union[AnyStr, bool]=\"\", backgroundColor:\n Union[List[float, float, float], bool]=None, closeCommand: Script=None,\n content: Union[AnyStr, bool]=\"\", defineTemplate: AnyStr=\"\", docTag:\n Union[AnyStr, bool]=\"\", dockStation: AnyStr=\"\", dragCallback: Script=None,\n dropCallback: Script=None, enable: bool=True, enableBackground: bool=True,\n enableKeyboardFocus: bool=True, enablePopupOption: bool=True, exists: bool=True,\n fixedHeight: bool=True, fixedWidth: bool=True, floatChangeCommand: Script=None,\n floating: bool=True, fullPathName: bool=True, height: Union[int, bool]=0,\n highlightColor: Union[List[float, float, float], bool]=None, isObscured:\n bool=True, label: Union[AnyStr, bool]=\"\", manage: bool=True, moveable:\n bool=True, noBackground: bool=True, numberOfPopupMenus: bool=True, parent:\n Union[AnyStr, bool]=\"\", popupMenuArray: bool=True, preventOverride: bool=True,\n r: bool=True, retain: bool=True, sizeable: bool=True, splitLayout: AnyStr=\"\",\n state: 
Union[AnyStr, bool]=\"\", statusBarMessage: AnyStr=\"\", useTemplate:\n AnyStr=\"\", visible: bool=True, visibleChangeCommand: Union[Script, bool]=None,\n width: Union[int, bool]=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def LayoutAll(self, panes, docks, uiparts, spacer_only=False, oncheck=True):\r\n \r\n container = wx.BoxSizer(wx.VERTICAL)\r\n\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n cli_size = self._frame.GetClientSize()\r\n \r\n # empty all docks out\r\n for dock in docks:\r\n dock.panes = []\r\n if dock.fixed:\r\n # always reset fixed docks' sizes, because\r\n # the contained windows may have been resized\r\n dock.size = 0\r\n \r\n dock_count = len(docks)\r\n \r\n # iterate through all known panes, filing each\r\n # of them into the appropriate dock. If the\r\n # pane does not exist in the dock, add it\r\n for p in panes:\r\n\r\n # don't layout hidden panes.\r\n if p.IsShown():\r\n \r\n # find any docks with the same dock direction, dock layer, and\r\n # dock row as the pane we are working on\r\n arr = FindDocks(docks, p.dock_direction, p.dock_layer, p.dock_row)\r\n\r\n if arr:\r\n dock = arr[0]\r\n\r\n else:\r\n # dock was not found, so we need to create a new one\r\n d = AuiDockInfo()\r\n d.dock_direction = p.dock_direction\r\n d.dock_layer = p.dock_layer\r\n d.dock_row = p.dock_row\r\n docks.append(d)\r\n dock = docks[-1]\r\n\r\n if p.HasFlag(p.needsRestore) and not p.HasFlag(p.wasMaximized):\r\n \r\n isHor = dock.IsHorizontal()\r\n sashSize = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n\r\n # get the sizes of any docks that might \r\n # overlap with our restored dock\r\n\r\n # make list of widths or heights from the size in the dock rects\r\n sizes = [d.rect[2:][isHor] for \\\r\n d in docks if d.IsOk() and \\\r\n (d.IsHorizontal() == isHor) and \\\r\n not d.toolbar and \\\r\n d.dock_direction != AUI_DOCK_CENTER]\r\n \r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n\r\n # set max size allowing for sashes and absolute minimum\r\n maxsize = frameRect[2:][isHor] - sum(sizes) - (len(sizes)*10) - (sashSize*len(sizes))\r\n dock.size = min(p.previousDockSize,maxsize)\r\n\r\n else:\r\n dock.size = 0\r\n\r\n if p.HasFlag(p.wasMaximized):\r\n self.MaximizePane(p, savesizes=False)\r\n p.SetFlag(p.wasMaximized, False)\r\n\r\n if p.HasFlag(p.needsRestore):\r\n if p.previousDockPos is not None:\r\n DoInsertPane(dock.panes, dock.dock_direction, dock.dock_layer, dock.dock_row, p.previousDockPos)\r\n p.dock_pos = p.previousDockPos\r\n p.previousDockPos = None\r\n p.SetFlag(p.needsRestore, False)\r\n\r\n if p.IsDocked():\r\n # remove the pane from any existing docks except this one\r\n docks = RemovePaneFromDocks(docks, p, dock)\r\n\r\n # pane needs to be added to the dock,\r\n # if it doesn't already exist \r\n if not FindPaneInDock(dock, p.window):\r\n dock.panes.append(p)\r\n else:\r\n # remove the pane from any existing docks\r\n docks = RemovePaneFromDocks(docks, p)\r\n \r\n # remove any empty docks\r\n docks = [dock for dock in docks if dock.panes]\r\n\r\n dock_count = len(docks)\r\n # configure the docks further\r\n for ii, dock in enumerate(docks):\r\n # sort the dock pane array by the pane's\r\n # dock position (dock_pos), in ascending order\r\n dock.panes.sort(PaneSortFunc)\r\n dock_pane_count = len(dock.panes)\r\n \r\n # for newly created docks, set up their initial size\r\n if dock.size == 0:\r\n size = 0\r\n for pane in 
dock.panes:\r\n pane_size = pane.best_size\r\n if pane_size == wx.Size(-1, -1):\r\n pane_size = pane.min_size\r\n if pane_size == wx.Size(-1, -1) and pane.window:\r\n pane_size = pane.window.GetSize()\r\n if dock.IsHorizontal():\r\n size = max(pane_size.y, size)\r\n else:\r\n size = max(pane_size.x, size)\r\n \r\n # add space for the border (two times), but only\r\n # if at least one pane inside the dock has a pane border\r\n for pane in dock.panes:\r\n if pane.HasBorder():\r\n size = size + pane_border_size*2\r\n break\r\n \r\n # if pane is on the top or bottom, add the caption height,\r\n # but only if at least one pane inside the dock has a caption\r\n if dock.IsHorizontal():\r\n for pane in dock.panes:\r\n if pane.HasCaption() and not pane.HasCaptionLeft():\r\n size = size + caption_size\r\n break\r\n else:\r\n for pane in dock.panes:\r\n if pane.HasCaptionLeft() and not pane.HasCaption():\r\n size = size + caption_size\r\n break\r\n \r\n # new dock's size may not be more than the dock constraint\r\n # parameter specifies. See SetDockSizeConstraint()\r\n max_dock_x_size = int(self._dock_constraint_x*float(cli_size.x))\r\n max_dock_y_size = int(self._dock_constraint_y*float(cli_size.y))\r\n if cli_size <= wx.Size(20, 20):\r\n max_dock_x_size = 10000\r\n max_dock_y_size = 10000\r\n\r\n if dock.IsHorizontal():\r\n size = min(size, max_dock_y_size)\r\n else:\r\n size = min(size, max_dock_x_size)\r\n\r\n # absolute minimum size for a dock is 10 pixels\r\n if size < 10:\r\n size = 10\r\n\r\n dock.size = size\r\n\r\n # determine the dock's minimum size\r\n plus_border = False\r\n plus_caption = False\r\n plus_caption_left = False\r\n dock_min_size = 0\r\n for pane in dock.panes:\r\n if pane.min_size != wx.Size(-1, -1):\r\n if pane.HasBorder():\r\n plus_border = True\r\n if pane.HasCaption():\r\n plus_caption = True\r\n if pane.HasCaptionLeft():\r\n plus_caption_left = True\r\n if dock.IsHorizontal():\r\n if pane.min_size.y > dock_min_size:\r\n dock_min_size = pane.min_size.y\r\n else:\r\n if pane.min_size.x > dock_min_size:\r\n dock_min_size = pane.min_size.x\r\n \r\n if plus_border:\r\n dock_min_size += pane_border_size*2\r\n if plus_caption and dock.IsHorizontal():\r\n dock_min_size += caption_size\r\n if plus_caption_left and dock.IsVertical():\r\n dock_min_size += caption_size\r\n \r\n dock.min_size = dock_min_size\r\n \r\n # if the pane's current size is less than it's\r\n # minimum, increase the dock's size to it's minimum\r\n if dock.size < dock.min_size:\r\n dock.size = dock.min_size\r\n\r\n # determine the dock's mode (fixed or proportional)\r\n # determine whether the dock has only toolbars\r\n action_pane_marked = False\r\n dock.fixed = True\r\n dock.toolbar = True\r\n for pane in dock.panes:\r\n if not pane.IsFixed():\r\n dock.fixed = False\r\n if not pane.IsToolbar():\r\n dock.toolbar = False\r\n if pane.HasFlag(AuiPaneInfo.optionDockFixed):\r\n dock.fixed = True\r\n if pane.HasFlag(AuiPaneInfo.actionPane):\r\n action_pane_marked = True\r\n\r\n # if the dock mode is proportional and not fixed-pixel,\r\n # reassign the dock_pos to the sequential 0, 1, 2, 3\r\n # e.g. remove gaps like 1, 2, 30, 500\r\n if not dock.fixed:\r\n for jj in xrange(dock_pane_count):\r\n pane = dock.panes[jj]\r\n pane.dock_pos = jj\r\n \r\n # if the dock mode is fixed, and none of the panes\r\n # are being moved right now, make sure the panes\r\n # do not overlap each other. 
If they do, we will\r\n # adjust the panes' positions\r\n if dock.fixed and not action_pane_marked:\r\n pane_positions, pane_sizes = self.GetPanePositionsAndSizes(dock)\r\n offset = 0\r\n for jj in xrange(dock_pane_count):\r\n pane = dock.panes[jj]\r\n pane.dock_pos = pane_positions[jj]\r\n amount = pane.dock_pos - offset\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n pane.dock_pos += -amount\r\n\r\n offset += pane_sizes[jj]\r\n dock.panes[jj] = pane\r\n\r\n if oncheck:\r\n self._docks[ii] = dock \r\n\r\n # shrink docks if needed \r\n## docks = self.SmartShrink(docks, AUI_DOCK_TOP)\r\n## docks = self.SmartShrink(docks, AUI_DOCK_LEFT)\r\n\r\n if oncheck:\r\n self._docks = docks\r\n \r\n # discover the maximum dock layer\r\n max_layer = 0\r\n dock_count = len(docks)\r\n \r\n for ii in xrange(dock_count):\r\n max_layer = max(max_layer, docks[ii].dock_layer)\r\n\r\n # clear out uiparts\r\n uiparts = []\r\n\r\n # create a bunch of box sizers,\r\n # from the innermost level outwards.\r\n cont = None\r\n middle = None\r\n\r\n if oncheck:\r\n docks = self._docks\r\n \r\n for layer in xrange(max_layer+1):\r\n # find any docks in this layer\r\n arr = FindDocks(docks, -1, layer, -1)\r\n # if there aren't any, skip to the next layer\r\n if not arr:\r\n continue\r\n\r\n old_cont = cont\r\n\r\n # create a container which will hold this layer's\r\n # docks (top, bottom, left, right)\r\n cont = wx.BoxSizer(wx.VERTICAL)\r\n\r\n # find any top docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_TOP, layer, -1)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(cont, row, uiparts, spacer_only)\r\n \r\n # fill out the middle layer (which consists\r\n # of left docks, content area and right docks)\r\n \r\n middle = wx.BoxSizer(wx.HORIZONTAL)\r\n\r\n # find any left docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_LEFT, layer, -1)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n # add content dock (or previous layer's sizer\r\n # to the middle\r\n if not old_cont:\r\n # find any center docks\r\n arr = FindDocks(docks, AUI_DOCK_CENTER, -1, -1)\r\n if arr:\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n elif not self._has_maximized:\r\n # there are no center docks, add a background area\r\n sizer_item = middle.Add((1, 1), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.pane = None\r\n part.dock = None\r\n part.button = None\r\n part.cont_sizer = middle\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n else:\r\n middle.Add(old_cont, 1, wx.EXPAND)\r\n \r\n # find any right docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_RIGHT, layer, -1, reverse=True)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n if len(middle.GetChildren()) > 0:\r\n cont.Add(middle, 1, wx.EXPAND)\r\n\r\n # find any bottom docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_BOTTOM, layer, -1, reverse=True)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(cont, row, uiparts, spacer_only)\r\n\r\n if not cont:\r\n # no sizer available, because there are no docks,\r\n # therefore we will create a simple background area\r\n cont = wx.BoxSizer(wx.VERTICAL)\r\n sizer_item = cont.Add((1, 1), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.pane = None\r\n part.dock = None\r\n part.button = None\r\n part.cont_sizer = middle\r\n part.sizer_item = sizer_item\r\n 
uiparts.append(part)\r\n\r\n if oncheck:\r\n self._uiparts = uiparts\r\n self._docks = docks\r\n\r\n container.Add(cont, 1, wx.EXPAND)\r\n\r\n if oncheck:\r\n return container\r\n else:\r\n return container, panes, docks, uiparts", "def config_pbc_md(self):\n\n self._config_md()\n self.title = \"PBC MD Simulation\"\n self.cntrl[\"cut\"] = 8.0\n self.cntrl[\"igb\"] = 0\n self.cntrl[\"iwrap\"] = 1\n self.cntrl[\"ntp\"] = 1\n self.cntrl[\"barostat\"] = 2", "def dock_simple(pose, dock_partners, foldtree):\n assert isinstance(dock_partners, str)\n # setup foldtree\n if foldtree is not None:\n assert isinstance(foldtree, str)\n setup_foldtree(pose, foldtree, Vector1([1]))\n # specify scoring functions\n fa_score = get_fa_scorefxn()\n dna_score = create_score_function('dna')\n dna_score.set_weight(fa_elec, 1)\n # specify docking protocol\n docking = DockMCMProtocol()\n docking.set_scorefxn(dna_score)\n docking.set_scorefxn_pack(fa_score)\n docking.set_partners(dock_partners)\n # obtain initial and final scores after docking\n dna_init = dna_score(pose)\n fa_init = fa_score(pose)\n docking.apply(pose)\n dna_final = dna_score(pose)\n fa_final = fa_score(pose)\n return [fa_init, fa_final, dna_init, dna_final]", "def docking_rdock(self, ligand_file, docking_file, docking_log_file):\n\n docking_prefix = '.'.join(docking_file.strip().split('.')[:-1])\n run_line = '%s' % self.docking_program\n run_line += ' -r %s' % self.dock_config_file\n run_line += ' -p dock.prm'\n run_line += ' -n %d' % self.exhaustiveness\n run_line += ' -i %s' % ligand_file\n run_line += ' -o %s' % docking_prefix\n\n# run_line2 = 'sdsort -n -fSCORE %s.sd' % (docking_prefix)\n run_line2 = 'sdsort -n -fSCORE.INTER %s.sd' % (docking_prefix)\n\n e = None\n try:\n result = subprocess.check_output(run_line.split(),\n stderr=subprocess.STDOUT,\n timeout=self.timeout_dock,\n universal_newlines=True)\n if self.output_save:\n fp = open(docking_log_file, 'w')\n fp.write(result)\n fp.close()\n\n result2 = subprocess.check_output(run_line2.split(),\n universal_newlines=True)\n fp = open(docking_file, 'w')\n fp.write(result2)\n fp.close()\n\n except Exception as e:\n return [99.999], e\n\n affinity_list = list()\n out_lines = result2.split('\\n')\n check_score = False\n for line in out_lines:\n if line[0:16] == '> <SCORE.INTER>':\n# if line[0:10] == '> <SCORE>':\n check_score = True\n continue\n if check_score is True:\n affinity = float(line)\n affinity_list += [affinity]\n check_score = False\n continue\n if len(affinity_list) == 0:\n e = 'WARNING: Could not find any conformations.'\n return [99.999], e\n return affinity_list, e", "def SetDockPos(self, source):\r\n \r\n self.dock_direction = source.dock_direction\r\n self.dock_layer = source.dock_layer\r\n self.dock_row = source.dock_row\r\n self.dock_pos = source.dock_pos\r\n self.dock_proportion = source.dock_proportion\r\n self.floating_pos = wx.Point(*source.floating_pos)\r\n self.floating_size = wx.Size(*source.floating_size)\r\n self.rect = wx.Rect(*source.rect)\r\n \r\n return self", "def GetDockPixelOffset(self, test):\r\n\r\n # the only way to accurately calculate the dock's\r\n # offset is to actually run a theoretical layout\r\n docks, panes = CopyDocksAndPanes2(self._docks, self._panes)\r\n panes.append(test)\r\n\r\n sizer, panes, docks, uiparts = self.LayoutAll(panes, docks, [], True, False)\r\n client_size = self._frame.GetClientSize()\r\n sizer.SetDimension(0, 0, client_size.x, client_size.y)\r\n sizer.Layout()\r\n\r\n for part in uiparts:\r\n pos = 
part.sizer_item.GetPosition()\r\n size = part.sizer_item.GetSize()\r\n part.rect = wx.RectPS(pos, size)\r\n if part.type == AuiDockUIPart.typeDock:\r\n part.dock.rect = part.rect\r\n\r\n sizer.Destroy()\r\n\r\n for dock in docks:\r\n if test.dock_direction == dock.dock_direction and \\\r\n test.dock_layer == dock.dock_layer and \\\r\n test.dock_row == dock.dock_row:\r\n \r\n if dock.IsVertical():\r\n return dock.rect.y\r\n else:\r\n return dock.rect.x\r\n \r\n return 0", "def GetPartnerDock(self, dock):\r\n\r\n for layer in xrange(dock.dock_layer, -1, -1):\r\n \r\n bestDock = None\r\n\r\n for tmpDock in self._docks:\r\n \r\n if tmpDock.dock_layer != layer:\r\n continue\r\n \r\n if tmpDock.dock_direction != dock.dock_direction:\r\n continue\r\n\r\n if tmpDock.dock_layer < dock.dock_layer:\r\n \r\n if not bestDock or tmpDock.dock_row < bestDock.dock_row:\r\n bestDock = tmpDock\r\n \r\n elif tmpDock.dock_row > dock.dock_row:\r\n \r\n if not bestDock or tmpDock.dock_row > bestDock.dock_row:\r\n bestDock = tmpDock\r\n \r\n if bestDock:\r\n return bestDock\r\n \r\n return None", "def SavePerspective(self):\r\n\r\n result = \"layout2|\"\r\n\r\n for pane in self._panes:\r\n result += self.SavePaneInfo(pane) + \"|\"\r\n \r\n for dock in self._docks:\r\n result = result + (\"dock_size(%d,%d,%d)=%d|\")%(dock.dock_direction,\r\n dock.dock_layer,\r\n dock.dock_row,\r\n dock.size)\r\n return result", "def test_plants_docking(self):\n self.workdir = prepare_work_dir(__rootpath__, create=True)\n settings['workdir'] = self.workdir\n settings['bindingsite_center'] = [7.79934, 9.49666, 3.39229]\n settings['exec_path'] = exec_path\n\n plants = PlantsDocking(**settings)\n self.assertTrue(plants.run(self.protein, self.ligand))\n\n outputfiles = glob.glob('{0}/_entry_00001_conf_*.mol2'.format(self.workdir))\n self.assertEqual(len(outputfiles), plants.config['cluster_structures'])\n self.assertEqual(len(outputfiles), len(plants.results()))", "def CopyDocksAndPanes2(src_docks, src_panes):\r\n \r\n dest_docks = []\r\n\r\n for ii in xrange(len(src_docks)):\r\n dest_docks.append(AuiDockInfo())\r\n dest_docks[ii].dock_direction = src_docks[ii].dock_direction\r\n dest_docks[ii].dock_layer = src_docks[ii].dock_layer\r\n dest_docks[ii].dock_row = src_docks[ii].dock_row\r\n dest_docks[ii].size = src_docks[ii].size\r\n dest_docks[ii].min_size = src_docks[ii].min_size\r\n dest_docks[ii].resizable = src_docks[ii].resizable\r\n dest_docks[ii].fixed = src_docks[ii].fixed\r\n dest_docks[ii].toolbar = src_docks[ii].toolbar\r\n dest_docks[ii].panes = src_docks[ii].panes\r\n dest_docks[ii].rect = wx.Rect(*src_docks[ii].rect)\r\n\r\n dest_panes = []\r\n\r\n for ii in xrange(len(src_panes)):\r\n dest_panes.append(AuiPaneInfo())\r\n dest_panes[ii].name = src_panes[ii].name\r\n dest_panes[ii].caption = src_panes[ii].caption\r\n dest_panes[ii].window = src_panes[ii].window\r\n dest_panes[ii].frame = src_panes[ii].frame\r\n dest_panes[ii].state = src_panes[ii].state\r\n dest_panes[ii].dock_direction = src_panes[ii].dock_direction\r\n dest_panes[ii].dock_layer = src_panes[ii].dock_layer\r\n dest_panes[ii].dock_row = src_panes[ii].dock_row\r\n dest_panes[ii].dock_pos = src_panes[ii].dock_pos\r\n dest_panes[ii].best_size = wx.Size(*src_panes[ii].best_size)\r\n dest_panes[ii].min_size = wx.Size(*src_panes[ii].min_size)\r\n dest_panes[ii].max_size = wx.Size(*src_panes[ii].max_size)\r\n dest_panes[ii].floating_pos = wx.Point(*src_panes[ii].floating_pos)\r\n dest_panes[ii].floating_size = wx.Size(*src_panes[ii].floating_size)\r\n 
dest_panes[ii].dock_proportion = src_panes[ii].dock_proportion\r\n dest_panes[ii].buttons = src_panes[ii].buttons\r\n dest_panes[ii].rect = wx.Rect(*src_panes[ii].rect)\r\n dest_panes[ii].icon = src_panes[ii].icon\r\n dest_panes[ii].notebook_id = src_panes[ii].notebook_id\r\n dest_panes[ii].transparent = src_panes[ii].transparent\r\n dest_panes[ii].snapped = src_panes[ii].snapped\r\n dest_panes[ii].minimize_mode = src_panes[ii].minimize_mode\r\n\r\n for ii in xrange(len(dest_docks)):\r\n dock = dest_docks[ii]\r\n for jj in xrange(len(dock.panes)):\r\n for kk in xrange(len(src_panes)):\r\n if dock.panes[jj] == src_panes[kk]:\r\n dock.panes[jj] = dest_panes[kk]\r\n\r\n dest_docks[ii] = dock\r\n \r\n return dest_docks, dest_panes", "def __init__(self, structure_id, pdb_file, amb_file, flex1_file, flex2_file, root_dir=None):\n\n super(DOCK, self).__init__(id=structure_id, description='DOCK6 preparation')\n self._root_dir = None\n self.structure_path = pdb_file\n\n if root_dir:\n self.root_dir = root_dir\n else:\n self.root_dir = self.structure_dir\n\n self.dockprep_path = None\n self.receptormol2_path = None\n self.receptorpdb_path = None\n self.dms_path = None\n self.sphgen_path = None\n self.bindingsite_path = None\n self.sphsel_path = None\n self.box_path = None\n self.grid_path = None\n\n self.dock_flexible_outfile = None\n self.dock_flexible_scored_result = None\n self.dock_flexible_conformers_result = None\n\n self.amb_file = amb_file\n self.flex1_file = flex1_file\n self.flex2_file = flex2_file\n\n log.debug('{}: created DOCK6 project folder at {}'.format(structure_id, self.dock_dir))", "def UpdateDockingGuides(self, paneInfo):\r\n\r\n if len(self._guides) == 0:\r\n self.CreateGuideWindows()\r\n\r\n captionSize = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n mousePos = wx.GetMousePosition()\r\n\r\n for indx, guide in enumerate(self._guides):\r\n \r\n pt = wx.Point()\r\n guide_size = guide.host.GetSize()\r\n if not guide.host:\r\n raise Exception(\"Invalid docking host\")\r\n\r\n direction = guide.dock_direction\r\n\r\n if direction == AUI_DOCK_LEFT:\r\n pt.x = frameRect.x + guide_size.x / 2 + 16\r\n pt.y = frameRect.y + frameRect.height / 2\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n pt.x = frameRect.x + frameRect.width / 2\r\n pt.y = frameRect.y + guide_size.y / 2 + 16\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n pt.x = frameRect.x + frameRect.width - guide_size.x / 2 - 16\r\n pt.y = frameRect.y + frameRect.height / 2\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n pt.x = frameRect.x + frameRect.width / 2\r\n pt.y = frameRect.y + frameRect.height - guide_size.y / 2 - 16\r\n\r\n elif direction == AUI_DOCK_CENTER:\r\n rc = paneInfo.window.GetScreenRect()\r\n pt.x = rc.x + rc.width / 2\r\n pt.y = rc.y + rc.height / 2\r\n if paneInfo.HasCaption():\r\n pt.y -= captionSize / 2\r\n elif paneInfo.HasCaptionLeft():\r\n pt.x -= captionSize / 2\r\n\r\n # guide will be centered around point 'pt'\r\n targetPosition = wx.Point(pt.x - guide_size.x / 2, pt.y - guide_size.y / 2)\r\n\r\n if guide.host.GetPosition() != targetPosition:\r\n guide.host.Move(targetPosition)\r\n \r\n guide.host.AeroMove(targetPosition)\r\n\r\n if guide.dock_direction == AUI_DOCK_CENTER:\r\n guide.host.ValidateNotebookDocking(paneInfo.IsNotebookDockable())\r\n\r\n guide.host.UpdateDockGuide(mousePos)\r\n \r\n paneInfo.window.Lower()", "def Dock(self):\r\n\r\n if self.IsNotebookPage():\r\n self.notebook_id = -1\r\n self.dock_direction = AUI_DOCK_NONE\r\n \r\n 
return self.SetFlag(self.optionFloating, False)", "def port1_docked_time(self, port1_docked_time):\n\n self._port1_docked_time = port1_docked_time", "def port1_docked_time(self):\n return self._port1_docked_time", "def port1_docking_date(self, port1_docking_date):\n\n self._port1_docking_date = port1_docking_date", "def makeDPartial( name\n , config\n , DecayDescriptor\n , inputSel\n ) :\n\n _Kcuts1 = \"~ISMUON & (PT > %(DaugPtLoose)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2Loose)s)\" % locals()['config']\n _KcutsPIDK = \" & (PIDK > %(HighPIDK)s)\" % locals()['config']\n _Kcuts2 = \" & (ISLONG) & (P > %(DaugPLoose)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2Loose)s)\" % locals()['config']\n _Kcuts = _Kcuts1 + _KcutsPIDK + _Kcuts2\n _Picuts1 = \"~ISMUON & (PT > %(DaugPtMin)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2)s)\" % locals()['config']\n _PicutsPIDK = \" & (PIDK < %(LowPIDK)s)\" % locals()['config']\n _Picuts2 = \" & (ISLONG) & (P > %(DaugP)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2)s)\" % locals()['config']\n _Picuts = _Picuts1 + _PicutsPIDK + _Picuts2\n _dauCuts = { 'K+': _Kcuts, 'pi+': _Picuts }\n #_Kcuts1 = \"~ISMUON & (PT > 500* MeV) & (MIPCHI2DV(PRIMARY) > 4)\"\n #_KcutsPIDK = \" & (PIDK > 5)\"\n #_Kcuts2 = \" & (ISLONG) & (P > 5000* MeV) & (TRCHI2DOF < 5)\"\n #_Kcuts = _Kcuts1 + _KcutsPIDK + _Kcuts2\n #_Picuts1 = \"~ISMUON & (PT > 500* MeV) & (MIPCHI2DV(PRIMARY) > 4)\"\n #_PicutsPIDK = \" & (PIDK < 0)\"\n #_Picuts2 = \" & (ISLONG) & (P > 5000* MeV) & (TRCHI2DOF < 5)\"\n #_Picuts = _Picuts1 + _PicutsPIDK + _Picuts2\n #_dauCuts = { 'K+': _Kcuts, 'pi+': _Picuts }\n\n _combCuts = \"(APT > %(D0PtLoose)s* MeV)\" \\\n \"& (AP > %(D0P)s* MeV)\" % locals()['config']\n\n _motherCuts = \"(VFASPF(VCHI2PDOF) < %(D0VtxChi2Ndof)s)\" \\\n \"& (BPVVDCHI2 > %(D0FDChi2)s)\" % locals()['config']\n\n\n _Dminus = CombineParticles( DecayDescriptor = DecayDescriptor\n , DaughtersCuts = _dauCuts\n , CombinationCut = _combCuts\n , MotherCut = _motherCuts\n )\n\n return Selection( name+'Sel',\n Algorithm = _Dminus,\n RequiredSelections = inputSel\n )", "def LayoutAddDock(self, cont, dock, uiparts, spacer_only):\r\n \r\n sizer_item = wx.SizerItem()\r\n part = AuiDockUIPart()\r\n\r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n orientation = (dock.IsHorizontal() and [wx.HORIZONTAL] or [wx.VERTICAL])[0]\r\n\r\n # resizable bottom and right docks have a sash before them\r\n if not self._has_maximized and not dock.fixed and \\\r\n dock.dock_direction in [AUI_DOCK_BOTTOM, AUI_DOCK_RIGHT]:\r\n \r\n sizer_item = cont.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n\r\n part.type = AuiDockUIPart.typeDockSizer\r\n part.orientation = orientation\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n # create the sizer for the dock\r\n dock_sizer = wx.BoxSizer(orientation)\r\n\r\n # add each pane to the dock\r\n has_maximized_pane = False\r\n pane_count = len(dock.panes)\r\n\r\n if dock.fixed:\r\n \r\n # figure out the real pane positions we will\r\n # use, without modifying the each pane's pane_pos member\r\n pane_positions, pane_sizes = self.GetPanePositionsAndSizes(dock)\r\n\r\n offset = 0\r\n for pane_i in xrange(pane_count):\r\n \r\n pane = dock.panes[pane_i]\r\n pane_pos = pane_positions[pane_i]\r\n\r\n if pane.IsMaximized():\r\n has_maximized_pane = True\r\n\r\n amount = pane_pos - offset\r\n if amount > 0:\r\n \r\n if dock.IsVertical():\r\n sizer_item = dock_sizer.Add((1, amount), 0, wx.EXPAND)\r\n 
else:\r\n sizer_item = dock_sizer.Add((amount, 1), 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = (orientation==wx.HORIZONTAL and \\\r\n [wx.VERTICAL] or [wx.HORIZONTAL])[0]\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n offset = offset + amount\r\n \r\n uiparts = self.LayoutAddPane(dock_sizer, dock, pane, uiparts, spacer_only)\r\n\r\n offset = offset + pane_sizes[pane_i]\r\n \r\n # at the end add a very small stretchable background area\r\n sizer_item = dock_sizer.Add((0, 0), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n else:\r\n \r\n for pane_i in xrange(pane_count):\r\n \r\n pane = dock.panes[pane_i]\r\n\r\n if pane.IsMaximized():\r\n has_maximized_pane = True\r\n\r\n # if this is not the first pane being added,\r\n # we need to add a pane sizer\r\n if not self._has_maximized and pane_i > 0:\r\n sizer_item = dock_sizer.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typePaneSizer\r\n part.dock = dock\r\n part.pane = dock.panes[pane_i-1]\r\n part.button = None\r\n part.orientation = (orientation==wx.HORIZONTAL and \\\r\n [wx.VERTICAL] or [wx.HORIZONTAL])[0]\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n uiparts = self.LayoutAddPane(dock_sizer, dock, pane, uiparts, spacer_only)\r\n \r\n if dock.dock_direction == AUI_DOCK_CENTER or has_maximized_pane:\r\n sizer_item = cont.Add(dock_sizer, 1, wx.EXPAND)\r\n else:\r\n sizer_item = cont.Add(dock_sizer, 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeDock\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n if dock.IsHorizontal():\r\n cont.SetItemMinSize(dock_sizer, (0, dock.size))\r\n else:\r\n cont.SetItemMinSize(dock_sizer, (dock.size, 0))\r\n\r\n # top and left docks have a sash after them\r\n if not self._has_maximized and not dock.fixed and \\\r\n dock.dock_direction in [AUI_DOCK_TOP, AUI_DOCK_LEFT]:\r\n \r\n sizer_item = cont.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeDockSizer\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n return uiparts", "def port1_docking_date(self):\n return self._port1_docking_date", "def __init__(self, **kwargs):\n\n # mod_path = os.path.join(os.path.dirname(a.__file__), 'BindingPMF.py')\n # print \"\"\"###########\n # # AlGDock #\n # ###########\n # Molecular docking with adaptively scaled alchemical interaction grids\n #\n # in {0}\n # last modified {1}\n # \"\"\".format(mod_path, time.ctime(os.path.getmtime(mod_path)))\n\n from AlGDock.argument_parser import SimulationArguments\n self.args = SimulationArguments(**kwargs)\n\n from AlGDock.simulation_data import SimulationData\n self.data = {}\n self.data['BC'] = SimulationData(self.args.dir['BC'], 'BC', \\\n self.args.params['CD']['pose'])\n self.data['CD'] = 
SimulationData(self.args.dir['CD'], 'CD', \\\n self.args.params['CD']['pose'])\n\n if not 'max_time' in kwargs.keys():\n kwargs['max_time'] = None\n if not 'run_type' in kwargs.keys():\n kwargs['run_type'] = None\n\n from AlGDock.logger import Logger\n self.log = Logger(self.args, \\\n max_time=kwargs['max_time'], run_type=kwargs['run_type'])\n\n self.T_HIGH = self.args.params['BC']['T_HIGH']\n self.T_TARGET = self.args.params['BC']['T_TARGET']\n\n self._setup()\n\n print '\\n*** Simulation parameters and constants ***'\n for p in ['BC', 'CD']:\n print '\\nfor %s:' % p\n print dictionary_tools.dict_view(self.args.params[p])[:-1]\n\n self.run(kwargs['run_type'])", "def get_bestdockingscore(self):\r\n\r\n if not \"lc\" in self.cPoses.__dict__:\r\n print \"Load first sdf with poses\"\r\n return \r\n\r\n if \"dscores\" in self.__dict__:\r\n return self.dscores\r\n\r\n dscores = {}\r\n for dchem in self.cPoses.lc:\r\n # case where protein is included, case of XP docking\r\n if not \"r_i_docking_score\" in dchem.keys():\r\n continue\r\n\r\n chemblID = dchem[\"s_m_entry_name\"].split(\".\")[0]\r\n #print chemblID\r\n\r\n if not chemblID in dscores.keys():\r\n dscores[chemblID] = {}\r\n dscores[chemblID][\"count\"] = 1\r\n else:\r\n dscores[chemblID][\"count\"] = dscores[chemblID][\"count\"] + 1\r\n\r\n if not \"r_i_docking_score\" in dscores[chemblID].keys():\r\n dscores[chemblID][\"r_i_docking_score\"] = float(dchem[\"r_i_docking_score\"])\r\n dscores[chemblID][\"r_i_glide_emodel\"] = float(dchem[\"r_i_glide_emodel\"])\r\n else:\r\n if float(dchem[\"r_i_docking_score\"]) < dscores[chemblID][\"r_i_docking_score\"]:\r\n dscores[chemblID][\"r_i_docking_score\"] = float(chemblID[\"r_i_docking_score\"])\r\n dscores[chemblID][\"r_i_glide_emodel\"] = float(chemblID[\"r_i_glide_emodel\"])\r\n\r\n self.dscores = dscores\r\n\r\n # write\r\n pfilout = self.pr_out + \"score_poses.txt\"\r\n filout = open(pfilout, \"w\")\r\n filout.write(\"Chemicals\\tNb poses\\tGlide score\\temodel score\\n\")\r\n for chemblID in dscores.keys():\r\n filout.write(\"%s\\t%s\\t%s\\t%s\\n\"%(chemblID, dscores[chemblID][\"count\"], dscores[chemblID][\"r_i_docking_score\"], dscores[chemblID][\"r_i_glide_emodel\"]))\r\n filout.close()", "def init_layout(self):\n super(WxDockPane, self).init_layout()\n self.widget.SetDockWidget(self.dock_widget())", "def GetPanePositionsAndSizes(self, dock):\r\n \r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n gripper_size = self._art.GetMetric(AUI_DOCKART_GRIPPER_SIZE)\r\n\r\n positions = []\r\n sizes = []\r\n\r\n action_pane = -1\r\n pane_count = len(dock.panes)\r\n\r\n # find the pane marked as our action pane\r\n for pane_i in xrange(pane_count):\r\n pane = dock.panes[pane_i]\r\n if pane.HasFlag(AuiPaneInfo.actionPane):\r\n if action_pane != -1:\r\n raise Exception(\"Too many action panes!\")\r\n action_pane = pane_i\r\n \r\n # set up each panes default position, and\r\n # determine the size (width or height, depending\r\n # on the dock's orientation) of each pane\r\n for pane in dock.panes:\r\n positions.append(pane.dock_pos)\r\n size = 0\r\n \r\n if pane.HasBorder():\r\n size += pane_border_size*2\r\n \r\n if dock.IsHorizontal():\r\n if pane.HasGripper() and not pane.HasGripperTop():\r\n size += gripper_size\r\n\r\n if pane.HasCaptionLeft():\r\n size += caption_size\r\n \r\n size += pane.best_size.x\r\n \r\n else:\r\n if pane.HasGripper() and pane.HasGripperTop():\r\n size += gripper_size\r\n\r\n if 
pane.HasCaption() and not pane.HasCaptionLeft():\r\n size += caption_size\r\n \r\n size += pane.best_size.y\r\n \r\n sizes.append(size)\r\n\r\n # if there is no action pane, just return the default\r\n # positions (as specified in pane.pane_pos)\r\n if action_pane == -1:\r\n return positions, sizes\r\n\r\n offset = 0\r\n for pane_i in xrange(action_pane-1, -1, -1):\r\n amount = positions[pane_i+1] - (positions[pane_i] + sizes[pane_i])\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n positions[pane_i] -= -amount\r\n\r\n offset += sizes[pane_i]\r\n \r\n # if the dock mode is fixed, make sure none of the panes\r\n # overlap we will bump panes that overlap\r\n offset = 0\r\n for pane_i in xrange(action_pane, pane_count):\r\n amount = positions[pane_i] - offset\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n positions[pane_i] += -amount\r\n\r\n offset += sizes[pane_i]\r\n\r\n return positions, sizes", "def get(self, request):\n# self.context[\"form\"] = AddDockParamsForm()\n# self.context[\"data\"] = { \"sets\": [\n# {\"tape_id\":\"1\", \"tape_len\":1, \"a_aft\":1, \"a_fore\":1, \"f_rate\":1, \"f_qty\":1},\n# {\"tape_id\":\"2\", \"tape_len\":2, \"a_aft\":2, \"a_fore\":2, \"f_rate\":2, \"f_qty\":2},\n# {\"tape_id\":\"3\", \"tape_len\":3, \"a_aft\":3, \"a_fore\":3, \"f_rate\":3, \"f_qty\":3},\n# ] }\n# return render(request, \"dbkeeper/add_dock_params.html\", self.context)\n form = AddDockParamsForm()\n form.setFixedFields()\n try:\n dockParams = Setting.getDockParams()\n form.setFields(len(dockParams[\"sets\"]))\n# form.setData(dockParams)\n self.context[\"data\"] = form.setData(dockParams)\n except:\n self.context[\"data\"] = { \"numRows\": 0 }\n self.context[\"form\"] = form\n return render(request, \"dbkeeper/add_dock_params.html\", self.context)", "def _build_ppdf(self,pdf_dset,renormalize):\n\n if (not hasattr(self,'u')) or (not hasattr(self,'w')) or (not hasattr(self,'sfr')):\n raise AttributeError(\"axes are not set. 
Call set_axes() first\")\n\n dbinsq = self.dlogcs*self.dlogvout\n\n # Momentum flux PDF\n etaM = pdf_dset['etaM'] # in Msun/kpc^2/yr\n etap = self._etap(self.sfr) # in (Msun*km/s)/kpc^2/yr\n pdf_dset['etap'] = etap\n\n pfact = (self.vout**2+self.cs**2)/(self.vp*self.vout)\n ppdfc = etaM/etap*pdf_dset['Mpdf-cool']*pfact\n ppdfh = etaM/etap*pdf_dset['Mpdf-hot']*pfact\n ppdf = ppdfc + ppdfh\n\n if renormalize:\n renorm = ppdf.sum(dim=['logcs','logvout'])*dbinsq\n ppdfc = ppdfc/renorm\n ppdfh = ppdfh/renorm\n ppdf = ppdf/renorm\n pdf_dset['p_renorm'] = renorm\n\n pdf_dset['ppdf-cool'] = ppdfc\n pdf_dset['ppdf-hot'] = ppdfh\n pdf_dset['etap-cool'] = pdf_dset['etap']*ppdfc.sum(dim=['logcs','logvout'])*dbinsq\n pdf_dset['etap-hot'] = pdf_dset['etap']*ppdfh.sum(dim=['logcs','logvout'])*dbinsq\n pdf_dset['ppdf'] = ppdf", "def getRigBuildData(self):\n\n data = super(SimpleControlComponentGuide, self).getRigBuildData()\n\n data[\"ctrlSize\"] = self.ctrlSizeInputAttr.getValue()\n data[\"ctrlXfo\"] = self.mainCtrl.xfo\n\n return data", "def __init__(self, parent=None, args=[], macros=None):\n super(PyDMChartingDisplay, self).__init__(parent=parent, args=args, macros=macros)\n\n self.channel_map = dict()\n self.setWindowTitle(\"PyDM Charting Tool\")\n\n self.main_layout = QVBoxLayout()\n self.body_layout = QVBoxLayout()\n\n self.pv_layout = QHBoxLayout()\n self.pv_name_line_edt = QLineEdit()\n self.pv_name_line_edt.setAcceptDrops(True)\n self.pv_name_line_edt.installEventFilter(self)\n\n self.pv_protocol_cmb = QComboBox()\n self.pv_protocol_cmb.addItems([\"ca://\", \"archive://\"])\n\n self.pv_connect_push_btn = QPushButton(\"Connect\")\n self.pv_connect_push_btn.clicked.connect(self.add_curve)\n\n self.tab_panel = QTabWidget()\n self.tab_panel.setMaximumWidth(450)\n self.curve_settings_tab = QWidget()\n self.chart_settings_tab = QWidget()\n\n self.charting_layout = QHBoxLayout()\n self.chart = PyDMTimePlot(plot_by_timestamps=False, plot_display=self)\n self.chart.setPlotTitle(\"Time Plot\")\n\n self.splitter = QSplitter()\n\n self.curve_settings_layout = QVBoxLayout()\n self.curve_settings_layout.setAlignment(Qt.AlignTop)\n self.curve_settings_layout.setSizeConstraint(QLayout.SetMinAndMaxSize)\n self.curve_settings_layout.setSpacing(5)\n\n self.crosshair_settings_layout = QVBoxLayout()\n self.crosshair_settings_layout.setAlignment(Qt.AlignTop)\n self.crosshair_settings_layout.setSpacing(5)\n\n self.enable_crosshair_chk = QCheckBox(\"Enable Crosshair\")\n self.cross_hair_coord_lbl = QLabel()\n\n self.curve_settings_inner_frame = QFrame()\n self.curve_settings_inner_frame.setLayout(self.curve_settings_layout)\n\n self.curve_settings_scroll = QScrollArea()\n self.curve_settings_scroll.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)\n self.curve_settings_scroll.setWidget(self.curve_settings_inner_frame)\n\n self.curves_tab_layout = QHBoxLayout()\n self.curves_tab_layout.addWidget(self.curve_settings_scroll)\n\n self.enable_crosshair_chk.setChecked(False)\n self.enable_crosshair_chk.clicked.connect(self.handle_enable_crosshair_checkbox_clicked)\n self.enable_crosshair_chk.clicked.emit(False)\n\n self.chart_settings_layout = QVBoxLayout()\n self.chart_settings_layout.setAlignment(Qt.AlignTop)\n\n self.chart_layout = QVBoxLayout()\n self.chart_panel = QWidget()\n\n self.chart_control_layout = QHBoxLayout()\n self.chart_control_layout.setAlignment(Qt.AlignHCenter)\n self.chart_control_layout.setSpacing(10)\n\n self.view_all_btn = QPushButton(\"View All\")\n 
self.view_all_btn.clicked.connect(self.handle_view_all_button_clicked)\n self.view_all_btn.setEnabled(False)\n\n self.auto_scale_btn = QPushButton(\"Auto Scale\")\n self.auto_scale_btn.clicked.connect(self.handle_auto_scale_btn_clicked)\n self.auto_scale_btn.setEnabled(False)\n\n self.reset_chart_btn = QPushButton(\"Reset\")\n self.reset_chart_btn.clicked.connect(self.handle_reset_chart_btn_clicked)\n self.reset_chart_btn.setEnabled(False)\n\n self.resume_chart_text = \"Resume\"\n self.pause_chart_text = \"Pause\"\n self.pause_chart_btn = QPushButton(self.pause_chart_text)\n self.pause_chart_btn.clicked.connect(self.handle_pause_chart_btn_clicked)\n\n self.title_settings_layout = QVBoxLayout()\n self.title_settings_layout.setSpacing(10)\n\n self.title_settings_grpbx = QGroupBox()\n self.title_settings_grpbx.setFixedHeight(150)\n\n self.import_data_btn = QPushButton(\"Import Data...\")\n self.import_data_btn.clicked.connect(self.handle_import_data_btn_clicked)\n\n self.export_data_btn = QPushButton(\"Export Data...\")\n self.export_data_btn.clicked.connect(self.handle_export_data_btn_clicked)\n\n self.chart_title_lbl = QLabel(text=\"Chart Title\")\n self.chart_title_line_edt = QLineEdit()\n self.chart_title_line_edt.setText(self.chart.getPlotTitle())\n self.chart_title_line_edt.textChanged.connect(self.handle_title_text_changed)\n\n self.chart_change_axis_settings_btn = QPushButton(text=\"Change Axis Settings...\")\n self.chart_change_axis_settings_btn.clicked.connect(self.handle_change_axis_settings_clicked)\n\n self.update_datetime_timer = QTimer(self)\n self.update_datetime_timer.timeout.connect(self.handle_update_datetime_timer_timeout)\n\n self.chart_sync_mode_layout = QVBoxLayout()\n self.chart_sync_mode_layout.setSpacing(5)\n\n self.chart_sync_mode_grpbx = QGroupBox(\"Data Sampling Mode\")\n self.chart_sync_mode_grpbx.setFixedHeight(80)\n\n self.chart_sync_mode_sync_radio = QRadioButton(\"Synchronous\")\n self.chart_sync_mode_async_radio = QRadioButton(\"Asynchronous\")\n self.chart_sync_mode_async_radio.setChecked(True)\n\n self.graph_drawing_settings_layout = QVBoxLayout()\n\n self.chart_redraw_rate_lbl = QLabel(\"Redraw Rate (Hz)\")\n self.chart_redraw_rate_spin = QSpinBox()\n self.chart_redraw_rate_spin.setRange(MIN_REDRAW_RATE_HZ, MAX_REDRAW_RATE_HZ)\n self.chart_redraw_rate_spin.setValue(DEFAULT_REDRAW_RATE_HZ)\n self.chart_redraw_rate_spin.valueChanged.connect(self.handle_redraw_rate_changed)\n\n self.chart_data_sampling_rate_lbl = QLabel(\"Asynchronous Data Sampling Rate (Hz)\")\n self.chart_data_async_sampling_rate_spin = QSpinBox()\n self.chart_data_async_sampling_rate_spin.setRange(MIN_DATA_SAMPLING_RATE_HZ, MAX_DATA_SAMPLING_RATE_HZ)\n self.chart_data_async_sampling_rate_spin.setValue(DEFAULT_DATA_SAMPLING_RATE_HZ)\n self.chart_data_async_sampling_rate_spin.valueChanged.connect(self.handle_data_sampling_rate_changed)\n self.chart_data_sampling_rate_lbl.hide()\n self.chart_data_async_sampling_rate_spin.hide()\n\n self.chart_limit_time_span_layout = QHBoxLayout()\n self.chart_limit_time_span_layout.setSpacing(5)\n\n self.limit_time_plan_text = \"Limit Time Span\"\n self.chart_limit_time_span_chk = QCheckBox(self.limit_time_plan_text)\n self.chart_limit_time_span_chk.hide()\n self.chart_limit_time_span_lbl = QLabel(\"Hours : Minutes : Seconds\")\n self.chart_limit_time_span_hours_line_edt = QLineEdit()\n self.chart_limit_time_span_minutes_line_edt = QLineEdit()\n self.chart_limit_time_span_seconds_line_edt = QLineEdit()\n self.chart_limit_time_span_activate_btn = 
QPushButton(\"Apply\")\n self.chart_limit_time_span_activate_btn.setDisabled(True)\n\n self.chart_ring_buffer_size_lbl = QLabel(\"Ring Buffer Size\")\n self.chart_ring_buffer_size_edt = QLineEdit()\n self.chart_ring_buffer_size_edt.installEventFilter(self)\n self.chart_ring_buffer_size_edt.textChanged.connect(self.handle_buffer_size_changed)\n self.chart_ring_buffer_size_edt.setText(str(DEFAULT_BUFFER_SIZE))\n\n self.show_legend_chk = QCheckBox(\"Show Legend\")\n self.show_legend_chk.setChecked(self.chart.showLegend)\n self.show_legend_chk.clicked.connect(self.handle_show_legend_checkbox_clicked)\n\n self.graph_background_color_layout = QFormLayout()\n\n self.background_color_lbl = QLabel(\"Graph Background Color \")\n self.background_color_btn = QPushButton()\n self.background_color_btn.setStyleSheet(\"background-color: \" + self.chart.getBackgroundColor().name())\n self.background_color_btn.setContentsMargins(10, 0, 5, 5)\n self.background_color_btn.setMaximumWidth(20)\n self.background_color_btn.clicked.connect(self.handle_background_color_button_clicked)\n\n self.axis_settings_layout = QVBoxLayout()\n self.axis_settings_layout.setSpacing(5)\n\n self.show_x_grid_chk = QCheckBox(\"Show x Grid\")\n self.show_x_grid_chk.setChecked(self.chart.showXGrid)\n self.show_x_grid_chk.clicked.connect(self.handle_show_x_grid_checkbox_clicked)\n\n self.show_y_grid_chk = QCheckBox(\"Show y Grid\")\n self.show_y_grid_chk.setChecked(self.chart.showYGrid)\n self.show_y_grid_chk.clicked.connect(self.handle_show_y_grid_checkbox_clicked)\n\n self.axis_color_lbl = QLabel(\"Axis and Grid Color\")\n self.axis_color_lbl.setEnabled(False)\n\n self.axis_color_btn = QPushButton()\n self.axis_color_btn.setStyleSheet(\"background-color: \" + DEFAULT_CHART_AXIS_COLOR.name())\n self.axis_color_btn.setContentsMargins(10, 0, 5, 5)\n self.axis_color_btn.setMaximumWidth(20)\n self.axis_color_btn.clicked.connect(self.handle_axis_color_button_clicked)\n self.axis_color_btn.setEnabled(False)\n\n self.grid_opacity_lbl = QLabel(\"Grid Opacity\")\n self.grid_opacity_lbl.setEnabled(False)\n\n self.grid_opacity_slr = QSlider(Qt.Horizontal)\n self.grid_opacity_slr.setFocusPolicy(Qt.StrongFocus)\n self.grid_opacity_slr.setRange(0, 10)\n self.grid_opacity_slr.setValue(5)\n self.grid_opacity_slr.setTickInterval(1)\n self.grid_opacity_slr.setSingleStep(1)\n self.grid_opacity_slr.setTickPosition(QSlider.TicksBelow)\n self.grid_opacity_slr.valueChanged.connect(self.handle_grid_opacity_slider_mouse_release)\n self.grid_opacity_slr.setEnabled(False)\n\n self.reset_chart_settings_btn = QPushButton(\"Reset Chart Settings\")\n self.reset_chart_settings_btn.clicked.connect(self.handle_reset_chart_settings_btn_clicked)\n\n self.curve_checkbox_panel = QWidget()\n\n self.graph_drawing_settings_grpbx = QGroupBox()\n self.graph_drawing_settings_grpbx.setFixedHeight(270)\n\n self.axis_settings_grpbx = QGroupBox()\n self.axis_settings_grpbx.setFixedHeight(180)\n\n self.app = QApplication.instance()\n self.setup_ui()\n\n self.curve_settings_disp = None\n self.axis_settings_disp = None\n self.chart_data_export_disp = None\n self.chart_data_import_disp = None\n self.grid_alpha = 5\n self.time_span_limit_hours = None\n self.time_span_limit_minutes = None\n self.time_span_limit_seconds = None\n self.data_sampling_mode = ASYNC_DATA_SAMPLING", "def LoadPerspective(self, layout, update=True):\r\n\r\n input = layout\r\n\r\n # check layout string version\r\n # 'layout1' = wxAUI 0.9.0 - wxAUI 0.9.2\r\n # 'layout2' = wxAUI 0.9.2 (wxWidgets 2.8)\r\n index = 
input.find(\"|\")\r\n part = input[0:index].strip()\r\n input = input[index+1:]\r\n \r\n if part != \"layout2\":\r\n return False\r\n\r\n # mark all panes currently managed as docked and hidden\r\n for pane in self._panes:\r\n pane.Dock().Hide()\r\n\r\n # clear out the dock array; this will be reconstructed\r\n self._docks = []\r\n\r\n # replace escaped characters so we can\r\n # split up the string easily\r\n input = input.replace(\"\\\\|\", \"\\a\")\r\n input = input.replace(\"\\\\;\", \"\\b\")\r\n\r\n while 1:\r\n\r\n pane = AuiPaneInfo()\r\n index = input.find(\"|\")\r\n pane_part = input[0:index].strip()\r\n input = input[index+1:]\r\n\r\n # if the string is empty, we're done parsing\r\n if pane_part == \"\":\r\n break\r\n\r\n if pane_part[0:9] == \"dock_size\":\r\n index = pane_part.find(\"=\")\r\n val_name = pane_part[0:index]\r\n value = pane_part[index+1:]\r\n\r\n index = val_name.find(\"(\")\r\n piece = val_name[index+1:]\r\n index = piece.find(\")\")\r\n piece = piece[0:index]\r\n\r\n vals = piece.split(\",\")\r\n dir = int(vals[0])\r\n layer = int(vals[1])\r\n row = int(vals[2])\r\n size = int(value)\r\n \r\n dock = AuiDockInfo()\r\n dock.dock_direction = dir\r\n dock.dock_layer = layer\r\n dock.dock_row = row\r\n dock.size = size\r\n self._docks.append(dock)\r\n \r\n continue\r\n\r\n # Undo our escaping as LoadPaneInfo needs to take an unescaped\r\n # name so it can be called by external callers\r\n pane_part = pane_part.replace(\"\\a\", \"|\")\r\n pane_part = pane_part.replace(\"\\b\", \";\")\r\n\r\n pane = self.LoadPaneInfo(pane_part, pane)\r\n\r\n p = self.GetPane(pane.name)\r\n \r\n if not p.IsOk():\r\n if pane.IsNotebookControl():\r\n # notebook controls - auto add...\r\n self._panes.append(pane)\r\n indx = self._panes.index(pane)\r\n else:\r\n # the pane window couldn't be found\r\n # in the existing layout -- skip it\r\n continue\r\n\r\n else:\r\n indx = self._panes.index(p)\r\n pane.window = p.window\r\n pane.frame = p.frame\r\n pane.buttons = p.buttons\r\n self._panes[indx] = pane\r\n\r\n if isinstance(pane.window, auibar.AuiToolBar) and (pane.IsFloatable() or pane.IsDockable()):\r\n pane.window.SetGripperVisible(True)\r\n \r\n if update:\r\n self.Update()\r\n\r\n return True", "def create_dockable(self, dockable_name, widget):\n pass", "def GetDockWidget(self):\n return self._dock_widget", "def UpdateDockGuide(self, pos):\r\n\r\n self.target.UpdateDockGuide(pos)", "def __init__(self, other=None):\r\n\r\n if other:\r\n self.Assign(other)\r\n else:\r\n # window representing the docking target\r\n self.host = None\r\n # dock direction (top, bottom, left, right, center)\r\n self.dock_direction = AUI_DOCK_NONE", "def OnDocked(self, event):\n self._floating = False\n self._dock_area = event.GetPane().dock_direction\n wx.PostEvent(self, wxDockPaneDockedEvent())", "def create(self, verbose=False):\r\n # delete the window if its handle exists\r\n if cmds.window(self.window, exists=True):\r\n cmds.deleteUI(self.window)\r\n # initialize the window as a pane for docking\r\n self.window = cmds.loadUI(uiFile=self.uiFile, verbose=verbose)\r\n #layoutWin = cmds.paneLayout(configuration='single')\r\n # create a dockControl and parent the control to layoutWin\r\n cmds.dockControl(allowedArea='all', area='right', floating=False, \r\n height=cmds.window(self.window, query=True, height=True), \r\n content=self.window, label='Docked Cone Pointer Window')\r\n cmds.showWindow(self.window)", "def list():\n return [Dock.OMNI, Dock.LEFT, Dock.RIGHT]", "def createDockArea(self):\n 
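# Create the dock area for the global session and install it as this window's central widget.\n 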
self.centralDock = CentralDockArea(self.globalSession)\n self.setCentralWidget(self.centralDock)", "def __init__(self, df_flow, x1='x', x2='y', x3_value=None,resolution=100,x1_center=0.0,x2_center=0.0, D=None, invert_x1=False,\n crop_x1 = None, crop_x2=None):\n\n # Assign the axis names\n self.x1_name = x1\n self.x2_name = x2\n self.x3_name = [x3 for x3 in ['x','y','z'] if x3 not in [x1,x2]][0]\n\n # Find the nearest value in 3rd dimension\n search_values = np.array(sorted(df_flow[self.x3_name].unique()))\n nearest_idx = (np.abs(search_values-x3_value)).argmin()\n nearest_value = search_values[nearest_idx]\n print('Nearest value to in %s of %.2f is %.2f' % (self.x3_name, x3_value,nearest_value))\n \n # Get a sub-frame of only this 3rd dimension value\n df_sub = df_flow[df_flow[self.x3_name]==nearest_value]\n\n # Make sure cropping is valid\n if crop_x1:\n if crop_x1[0] < min(df_sub[x1]):\n raise Exception(\"Invalid x_1 minimum on cropping\")\n if crop_x1[1] > max(df_sub[x1]):\n raise Exception(\"Invalid x_1 maximum on cropping\")\n\n if crop_x2:\n if crop_x2[0] < min(df_sub[x2]):\n raise Exception(\"Invalid x_2 minimum on cropping\")\n if crop_x2[1] > max(df_sub[x2]):\n raise Exception(\"Invalid x_2 maximum on cropping\")\n\n # If cropping x1 do it now\n # if crop_x1:\n # df_sub = df_sub[(df_sub[x1] >= crop_x1[0]) & (df_sub[x1] <= crop_x1[1])]\n # if crop_x2:\n # df_sub = df_sub[(df_sub[x2] >= crop_x2[0]) & (df_sub[x2] <= crop_x2[1])]\n\n # Store the relevent values\n self.x1_in = df_sub[x1]\n self.x2_in = df_sub[x2]\n self.u_in = df_sub['u']\n self.v_in = df_sub['v']\n self.w_in = df_sub['w']\n\n # Save the desired resolution\n self.res = resolution\n\n # Grid the data, if cropping available use that\n if crop_x1:\n # self.x1_lin = np.linspace(min(self.x1_in), max(self.x1_in), resolution)\n self.x1_lin = np.linspace(crop_x1[0], crop_x1[1], resolution)\n else:\n self.x1_lin = np.linspace(min(self.x1_in), max(self.x1_in), resolution)\n if crop_x2:\n # self.x2_lin = np.linspace(min(self.x2_in), max(self.x2_in), resolution)\n self.x2_lin = np.linspace(crop_x2[0], crop_x2[1], resolution)\n else:\n self.x2_lin = np.linspace(min(self.x2_in), max(self.x2_in), resolution)\n \n # Mesh and interpolate u, v and w\n # print(self.x1_lin)\n # print(sorted(self.x1_in))\n self.x1_mesh, self.x2_mesh = np.meshgrid(self.x1_lin, self.x2_lin)\n self.u_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.u_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n self.v_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.v_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n self.w_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.w_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n \n # Save flat vectors\n self.x1_flat = self.x1_mesh.flatten()\n self.x2_flat = self.x2_mesh.flatten()\n\n # Save u-cubed\n self.u_cubed = self.u_mesh ** 3\n\n\n # Save re-centing points for visualization\n self.x1_center = x1_center\n self.x2_center = x2_center\n\n\n # If inverting, invert x1, and x1_center\n if invert_x1:\n self.x1_mesh = self.x1_mesh * -1\n self.x1_lin = self.x1_lin * -1\n self.x1_flat = self.x1_flat * -1 \n self.x1_center = self.x1_center * -1 \n self.v_mesh =self.v_mesh * -1\n\n\n # Set the diamater which will be used in visualization\n # Annalysis in D or meters?\n if D == None:\n self.plot_in_D = False\n self.D = 1.\n else:\n self.plot_in_D = True\n self.D = D", "def GetDockArea(self):\n return self._dock_area", "def 
dock_dx_dy(block1, dock1n, block2, dock2n):\n _dock1 = block1.docks[dock1n]\n _dock2 = block2.docks[dock2n]\n _d1type, _d1dir, _d1x, _d1y = _dock1[0:4]\n _d2type, _d2dir, _d2x, _d2y = _dock2[0:4]\n if block1 == block2:\n return (100, 100)\n if _d1dir == _d2dir:\n return (100, 100)\n if (_d2type is not 'number') or (dock2n is not 0):\n if block1.connections is not None and \\\n dock1n < len(block1.connections) and \\\n block1.connections[dock1n] is not None:\n return (100, 100)\n if block2.connections is not None and \\\n dock2n < len(block2.connections) and \\\n block2.connections[dock2n] is not None:\n return (100, 100)\n if _d1type != _d2type:\n if block1.name in STRING_OR_NUMBER_ARGS:\n if _d2type == 'number' or _d2type == 'string':\n pass\n elif block1.name in CONTENT_ARGS:\n if _d2type in CONTENT_BLOCKS:\n pass\n else:\n return (100, 100)\n (_b1x, _b1y) = block1.spr.get_xy()\n (_b2x, _b2y) = block2.spr.get_xy()\n return ((_b1x + _d1x) - (_b2x + _d2x), (_b1y + _d1y) - (_b2y + _d2y))", "def port2_docked_time(self):\n return self._port2_docked_time", "def createCfg_prep_dcard(self, jobOptions):\n category_output = self.channel\n if jobOptions['label']:\n category_output += \"_%s\" % jobOptions['label']\n lines = []\n lines.append(\"process.fwliteInput.fileNames = cms.vstring('%s')\" % jobOptions['inputFile'])\n lines.append(\"process.fwliteOutput.fileName = cms.string('%s')\" % jobOptions['datacardFile'])\n lines.append(\"process.prepareDatacards.processesToCopy = cms.vstring(%s)\" % self.prep_dcard_processesToCopy)\n lines.append(\"process.prepareDatacards.signals = cms.vstring(%s)\" % self.prep_dcard_signals)\n lines.append(\"process.prepareDatacards.makeSubDir = cms.bool(True)\")\n lines.append(\"process.prepareDatacards.categories = cms.VPSet(\")\n for charge in [\"OS\", \"SS\"]:\n for ptEtaBin in [\n \"BB_LL\", \"BB_ML\", \"BB_MM\", \"BB_HL\", \"BB_HM\", \"BB_HH\",\n \"EE_LL\", \"EE_ML\", \"EE_MM\", \"EE_HL\", \"EE_HM\", \"EE_HH\",\n \"BE_LL\", \"BE_ML\", \"EB_ML\",\"BE_MM\", \"BE_HL\", \"EB_HL\",\n \"BE_HM\", \"EB_HM\", \"BE_HH\", \"total\",\n ]:\n lines.append(\" cms.PSet(\")\n lines.append(\" input = cms.string('%s/%s'),\" % (charge, ptEtaBin))\n lines.append(\" output = cms.string('ttH_%s_%s_%s')\" % (self.channel, charge, ptEtaBin))\n lines.append(\" ),\")\n lines.append(\")\")\n lines.append(\"process.prepareDatacards.histogramToFit = cms.string('%s')\" % jobOptions['histogramToFit'])\n lines.append(\"process.prepareDatacards.sysShifts = cms.vstring(%s)\" % systematics.muon_E)\n create_cfg(self.cfgFile_prep_dcard, jobOptions['cfgFile_modified'], lines)", "def WindingDesign(main):\n oEditor = main['ANSYS']['oEditor']\n\n # Slots number\n Slots = main['ANSYS']['FixedVariables']['Slots']\n\n # SlotType\n SlotType = main['ANSYS']['FixedVariables']['SlotType']\n\n # Geimetric parameters\n g = main['ANSYS']['DesignProperties']['Stator']['g']\n\n Hs0 = main['ANSYS']['DesignProperties']['Slot']['Hs0']\n Hs1 = main['ANSYS']['DesignProperties']['Slot']['Hs1']\n Hs2 = main['ANSYS']['DesignProperties']['Slot']['Hs2']\n Bs1 = main['ANSYS']['DesignProperties']['Slot']['Bs1']\n Bs2 = main['ANSYS']['DesignProperties']['Slot']['Bs2']\n\n DiaGap = main['ANSYS']['DesignProperties']['Rotor']['DiaGap']\n\n # Coils Arrange ABC\n PhasesABC = main['ANSYS']['Winding']['ABC']\n\n # Color used for phases\n Color = main['ANSYS']['Winding']['Color']\n\n oEditor.CreateUserDefinedPart(\n [\n \"NAME:UserDefinedPrimitiveParameters\",\n \"DllName:=\"\t\t, \"RMxprt/LapCoil.dll\",\n \"Version:=\"\t\t, 
\"16.0\",\n \"NoOfParameters:=\"\t, 22,\n \"Library:=\"\t\t, \"syslib\",\n [\n \"NAME:ParamVector\",\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"DiaGap\",\n \"Value:=\"\t\t, \"DiaGap+g*2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"DiaYoke\",\n \"Value:=\"\t\t, \"DiaYoke\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Length\",\n \"Value:=\"\t\t, \"0mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Skew\",\n \"Value:=\"\t\t, \"0deg\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Slots\",\n \"Value:=\"\t\t, str(int(Slots))\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"SlotType\",\n \"Value:=\"\t\t, str(int(SlotType))\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Hs0\",\n \"Value:=\"\t\t, \"Hs0\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Hs1\",\n \"Value:=\"\t\t, \"Hs1\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Hs2\",\n \"Value:=\"\t\t, \"Hs2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Bs0\",\n \"Value:=\"\t\t, \"Bs0\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Bs1\",\n \"Value:=\"\t\t, \"Bs1\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Bs2\",\n \"Value:=\"\t\t, \"Bs2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Rs\",\n \"Value:=\"\t\t, \"Rs\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"FilletType\",\n \"Value:=\"\t\t, \"0\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Layers\",\n \"Value:=\"\t\t, \"2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"CoilPitch\",\n \"Value:=\"\t\t, \"1\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"EndExt\",\n \"Value:=\"\t\t, \"5mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"SpanExt\",\n \"Value:=\"\t\t, \"25mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"BendAngle\",\n \"Value:=\"\t\t, \"0deg\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"SegAngle\",\n \"Value:=\"\t\t, \"10deg\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"LenRegion\",\n \"Value:=\"\t\t, \"200mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"InfoCoil\",\n \"Value:=\"\t\t, \"0\"\n ]\n ]\n ],\n [\n \"NAME:Attributes\",\n \"Name:=\"\t\t, \"LapCoil1\",\n \"Flags:=\"\t\t, \"\",\n \"Color:=\"\t\t, \"(143 175 143)\",\n \"Transparency:=\"\t, 0,\n \"PartCoordinateSystem:=\", \"Global\",\n \"UDMId:=\"\t\t, \"\",\n \"MaterialValue:=\"\t, \"\\\"copper\\\"\",\n \"SurfaceMaterialValue:=\", \"\\\"\\\"\",\n \"SolveInside:=\"\t\t, True,\n \"ShellElement:=\"\t, False,\n \"ShellElementThickness:=\", \"0mm\",\n \"IsMaterialEditable:=\"\t, True,\n \"UseMaterialAppearance:=\", False,\n \"IsLightweight:=\"\t, False\n ]\n )\n\n # Body Separation\n oEditor.SeparateBody(\n [\n \"NAME:Selections\",\n \"Selections:=\"\t\t, \"LapCoil1\",\n \"NewPartsModelFlag:=\"\t, \"Model\"\n ],\n [\n \"CreateGroupsForNewObjects:=\", False\n ]\n )\n\n # Average Slot Width\n AverWidth = (Bs2 + Bs1)/2\n\n # Average Radius\n AverRadius = DiaGap/2 + g + Hs0 + Hs1 + Hs2*0.75\n\n # Angle to shift and find the kth tooth\n ShiftSlot = 1/Slots*np.pi\n\n # Angle to fond the corrent layer\n ShiftLayer = np.arctan(AverWidth/4/AverRadius)\n\n # List to save the coils sides names\n WindingNames = [[], [], []]\n\n # Phases name to employed\n PhaseNames = ['A', 'B', 'C']\n\n for phase, row in enumerate(PhasesABC):\n\n PhaseName = [[], []]\n\n for coil, slot in enumerate(row):\n\n SlotAngle = np.abs(slot)/Slots*2*np.pi - ShiftSlot\n\n if coil % 2 == 1:\n SlotAngle = SlotAngle - ShiftLayer\n\n else:\n SlotAngle = SlotAngle + ShiftLayer\n\n x = np.cos(SlotAngle)*AverRadius\n y = np.sin(SlotAngle)*AverRadius\n\n Name0 = oEditor.GetBodyNamesByPosition(\n [\n \"NAME:Parameters\",\n \"XPosition:=\", str(x)+\"mm\",\n 
\"YPosition:=\", str(y)+\"mm\",\n \"ZPosition:=\", \"0mm\"\n ]\n )\n\n C = Color[phase]\n\n if np.sign(slot) == 1:\n\n CoilSideName = PhaseNames[phase]+\"In\"+str(np.abs(coil))\n\n PhaseName[0] += [CoilSideName]\n\n oEditor.ChangeProperty(\n [\n \"NAME:AllTabs\",\n [\n \"NAME:Geometry3DAttributeTab\",\n [\n \"NAME:PropServers\",\n Name0[0]\n ],\n [\n \"NAME:ChangedProps\",\n [\n \"NAME:Name\",\n \"Value:=\"\t\t,\n CoilSideName\n ],\n [\n \"NAME:Color\",\n \"R:=\"\t\t\t, C[0],\n \"G:=\"\t\t\t, C[1],\n \"B:=\"\t\t\t, C[2]\n ],\n\n ]\n ]\n ]\n )\n else:\n\n CoilSideName = PhaseNames[phase]+\"Out\"+str(np.abs(coil))\n\n PhaseName[1] += [CoilSideName]\n\n oEditor.ChangeProperty(\n [\n \"NAME:AllTabs\",\n [\n \"NAME:Geometry3DAttributeTab\",\n [\n \"NAME:PropServers\",\n Name0[0]\n ],\n [\n \"NAME:ChangedProps\",\n [\n \"NAME:Name\",\n \"Value:=\"\t\t,\n CoilSideName\n ],\n [\n \"NAME:Color\",\n \"R:=\"\t\t\t, C[0],\n \"G:=\"\t\t\t, C[1],\n \"B:=\"\t\t\t, C[2],\n ],\n\n ]\n ]\n ]\n )\n\n WindingNames[phase] += PhaseName\n\n main['ANSYS']['Winding']['CoilNames'] = WindingNames\n\n return main", "def dop_comp(field) :\n dop = fits.open(field+'/'+field+'_rv.fits')\n r13 = apload.ApLoad(apred='r13')\n old = r13.apField(field)\n\n i1,i2 = match.match(dop[1].data['APOGEE_ID'],old[1].data['APOGEE_ID'])\n print(len(dop[1].data),len(old[1].data),len(i1))\n\n fig,ax=plots.multi(1,1)\n plots.plotc(ax,dop[1].data['RV_TEFF'][i1],dop[1].data['VHELIO_AVG'][i1]-old[1].data['VHELIO_AVG'][i2],dop[1].data['VSCATTER'][i1])\n\n j=np.argsort(np.abs(dop[1].data['VHELIO_AVG'][i1]-old[1].data['VHELIO_AVG'][i2],dop[1].data['VSCATTER'][i1]))\n\n plots._data = dop[1].data\n plots._id_cols=['APOGEE_ID']\n plots.event(fig)\n key=' '\n sf,sax=plots.multi(1,2,sharex=True,hspace=0.001)\n while key != 'e' :\n x,y,key,index = plots.mark(fig,index=True)\n obj = dop[1].data['APOGEE_ID'][i1[index]]\n #jv = np.where(dop[2].data['APOGEE_ID'] == dop[1].data['APOGEE_ID'][i1])[0]\n out=pickle.load(open(field+'/'+obj+'_out.pkl','rb'))\n print(obj,old[1].data['APOGEE_ID'][i2[index]])\n print(out[0])\n sax[0].cla()\n spec=old[2].data['SPEC'][i2[index]]\n plots.plotl(sax[0],old[3].data['WAVE'][0,:],spec/convolve(spec,np.ones(500)/500,mode='same'),xr=[15000,17000],yr=[0.5,1.5])\n for mod,obs in zip(out[2],out[3]) :\n sax[1].cla()\n for chip in range(3) :\n plots.plotl(sax[1],obs.wave[:,chip],obs.flux[:,chip],color='k',yr=[0.5,1.5])\n gd = np.where(obs.mask[:,chip] == False)[0]\n plots.plotl(sax[1],obs.wave[gd,chip],obs.flux[gd,chip],color='g')\n plots.plotl(sax[1],mod.wave[:,chip],mod.flux[:,chip],color='r')\n plt.draw()\n input('hit a key: ')", "def dock(self,\n protein_file,\n ligand_file,\n centroid=None,\n box_dims=None,\n dry_run=False):\n protein_docked, ligand_docked = self.pose_generator.generate_poses(\n protein_file, ligand_file, centroid, box_dims, dry_run)\n if not dry_run:\n score = self.pose_scorer.score(protein_docked, ligand_docked)\n else:\n score = np.zeros((1,))\n return (score, (protein_docked, ligand_docked))", "def dock(self):\n if not self.dockingClient.gh or not self.dockingClient.get_state() in (GoalStatus.SUCCEEDED, GoalStatus.PENDING, GoalStatus.ACTIVE):\n self.dockingClient.send_goal(AutoDockingGoal()) #TODO test if parameter is required\n rospy.loginfo(self.name + \": docking\") \n if self.dockingClient.get_state() == GoalStatus.SUCCEEDED: \n self.dockingClient.stop_tracking_goal()\n rospy.loginfo(self.name + \": docking succeeded\")\n self.docked = True \n return True\n return False", "def config_gb_md(self):\n\n 
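# Implicit-solvent (Generalized Born) MD settings: start from the base MD options, then use a very large nonbonded cutoff (cut=999.0), switch on GB (igb=1), and disable pressure coupling (ntp=0, barostat=0).\n 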
self._config_md()\n self.title = \"GB MD Simulation\"\n self.cntrl[\"cut\"] = 999.0\n self.cntrl[\"igb\"] = 1\n self.cntrl[\"ntp\"] = 0\n self.cntrl[\"barostat\"] = 0", "def GetTotalPixSizeAndProportion(self, dock):\r\n\r\n totalPixsize = 0\r\n totalProportion = 0\r\n\r\n # determine the total proportion of all resizable panes,\r\n # and the total size of the dock minus the size of all\r\n # the fixed panes\r\n for tmpPane in dock.panes:\r\n \r\n if tmpPane.IsFixed():\r\n continue\r\n\r\n totalProportion += tmpPane.dock_proportion\r\n\r\n if dock.IsHorizontal():\r\n totalPixsize += tmpPane.rect.width\r\n else:\r\n totalPixsize += tmpPane.rect.height\r\n\r\n## if tmpPane.min_size.IsFullySpecified():\r\n## \r\n## if dock.IsHorizontal():\r\n## totalPixsize -= tmpPane.min_size.x\r\n## else:\r\n## totalPixsize -= tmpPane.min_size.y\r\n \r\n return totalPixsize, totalProportion", "def docked_time(self):\n return self._docked_time", "def setup(self):\r\n # productive\r\n profprint()\r\n #-----------------------------------------------------------------------------\r\n # Needle Finder Logic\r\n logic = self.logic\r\n\r\n #Report Frame########################################\r\n self.__reportFrame = ctk.ctkCollapsibleButton()\r\n self.__reportFrame.text = \"Segmentation Report\"\r\n self.__reportFrame.collapsed = 1\r\n reportFrame = qt.QFormLayout(self.__reportFrame)\r\n\r\n # segmentation report\r\n self.analysisGroupBox = qt.QGroupBox()\r\n self.analysisGroupBox.setFixedHeight(330)\r\n self.analysisGroupBox.setTitle('Segmentation Report')\r\n reportFrame.addRow(self.analysisGroupBox)\r\n self.analysisGroupBoxLayout = qt.QFormLayout(self.analysisGroupBox)\r\n\r\n #-----------------------------------------------------------------------------\r\n\r\n #Report Frame Control Point########################################\r\n self.__reportFrameCTL = ctk.ctkCollapsibleButton()\r\n self.__reportFrameCTL.text = \"Manual Segmentation Report\"\r\n self.__reportFrameCTL.collapsed = 1\r\n reportFrameCTL = qt.QFormLayout(self.__reportFrameCTL)\r\n\r\n # manual segmentation report\r\n self.analysisGroupBoxCTL = qt.QGroupBox()\r\n self.analysisGroupBoxCTL.setFixedHeight(330)\r\n self.analysisGroupBoxCTL.setTitle('Manual Segmentation Report')\r\n reportFrameCTL.addRow(self.analysisGroupBoxCTL)\r\n self.analysisGroupBoxLayoutCTL = qt.QFormLayout(self.analysisGroupBoxCTL)\r\n\r\n #-----------------------------------------------------------------------------\r\n\r\n #Segmentation Frame##########################################\r\n self.__segmentationFrame = ctk.ctkCollapsibleButton()\r\n self.__segmentationFrame.text = \"Segmentation\"\r\n self.__segmentationFrame.collapsed = 0\r\n segmentationFrame = qt.QFormLayout(self.__segmentationFrame)\r\n\r\n # 1 Define template\r\n self.templateSliceButton = qt.QPushButton('1. Select Current Axial Slice as Seg. Limit (current: None)')\r\n segmentationFrame.addRow(self.templateSliceButton)\r\n self.templateSliceButton.connect('clicked()', logic.placeAxialLimitMarker)\r\n self.templateSliceButton.setEnabled(1)\r\n\r\n # 2 give needle tips\r\n self.fiducialButton = qt.QPushButton('2. 
Start Giving Needle Tips [CTRL + ENTER]')\r\n self.fiducialButton.checkable = True\r\n segmentationFrame.addRow(self.fiducialButton)\r\n self.fiducialButton.connect('toggled(bool)', self.onStartStopGivingNeedleTipsToggled)\r\n self.fiducialButton.setEnabled(0)\r\n\r\n # New insertion - create new set of needles with different colors\r\n self.newInsertionButton = None\r\n # self.newInsertionButton = qt.QPushButton('New Needle Set')\r\n # segmentationFrame.addRow(self.newInsertionButton)\r\n # self.newInsertionButton.connect('clicked()', logic.newInsertionNeedleSet)\r\n # self.newInsertionButton.setEnabled(0)\r\n\r\n # Delete Needle Button\r\n self.deleteNeedleButton = qt.QPushButton('Delete Last Segmented Needle [Ctrl + Z]')\r\n segmentationFrame.addRow(self.deleteNeedleButton)\r\n # self.deleteNeedleButton.connect('clicked()', logic.deleteAllAutoNeedlesFromScene)\r\n self.deleteNeedleButton.connect('clicked()', logic.deleteLastNeedle)\r\n self.deleteNeedleButton.setEnabled(0)\r\n\r\n # Reset Needle Detection Button\r\n self.resetDetectionButton = qt.QPushButton('Reset Needle Detection (Start Over)')\r\n segmentationFrame.addRow(self.resetDetectionButton)\r\n self.resetDetectionButton.connect('clicked()', logic.resetNeedleDetection)\r\n self.resetDetectionButton.setEnabled(0)\r\n\r\n # auto segmentation report\r\n segmentationFrame.addRow(self.__reportFrame)\r\n\r\n #Validation Frame##########################################\r\n self.__validationFrame = ctk.ctkCollapsibleButton()\r\n self.__validationFrame.text = \"Validation\"\r\n self.__validationFrame.collapsed = 0 # <<<\r\n validationFrame = qt.QFormLayout(self.__validationFrame)\r\n\r\n self.startGivingControlPointsButton = qt.QPushButton('Start Giving Control Points')\r\n self.startGivingControlPointsButton.checkable = True\r\n self.startGivingControlPointsButton.setStyleSheet(\"QPushButton {background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #ccffcc, stop: 1 #f3fff3)}\"\r\n \"QPushButton:checked{background-color: red;}\")\r\n\r\n self.startGivingControlPointsButton.connect('toggled(bool)', self.onStartStopGivingValidationControlPointsToggled)\r\n\r\n self.startAssistModeButton = qt.QPushButton('Assisted Manual Segmentation')\r\n self.startAssistModeButton.checkable = True\r\n self.startAssistModeButton.connect('toggled(bool)', self.onStartAssistModeToggled)\r\n\r\n self.validationNeedleButton = qt.QPushButton('Next Validation Needle: (0)->(1)')\r\n self.validationNeedleButton.toolTip = \"By clicking on this button, you will increment the number of the needle\"\r\n self.validationNeedleButton.toolTip += \"that you want to manually segment. Thus, the points you will add will be used to draw a new needle.<br/>\"\r\n self.validationNeedleButton.toolTip += \"<b>Warning:<b> You can/'t add any more points to the current needle after clicking here\"\r\n\r\n self.validationNeedleButton.connect('clicked()', logic.validationNeedle)\r\n\r\n self.drawValidationNeedlesButton = qt.QPushButton('Render Manual Needle 0')\r\n self.drawValidationNeedlesButton.toolTip = \"Redraw every manually segmented needles. This is usefull for example if you moved a control point, or after you added a new needle\"\r\n\r\n self.drawValidationNeedlesButton.connect('clicked()', logic.drawValidationNeedles)\r\n\r\n self.startValidationButton = qt.QPushButton('Start Evaluation')\r\n self.startValidationButton.toolTip = \"Launch tracking algo. 
from the tip of the manually segmented needles\"\r\n\r\n self.startValidationButton.connect('clicked()', logic.startValidation)\r\n #self.startValidationButton.setStyleSheet(\"background-color: yellow\")\r\n self.startValidationButton.setStyleSheet(\"background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #f7f700, stop: 1 #dbdb00)\");\r\n\r\n # Reset Needle Validation Button\r\n self.resetValidationButton = qt.QPushButton('Reset Manual Segmentation')\r\n self.templateRegistrationButton = qt.QPushButton('[Beta] Template Registration')\r\n\r\n # Hide Markers Button\r\n self.hideAnnotationTextButton = qt.QPushButton('Hide Marker Texts')\r\n self.hideAnnotationTextButton.checkable = True\r\n\r\n # Undo Button\r\n self.undoButton = qt.QPushButton('Undo Fiducial Mvt')\r\n self.undoButton.checkable = False\r\n\r\n\r\n self.resetValidationButton.connect('clicked()', logic.resetNeedleValidation)\r\n self.templateRegistrationButton.connect('clicked()', logic.autoregistration)\r\n self.hideAnnotationTextButton.connect('clicked()', logic.hideAnnotations)\r\n self.undoButton.connect('clicked()', logic.undoFid)\r\n\r\n self.editNeedleTxtBox = qt.QSpinBox()\r\n self.editNeedleTxtBox.connect(\"valueChanged(int)\", logic.changeValue)\r\n editLabel = qt.QLabel('Choose Needle:')\r\n\r\n # Choose needle\r\n self.configFrameCTL = qt.QFrame()\r\n self.configFrameCTL.setLayout(qt.QHBoxLayout())\r\n\r\n self.configFrameCTL.layout().addWidget(editLabel)\r\n self.configFrameCTL.layout().addWidget(self.editNeedleTxtBox)\r\n self.configFrameCTL.layout().addWidget(self.validationNeedleButton)\r\n\r\n # validationFrame.addRow(editLabel, self.editNeedleTxtBox)\r\n # validationFrame.addRow(self.validationNeedleButton)\r\n validationFrame.layout().addRow(self.configFrameCTL)\r\n validationFrame.addRow(self.startGivingControlPointsButton)\r\n validationFrame.addRow(self.startAssistModeButton)\r\n validationFrame.addRow(self.drawValidationNeedlesButton)\r\n validationFrame.addRow(self.startValidationButton)\r\n validationFrame.addRow(self.resetValidationButton)\r\n validationFrame.addRow(self.hideAnnotationTextButton)\r\n validationFrame.addRow(self.undoButton)\r\n #validationFrame.addRow(self.templateRegistrationButton)\r\n validationFrame.addRow(self.__reportFrameCTL)\r\n\r\n # self.scrollPointButton = qt.QPushButton('Scroll Ctrl Pt for Needle ' + str(self.editNeedleTxtBox.value))\r\n # validationFrame.addRow(self.scrollPointButton)\r\n # self.scrollPointButton.connect('clicked()', logic.scrollPoint)\r\n\r\n # Needle detection parameters#################################\r\n self.__parameterFrame = ctk.ctkCollapsibleButton()\r\n self.__parameterFrame.text = \"Needle Detection Parameters (Developers)\"\r\n self.__parameterFrame.collapsed = 0\r\n parameterFrame = qt.QFormLayout(self.__parameterFrame)\r\n\r\n # Load/Save/Reset\r\n self.configFrame = qt.QFrame()\r\n self.configFrame.setLayout(qt.QHBoxLayout())\r\n parameterFrame.layout().addRow(self.configFrame)\r\n self.loadButton = qt.QPushButton()\r\n self.loadButton.text = \"Load Parameters\"\r\n self.loadButton.checkable = False\r\n self.loadButton.toolTip = \"Click to load parameters from a configuration file.\"\r\n self.loadButton.connect('clicked()', self.onLoad)\r\n self.saveButton = qt.QPushButton()\r\n self.saveButton.checkable = False\r\n self.saveButton.text = \"Save Parameters\"\r\n self.saveButton.toolTip = \"Click to save the parameters in a configuration file.\"\r\n self.saveButton.connect('clicked()', self.onSave)\r\n self.resetParametersButton 
= qt.QPushButton()\r\n self.resetParametersButton.checkable = False\r\n self.resetParametersButton.text = \"Reset Default Parameters\"\r\n self.resetParametersButton.toolTip = \"Click to reset the default parameters from default.cfg\"\r\n self.resetParametersButton.connect('clicked()', self.onResetParameters)\r\n self.configFrame.layout().addWidget(self.loadButton)\r\n self.configFrame.layout().addWidget(self.saveButton)\r\n self.configFrame.layout().addWidget(self.resetParametersButton)\r\n\r\n # Auto correct tip position?\r\n self.autoCorrectTip = qt.QCheckBox('Auto correct tip position?')\r\n parameterFrame.addRow(self.autoCorrectTip)\r\n self.autoCorrectTip.setChecked(0)\r\n\r\n # Look for needles in CT?\r\n self.invertedContrast = qt.QCheckBox('Search for bright needles (CT)?')\r\n parameterFrame.addRow(self.invertedContrast)\r\n # Compute gradient?\r\n self.gradient = qt.QCheckBox('Compute gradient?')\r\n self.gradient.setChecked(1)\r\n parameterFrame.addRow(self.gradient)\r\n\r\n # Filter ControlPoints?\r\n self.filterControlPoints = qt.QCheckBox('Filter Control Points?')\r\n self.filterControlPoints.setChecked(0)\r\n # parameterFrame.addRow(self.filterControlPoints)\r\n\r\n # Draw Fiducial Points?\r\n self.drawFiducialPoints = qt.QCheckBox('Draw Control Points?')\r\n self.drawFiducialPoints.setChecked(0)\r\n parameterFrame.addRow(self.drawFiducialPoints)\r\n\r\n # Auto find Tips: Tracking in +z and -z direction\r\n self.autoStopTip = qt.QCheckBox('Tracking in both directions')\r\n self.autoStopTip.setChecked(0)\r\n parameterFrame.addRow(self.autoStopTip)\r\n\r\n # Extend Needle to the wanted value\r\n self.extendNeedle = qt.QCheckBox('Extend Needle')\r\n self.extendNeedle.setChecked(0)\r\n parameterFrame.addRow(self.extendNeedle)\r\n\r\n # Real Needle Value (used to extend the needle)\r\n realNeedleLengthLabel = qt.QLabel('Real Needle Length (mm):')\r\n self.realNeedleLength = qt.QSpinBox()\r\n self.realNeedleLength.setMinimum(0.1)\r\n self.realNeedleLength.setMaximum(1500)\r\n self.realNeedleLength.setValue(240)\r\n parameterFrame.addRow(realNeedleLengthLabel, self.realNeedleLength)\r\n\r\n # Max Needle Length?\r\n self.maxLength = qt.QCheckBox('Max Needle Length?')\r\n self.maxLength.setChecked(1)\r\n parameterFrame.addRow(self.maxLength)\r\n\r\n # Add Gaussian Estimation?\r\n self.gaussianAttenuationButton = qt.QCheckBox('Add Gaussian Prob. 
Attenuation?')\r\n self.gaussianAttenuationButton.setChecked(1)\r\n parameterFrame.addRow(self.gaussianAttenuationButton)\r\n\r\n # nb points per line spin box\r\n # ## previously 4 - try with 20\r\n self.sigmaValue = qt.QSpinBox()\r\n self.sigmaValue.setMinimum(0.1)\r\n self.sigmaValue.setMaximum(500)\r\n self.sigmaValue.setValue(20)\r\n sigmaValueLabel = qt.QLabel(\"Sigma Value (exp(-x^2/(2*(sigma/10)^2))): \")\r\n parameterFrame.addRow(sigmaValueLabel, self.sigmaValue)\r\n\r\n # nb points per line spin box\r\n self.gradientPonderation = qt.QSpinBox()\r\n self.gradientPonderation.setMinimum(0.01)\r\n self.gradientPonderation.setMaximum(500)\r\n self.gradientPonderation.setValue(5)\r\n gradientPonderationLabel = qt.QLabel(\"Gradient Ponderation: \")\r\n parameterFrame.addRow(gradientPonderationLabel, self.gradientPonderation)\r\n\r\n # center accuentuation\r\n # ## previously 1, try with 2 ( avoids exiting catheter track)\r\n self.exponent = qt.QSpinBox()\r\n self.exponent.setMinimum(0.01)\r\n self.exponent.setMaximum(500)\r\n self.exponent.setValue(2)\r\n exponentLabel = qt.QLabel(\"Center Ponderation: \")\r\n parameterFrame.addRow(exponentLabel, self.exponent)\r\n\r\n # nb points per line spin box\r\n self.nbPointsPerLine = qt.QSpinBox()\r\n self.nbPointsPerLine.setMinimum(2)\r\n self.nbPointsPerLine.setMaximum(500)\r\n self.nbPointsPerLine.setValue(20)\r\n nbPointsPerLineLabel = qt.QLabel(\"Number of points per line: \")\r\n # parameterFrame.addRow( nbPointsPerLineLabel, self.nbPointsPerLine)\r\n\r\n # nb radius iteration spin box\r\n self.nbRadiusIterations = qt.QSpinBox()\r\n self.nbRadiusIterations.setMinimum(2)\r\n self.nbRadiusIterations.setMaximum(1000)\r\n self.nbRadiusIterations.setValue(13)\r\n nbRadiusIterationsLabel = qt.QLabel(\"Number of distance iterations: \")\r\n # parameterFrame.addRow( nbRadiusIterationsLabel, self.nbRadiusIterations)\r\n\r\n # distance max spin box\r\n self.radiusMax = qt.QSpinBox()\r\n self.radiusMax.setMinimum(0)\r\n self.radiusMax.setMaximum(1000)\r\n self.radiusMax.setValue(5)\r\n distanceMaxLabel = qt.QLabel(\"Radius of cone base (mm): \")\r\n parameterFrame.addRow(distanceMaxLabel, self.radiusMax)\r\n\r\n # nb rotating iterations spin box\r\n self.nbRotatingIterations = qt.QSpinBox()\r\n self.nbRotatingIterations.setMinimum(2)\r\n self.nbRotatingIterations.setMaximum(1000)\r\n self.nbRotatingIterations.setValue(35)\r\n nbRotatingIterationsLabel = qt.QLabel(\"Number of rotating steps: \")\r\n parameterFrame.addRow(nbRotatingIterationsLabel, self.nbRotatingIterations)\r\n\r\n # nb heights per needle spin box\r\n self.numberOfPointsPerNeedle = qt.QSpinBox()\r\n self.numberOfPointsPerNeedle.setMinimum(1)\r\n self.numberOfPointsPerNeedle.setMaximum(50)\r\n self.numberOfPointsPerNeedle.setValue(6)\r\n numberOfPointsPerNeedleLabel = qt.QLabel(\"Number of Control Points: \")\r\n parameterFrame.addRow(numberOfPointsPerNeedleLabel, self.numberOfPointsPerNeedle)\r\n\r\n # nb heights per needle spin box\r\n self.stepsize = qt.QSpinBox()\r\n self.stepsize.setMinimum(1)\r\n self.stepsize.setMaximum(500)\r\n self.stepsize.setValue(5)\r\n stepsizeLabel = qt.QLabel(\"Stepsize: \")\r\n # parameterFrame.addRow( stepsizeLabel, self.stepsize)\r\n\r\n # lenghtNeedle\r\n self.lenghtNeedleParameter = qt.QSpinBox()\r\n self.lenghtNeedleParameter.setMinimum(1)\r\n self.lenghtNeedleParameter.setMaximum(10000)\r\n self.lenghtNeedleParameter.setValue(100)\r\n stepsizeLabel = qt.QLabel(\"Lenght of the needles (mm): \")\r\n parameterFrame.addRow(stepsizeLabel, 
self.lenghtNeedleParameter)\r\n\r\n # radius\r\n self.radiusNeedleParameter = qt.QSpinBox()\r\n self.radiusNeedleParameter.setMinimum(1)\r\n self.radiusNeedleParameter.setMaximum(200)\r\n self.radiusNeedleParameter.setValue(2)\r\n radiusLabel = qt.QLabel(\"Radius of the needles (mm): \")\r\n parameterFrame.addRow(radiusLabel, self.radiusNeedleParameter)\r\n\r\n # algo\r\n self.algoVersParameter = qt.QSpinBox()\r\n self.algoVersParameter.setMinimum(0)\r\n self.algoVersParameter.setMaximum(9)\r\n self.algoVersParameter.setValue(0)\r\n algoLabel = qt.QLabel(\"Needle detection version: \")\r\n parameterFrame.addRow(algoLabel, self.algoVersParameter)\r\n\r\n # Research/dev. area#################################\r\n self.__devFrame = ctk.ctkCollapsibleButton()\r\n self.__devFrame.text = \"R&&D (Developers)\"\r\n self.__devFrame.collapsed = 0\r\n devFrame = qt.QFormLayout(self.__devFrame)\r\n\r\n # #Segment Needle Button\r\n # self.needleButton = qt.QPushButton('Segment Needles')\r\n # segmentationFrame.addRow(self.needleButton)\r\n # self.needleButton.connect('clicked()', self.needleSegmentation)\r\n # self.needleButton.setEnabled(0)\r\n\r\n # Segment Needle Button\r\n # self.needleButton2 = qt.QPushButton('Segment/Update Needles - Python')\r\n # segmentationFrame.addRow(self.needleButton2)\r\n # self.needleButton2.connect('clicked()', self.needleDetection)\r\n\r\n self.skipSegLimitButton = qt.QPushButton('Skip Giving Seg. Limit.')\r\n self.skipSegLimitButton.checkable = False\r\n self.skipSegLimitButton.connect('clicked(bool)', self.onSkipSegLimit)\r\n\r\n # Obturator needle tips\r\n self.fiducialObturatorButton = qt.QPushButton('Start Giving Obturator Needle Tips')\r\n self.fiducialObturatorButton.checkable = True\r\n self.fiducialObturatorButton.connect('toggled(bool)', self.onStartStopGivingObturatorNeedleTipsToggled)\r\n\r\n self.renderObturatorNeedlesButton = qt.QPushButton('Render Obturator Needles')\r\n self.renderObturatorNeedlesButton.checkable = False\r\n self.renderObturatorNeedlesButton.connect('clicked()', self.logic.drawObturatorNeedles)\r\n\r\n self.displayFiducialButton = qt.QPushButton('Display Labels On Needles')\r\n self.displayFiducialButton.connect('clicked()', logic.displayFiducial)\r\n\r\n self.displayContourButton = qt.QPushButton('Draw Radiation Isosurfaces')\r\n self.displayContourButton.checkable = False\r\n self.displayContourButton.connect('clicked()', logic.drawIsoSurfaces)\r\n\r\n self.hideContourButton = qt.QPushButton('Hide Radiation Isosurfaces')\r\n self.hideContourButton.checkable = True\r\n self.hideContourButton.connect('clicked()', logic.hideIsoSurfaces)\r\n self.hideContourButton.setEnabled(0)\r\n\r\n self.filterButton = qt.QPushButton('Preprocessing')\r\n self.filterButton.checkable = False\r\n self.filterButton.connect('clicked()', logic.filterWithSITK)\r\n self.filterButton.setEnabled(1)\r\n\r\n self.parSearchButton = qt.QPushButton('Parameter Search')\r\n self.parSearchButton.checkable = False\r\n self.parSearchButton.connect('clicked()', logic.parSearch)\r\n self.parSearchButton.setEnabled(1)\r\n\r\n self.setAsValNeedlesButton = qt.QPushButton('Use Needles for Validation')\r\n self.setAsValNeedlesButton.checkable = False\r\n self.setAsValNeedlesButton.connect('clicked()', logic.setAllNeedleTubesAsValidationNeedles)\r\n self.setAsValNeedlesButton.setEnabled(1)\r\n self.setAsValNeedlesButton.setStyleSheet(\"background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #f7f700, stop: 1 #dbdb00)\");\r\n\r\n # ## create segmentation editor 
environment:\r\n editorWidgetParent = slicer.qMRMLWidget()\r\n editorWidgetParent.setLayout(qt.QVBoxLayout())\r\n editorWidgetParent.setMRMLScene(slicer.mrmlScene)\r\n editorWidgetParent.hide()\r\n self.editorWidget = None\r\n # The order of statements is important here for resetNeedleDetection to work!!\r\n self.editorWidget = EditorWidget(editorWidgetParent, False)\r\n self.editUtil = None\r\n self.editUtil = self.editorWidget.editUtil # EditorLib.EditUtil.EditUtil()\r\n self.currentLabel = None\r\n self.setWandEffectOptions() # has to be done before setup():\r\n self.editUtil.setCurrentEffect(\"DefaultTool\")\r\n self.editorWidget.setup()\r\n # our mouse mode button\r\n self.editorWidget.toolsBox.actions[\"NeedleFinder\"] = qt.QAction(0) # dummy self.fiducialButton\r\n self.undoRedo = None\r\n self.undoRedo = self.editorWidget.toolsBox.undoRedo\r\n self.currentLabel = self.editUtil.getLabel()\r\n self.editorWidget.editLabelMapsFrame.setText(\"Edit Segmentation\")\r\n self.editorWidget.editLabelMapsFrame.connect('contentsCollapsed(bool)', self.onEditorCollapsed)\r\n editorWidgetParent.show()\r\n self.editUtil.setCurrentEffect(\"NeedleFinder\")\r\n\r\n self.scenePath = qt.QLineEdit()\r\n self.cleanSceneButton = qt.QPushButton('Clean Scene')\r\n self.cleanSceneButton.connect('clicked()', logic.cleanScene)\r\n\r\n # devFrame.addRow(self.displayFiducialButton)\r\n devFrame.addWidget(editorWidgetParent)\r\n devFrame.addRow(self.scenePath)\r\n devFrame.addRow(self.cleanSceneButton)\r\n devFrame.addRow(self.skipSegLimitButton)\r\n devFrame.addRow(self.fiducialObturatorButton)\r\n devFrame.addRow(self.renderObturatorNeedlesButton)\r\n devFrame.addRow(self.displayContourButton)\r\n devFrame.addRow(self.hideContourButton)\r\n devFrame.addRow(self.filterButton)\r\n devFrame.addRow(self.parSearchButton)\r\n devFrame.addRow(self.setAsValNeedlesButton)\r\n devFrame.addRow(self.templateRegistrationButton)\r\n\r\n #put frames on the tab########################################\r\n self.layout.addRow(self.__segmentationFrame)\r\n #self.layout.addRow(self.__reportFrame)\r\n # self.layout.addRow(self.__reportFrameCTL)\r\n self.layout.addRow(self.__validationFrame)\r\n self.layout.addRow(self.__parameterFrame)\r\n self.layout.addRow(self.__devFrame)\r\n\r\n # reset module\r\n resetButton = qt.QPushButton('Reset Module')\r\n resetButton.connect('clicked()', self.onReload)\r\n self.widget = slicer.qMRMLWidget()\r\n self.widget.setLayout(self.layout)\r\n self.layout2.addWidget(self.widget)\r\n\r\n # init table report\r\n self.initTableView() # init the report table\r\n self.initTableViewControlPoints() # init the report table\r\n\r\n # Lauren's feature request: set mainly unused coronal view to sagittal to display ground truth bitmap image (if available)\r\n # Usage after fresh slicer start: 1. Load scene and 2. reference jpg. 3. 
Then open NeedleFinder from Modules selector\r\n vnJPG = slicer.util.getNode(\"Case *\") # the naming convention for the ground truth JPG files: \"Case XXX.jpg\"\r\n if vnJPG:\r\n print \"showing ground 2d image truth in green view\"\r\n # show JPG image if available\r\n sw = slicer.app.layoutManager().sliceWidget(\"Green\")\r\n cn = sw.mrmlSliceCompositeNode()\r\n cn.SetBackgroundVolumeID(vnJPG.GetID())\r\n slicer.app.layoutManager().sliceWidget(\"Green\").sliceLogic().GetBackgroundLayer().Modified()\r\n sGreen = slicer.mrmlScene.GetNodeByID(\"vtkMRMLSliceNodeGreen\")\r\n if sGreen == None :\r\n sGreen = slicer.mrmlScene.GetNodeByID(\"vtkMRMLSliceNode2\")\r\n # set to axial view\r\n sGreen.SetSliceVisible(0)\r\n sGreen.SetOrientationToAxial()\r\n sw.fitSliceToBackground()\r\n sGreen.Modified()\r\n\r\n self.onResetParameters()\r\n self.setupShortcuts()", "def _placeDock(self, dock, pos=None, otherDock=None):\n if otherDock is not None and pos is not None:\n self.area.addDock(dock,pos,otherDock)\n elif pos is not None:\n self.area.addDock(dock,pos,otherDock)\n else:\n self.area.addDock(dock)\n return dock", "def pcd(dw, qpts=50):\n w = w0+dw\n pcm.set_qpts(qpts)\n sml = pcm.sml_w(w)\n avgchi = pcm.avgchi\n pcm.set_qpts(0)\n sml2 = pcm.sml_w(w)\n print sml, log(sml) - pcm.offset, avgchi\n print sml2, log(sml2) - pcm.offset, pcm.avgchi", "def onDockClosed(self): # used when Dock dialog is closed\n self.profile_dock = None", "def plot_prec_value1(self):\n# self.query_dict={'code':code.value,'exchange':exchange.value,\\\n# 'structure':struct.value,'element':element.value,'properties':prop.value}\n# print ('POSTING', self.query_dict)\n# self.query_api(endpoint='evk')\n\n #layout_doc.children[4].children[0] = self.plot_pade_figure()\n\n\n self.query_dict={'code':code.value,'exchange':exchange.value,\\\n 'structure':struct.value,'element':element.value,'properties':prop.value}\n print ('POSTING', self.query_dict)\n self.query_api(endpoint='evk')\n\n layout_doc.children[4].children[0] = self.plot_pade_figure()", "def setDetails(self):\n pwd = utils.get_cwd()\n production_idx = pwd.find('production')+13\n self.prodDir = pwd[0:production_idx]\n self.nd280Version = os.path.basename(os.getenv('ND280ROOT'))\n self.production = self.prodDir.split('production')[-1].strip('0')\n self.respin = utils.split_path(pwd[production_idx+1:])[0]\n self.nuMCType = utils.split_path(pwd[production_idx+1:])[2]\n self.fillFromCard('runInfo.card')\n\n self.usingNUCP = False\n if 'beam/' in pwd:\n self.beam = 'beam'\n if 'nue/' in pwd:\n self.beam = 'nue'\n self.nuType = 'nue'\n if 'run1/' in pwd:\n self.beam = 'run1'\n self.ecalPeriods = '1-2'\n if 'run2/' in pwd:\n self.beam = 'run2'\n self.tpcPeriods = 'runs2-3'\n self.ecalPeriods = '1-2'\n if 'run3/' in pwd:\n self.beam = 'run3'\n self.tpcPeriods = 'runs2-3'\n self.ecalPeriods = '3-4'\n if 'run4/' in pwd:\n self.beam = 'run4'\n self.tpcPeriods = 'runs2-3-4'\n self.ecalPeriods = '3-4'\n if 'run5/' in pwd:\n self.beam = 'run5'\n self.tpcPeriods = 'runs2-3-4'\n self.ecalPeriods = '3-4'\n if 'ccpiplus/' in pwd:\n self.beam = 'ccpiplus'\n self.nMesons = 0\n self.nLeptons = 1\n self.nMuMinus = 1\n self.nPiZero = 0\n self.nPiPlus = 1\n self.usingNUCP = True\n if 'ccpizero/' in pwd:\n self.beam = 'ccpizero'\n self.nMesons = 0\n self.nLeptons = 1\n self.nMuMinus = 1\n self.nPiZero = 1\n self.nPiPlus = 0\n self.usingNUCP = True\n if 'ncpiplus/' in pwd:\n self.beam = 'ncpiplus'\n self.nMesons = 0\n self.nLeptons = 0\n self.nMuMinus = 0\n self.nPiZero = 0\n self.nPiPlus = 1\n 
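# Neutral-current single-pi+ topology: no charged leptons (nLeptons=0), one pi+ and no pi0 in the final state.\n 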
self.usingNUCP = True\n if 'ncpizero/' in pwd:\n self.beam = 'ncpizero'\n self.nMesons = 0\n self.nLeptons = 0\n self.nMuMinus = 0\n self.nPiZero = 1\n self.nPiPlus = 0\n self.usingNUCP = True\n if 'tpcgas/' in pwd:\n self.beam = 'tpcgas'\n if 'verify/' in pwd:\n self.verify = True\n if self.nuMCType == 'anti-genie':\n self.runprefix -= 10000000\n if 'genie' in pwd:\n self.mc = 'Genie'\n self.runprefix += 1000000\n self.respin = pwd[pwd.find(self.prodDir)+len(self.prodDir)+1:][0]\n if self.respin not in string.uppercase:\n print 'Respin', self.respin, 'doesn\\'t appear to be an UPPER CASE LETTER'\n if '2010-11' in pwd:\n self.baseline = '2010-11'\n\n if 'magnet/' in pwd:\n self.runN = int(pwd[pwd.find('/run')+4])\n self.runprefix += (self.runN-1)*100000\n\n if 'water' in pwd:\n self.fill = 'water'\n self.p0dwater = 1\n self.runprefix += 10000\n if 'basket/' in pwd:\n self.fluxVolume = 'basket'\n self.fluxMasterVolume = 'Basket'\n self.fluxName = 'basket'\n self.runN = 2\n self.runprefix += 101000\n if 'nue/' in pwd:\n self.fluxName = 'Nue'\n self.runprefix += 1000\n elif 'ncpizero/' in pwd:\n self.fluxName = 'NC1pi0'\n self.runprefix += 2000\n elif 'ccpizero/' in pwd:\n self.fluxName = 'CC1pi0'\n self.runprefix += 3000\n elif 'ncpiplus/' in pwd:\n self.fluxName = 'NC1pi+'\n self.runprefix += 4000\n elif 'ccpiplus/' in pwd:\n self.fluxName = 'CC1pi+'\n self.runprefix += 5000\n elif 'ncpizerofgd/' in pwd:\n self.fluxName = 'NCpi0FGD'\n self.fluxMasterVolume = 'FGD1'\n self.runprefix += 6000\n elif 'ccpicoh/' in pwd:\n self.fluxName = 'CCpicoh'\n self.fluxMasterVolume = 'FGD1'\n self.runprefix += 7000\n elif 'tpcgas/' in pwd:\n self.fluxName = 'TPCGas'\n # set this to mask ND280 geometry\n # the self.standalone option can be set to a single ND280 detector\n # and overrides the baseline setting. However, turns out that\n # setting master_volume to Half produces events only on argon so\n # we are using that instead.\n # self.standalone = 'TPC'\n self.fluxMasterVolume = 'Half'\n self.forceVolume = 'true'\n self.runprefix += 6000\n\n self.setBasePath()\n self.setNumcDir()\n self.setPassThruDir()\n self.setFluxDir()\n self.setFluxInfo()", "def fillDetInfo():\n print('here i am')\n # 1. maps of analysis channel to cpd, and pulser monitor channels\n detCH, pMons = {}, {}\n for ds in [0,1,2,3,4,5,6]:\n f = np.load(\"%s/data/ds%d_detChans.npz\" % (os.environ['LATDIR'], ds))\n detCH[ds] = f['arr_0'].item()\n pMons[ds] = f['arr_1'].item()\n\n # 2. 
maps of HV and TRAP threshold settings are stored in the DB.\n # make them global, and move them to the runSettings file.\n # FORMAT: {ds : {'det' : [(run1,val1),(run2,val2)...]} }\n detHV, detTH = {}, {}\n\n # load all possible values, as in settingsMgr\n detDB = db.TinyDB(\"%s/calDB-v2.json\" % dsi.latSWDir)\n detPars = db.Query()\n cal = dsi.CalInfo()\n for ds in [0,1,2,3,4,5,6]:\n # for ds in [0]:\n print(\"scanning ds\",ds)\n detTH[ds] = {}\n detHV[ds] = {}\n for key in cal.GetKeys(ds):\n mod = -1\n if \"m1\" in key: mod = 1\n if \"m2\" in key: mod = 2\n for cIdx in range(cal.GetIdxs(key)):\n\n # load the DB records\n dbKeyTH = \"trapThr_%s_c%d\" % (key, cIdx)\n dbValTH = dsi.getDBRecord(dbKeyTH,calDB=detDB,pars=detPars)\n\n dbKeyHV = \"hvBias_%s_c%d\" % (key, cIdx)\n dbValHV = dsi.getDBRecord(dbKeyHV,calDB=detDB,pars=detPars)\n\n # debug: print the record\n # for val in sorted(dbValTH):\n # if len(dbValTH[val])>0:\n # print(val, dbValTH[val])\n # return\n\n # fill the first value\n if len(detTH[ds])==0:\n detTH[ds] = dbValTH\n detHV[ds] = dbValHV\n continue\n\n # check for new threshold values.\n for cpd in detTH[ds]:\n nOld, nNew = len(detTH[ds][cpd]), len(dbValTH[cpd])\n\n # detector just came online\n if nOld==0 and nNew>0:\n detTH[ds][cpd] = dbValTH[cpd]\n continue\n # detector still offline\n if nOld==0 and nNew==0:\n continue\n # detector just went offline\n if nOld>0 and nNew==0:\n continue\n\n # check last run/trap pair against each new one\n prevRun, prevTH = detTH[ds][cpd][-1][0], detTH[ds][cpd][-1][1]\n for val in dbValTH[cpd]:\n thisRun, thisTH = val[0], val[1]\n if thisTH != prevTH:\n detTH[ds][cpd].append([thisRun,thisTH])\n prevTH = thisTH\n\n # check for new HV values.\n for cpd in detHV[ds]:\n\n nOld, nNew = len(detHV[ds][cpd]), len(dbValHV[cpd])\n\n # detector just came online\n if nOld==0 and nNew>0:\n detHV[ds][cpd] = dbValHV[cpd]\n continue\n # detector still offline\n if nOld==0 and nNew==0:\n continue\n # detector just went offline\n if nOld>0 and nNew==0:\n continue\n\n # check last run/trap pair against each new one\n prevRun, prevHV = detHV[ds][cpd][-1][0], detHV[ds][cpd][-1][1]\n for val in dbValHV[cpd]:\n thisRun, thisHV = val[0], val[1]\n if thisHV != prevHV:\n print(\"found HV diff. 
cpd %d prev %dV (run %d) new %dV (run %d)\" % (cpd, prevHV, prevRun, thisHV, thisRun))\n detHV[ds][cpd].append([thisRun,thisHV])\n prevHV = thisHV\n\n # return\n\n # # load the old file and compare\n # # GOAL: improve on this file.\n # # f = np.load(\"%s/data/runSettings.npz\" % dsi.latSWDir)\n # # detHVOld = f['arr_0'].item()\n # # detTHOld = f['arr_1'].item()\n # # detCHOld = f['arr_2'].item()\n # # pMonsOld = f['arr_3'].item()\n #\n # ds = 3\n # print(\"old results, ds\",ds)\n # for cpd in sorted(detTHOld[ds]):\n # if cpd!=\"122\":continue\n # if len(detTHOld[ds][cpd]) > 0:\n # print(cpd, detTHOld[ds][cpd])\n #\n # # for ds in [0,1,2,3,4,5,6]:\n # print(\"thresh results, ds:\",ds)\n # for cpd in sorted(detTH[ds]):\n # # if cpd!=122:continue\n # if len(detTH[ds][cpd]) > 0:\n # print(cpd, detTH[ds][cpd])\n\n\n np.savez(\"%s/data/runSettings-v2.npz\" % dsi.latSWDir,detHV,detTH,detCH,pMons)", "def GetDockingImage(direction, useAero, center):\r\n\r\n suffix = (center and [\"\"] or [\"_single\"])[0]\r\n prefix = \"\"\r\n if useAero == 2:\r\n # Whidbey docking guides\r\n prefix = \"whidbey_\"\r\n elif useAero == 1:\r\n # Aero docking style\r\n prefix = \"aero_\"\r\n \r\n if direction == wx.TOP:\r\n bmp_unfocus = eval(\"%sup%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sup_focus%s\"%(prefix, suffix)).GetBitmap()\r\n elif direction == wx.BOTTOM:\r\n bmp_unfocus = eval(\"%sdown%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sdown_focus%s\"%(prefix, suffix)).GetBitmap()\r\n elif direction == wx.LEFT:\r\n bmp_unfocus = eval(\"%sleft%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sleft_focus%s\"%(prefix, suffix)).GetBitmap()\r\n elif direction == wx.RIGHT:\r\n bmp_unfocus = eval(\"%sright%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sright_focus%s\"%(prefix, suffix)).GetBitmap()\r\n else:\r\n bmp_unfocus = eval(\"%stab%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%stab_focus%s\"%(prefix, suffix)).GetBitmap()\r\n\r\n return bmp_unfocus, bmp_focus", "def main(folder, quiet=0):\n\n if quiet:\n output_stream = StringIO()\n else:\n output_stream = sys.stdout\n\n\n\n color1 = \"I4\" #filter system for first color of CMD\n color2 = \"M1\" #filter system for second color of CMD\n zeromagc1 = zero.zero_mag[color1]\n zeromagc2 = zero.zero_mag[color2]\n min_mag = 8. #minimal observation limit\n max_mag = 0. #maximal observation limit\n\n#getting file list\n files = sorted(os.listdir('%s/%s' % (os.getcwdu(), folder))) \n out = []\n\n for fil in files:\n#only using files created by the automated simulation\n if fil.startswith('sim_') and not 'settings' in fil.encode(\"ascii\"):\n print(\"%s/%s\" % (folder,fil.encode(\"ascii\")), file=output_stream)\n \n\n # Read in\n hdulist = fits.open('%s/%s' %(folder,fil))\n data = hdulist[1].data\n\n #calculating magnitudes from fluxes and converting to CMD-data\n x = -2.5*(np.log10(data['c%s' % color1]/zeromagc1) - np.log10(data['c%s' % color2]/zeromagc2))\n y = -2.5*(np.log10(data['c%s' % color2]/zeromagc2))\n\n \n sel = np.logical_and( (y > -10./3. * (x-1.) 
+ 10.), np.logical_and(max_mag < y, y < min_mag))\n sel = np.logical_and(sel, y < -x + 12.)\n n = sum(sel)\n t = Table(hdulist[1].data)\n if 'sel' in t.columns:\n t.remove_column('sel')\n t.add_column(Column(name='sel', data=sel.astype('int')))\n \n hdulist[1].data = np.array(t)\n tmp, av, apera, age = fil.split('_')\n fits.update('%s/%s' %(folder,fil), np.array(t), ext = 1, clobber=True)\n out.append([av, apera, age, n])\n\n #writing obtained data to \"folder/__expected_number\"\n head = ['#', 'AV', 'Aperature_size', 'Age', 'Expected_number']\n f = open('%s/__expected_number' % folder, 'w')\n f.write(','.join(head)+'\\n' )\n np.savetxt(f, np.asarray(out).astype(int))\n f.close()\n \n print (\"Analysed %s files and saved output to %s\" % (len(out),'%s/__expected_number' % folder), file=output_stream)", "def port2_docking_date(self):\n return self._port2_docking_date", "def __init__(self, parent):\n\n assert isinstance(parent, FindingChartDialog)\n\n self.parent = parent\n self.db = self.parent.db\n\n builder = self.parent.builder\n builder.add_from_file(glade.CHART_PREFERENCES_DIALOG)\n\n self.dialog = builder.get_object('chart-preferences-dialog')\n self.dialog.set_transient_for(self.parent.dialog)\n self.dialog.set_title(\"Finding Chart: Preferences\")\n self.dialog.set_resizable(False)\n\n # Note: gtk.RESPONSE_SAVE doesn't exist; we use gtk.RESPONSE_OK\n self.close_button = self.dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)\n self.apply_button = self.dialog.add_button(gtk.STOCK_APPLY, gtk.RESPONSE_APPLY)\n self.save_button = self.dialog.add_button(gtk.STOCK_SAVE, gtk.RESPONSE_OK)\n self.dialog.set_default_response(gtk.RESPONSE_CLOSE)\n self.dialog.set_focus(self.close_button)\n\n text = \"Update chart with these parameters\"\n self.apply_button.set_tooltip_text(text)\n\n text = \"Store these parameters in the LEMONdB\"\n self.save_button.set_tooltip_text(text)\n\n # Spin buttons to select the value of Vmin / Vmax\n self.vmin_button = builder.get_object('vmin-spinbutton')\n self.vmax_button = builder.get_object('vmax-spinbutton')\n\n # If the values of both Vmin and Vmax are stored in the LEMONdB, assume\n # a logarithmic scale. Otherwise, use the normalization algorithm and\n # Vmin / Vmax values defined by the APLpyNormalize object of the\n # finding chart (parent.aplpy_plot.image.norm). Note that, by default,\n # FITSFigure.show_grayscale() uses a linear stretch.\n\n try:\n self.stretch = 'log'\n vmin = self.db.vmin\n vmax = self.db.vmax\n\n msg1 = \"Normalization parameters (vmin and vmax) read from LEMONdB\"\n msg2 = \"Assuming logarithmic normalization (stretch = 'log')\"\n for message in msg1, msg2:\n logging.debug(message)\n\n except AttributeError:\n normalize = self.parent.aplpy_plot.image.norm\n self.stretch = normalize.stretch\n vmin = normalize.vmin\n vmax = normalize.vmax\n\n msg1 = \"Normalization parameters not stored in the LEMONdB\"\n msg2 = \"Algorithm and values read from APLpyNormalize object\"\n for message in msg1, msg2:\n logging.debug(message)\n\n # Because of the linear normalization formula, which APLpyNormalize\n # uses by default, it may set a value of 'vmin' smaller than that\n # of the minimum pixel level of the finding chart (read from the\n # 'data_min' attribute of the parent FindingChartDialog object):\n #\n # vmin = -0.1 * (vmax - vmin) + vmin\n #\n # Therefore, we need to make sure to use the lowest of the two values:\n # 'vmin' and 'data_min', since (although strange at first) the former\n # may be smaller than the latter. 
Analogously, the value of 'vmax'\n # returned by APLpyNormalize can be greater than the maximum pixel\n # level, so we must take that into account.\n\n data_min = numpy.ceil (min(self.parent.data_min, vmin))\n data_max = numpy.floor(max(self.parent.data_max, vmax))\n assert hasattr(self, 'stretch')\n\n kwargs = dict(lower = data_min, upper = data_max, step_incr = 1)\n vmin_adjust = gtk.Adjustment(value = vmin, **kwargs)\n vmax_adjust = gtk.Adjustment(value = vmax, **kwargs)\n self.vmin_button.set_adjustment(vmin_adjust)\n self.vmax_button.set_adjustment(vmax_adjust)\n\n def ndigits(n):\n \"\"\" Return the number of digits of an integer \"\"\"\n return len(str(abs(n)))\n\n # The desired width of the button, in characters\n self.vmin_button.set_width_chars(ndigits(data_min))\n self.vmax_button.set_width_chars(ndigits(data_max))\n\n # Show the absolute minimum and maximum allowed values\n data_min_entry = builder.get_object('data-min-entry')\n data_min_entry.set_width_chars(ndigits(data_min))\n data_min_entry.set_text(str(data_min))\n data_min_entry.set_sensitive(False)\n\n data_max_entry = builder.get_object('data-max-entry')\n data_max_entry.set_width_chars(ndigits(data_max))\n data_max_entry.set_text(str(data_max))\n data_max_entry.set_sensitive(False)\n\n # Both spin buttons must be in the range [data_min, data_max], but\n # there is a second restriction: Vmin must be at all times <= Vmax.\n # Use the 'value-changed' signal, emitted when any of the settings\n # (i.e. value, digits) that change the display of the spinbutton are\n # changed, to enforce this. Every time that Vmin is changed we make\n # sure that it is <= Vmax; otherwise we set it to Vmax. The same is\n # done with Vmax, ensuring that it is always >= Vmin.\n\n def vmin_changed_callback(*args):\n upper = self.vmax_button.get_value()\n if self.vmin_button.get_value() > upper:\n self.vmin_button.set_value(upper)\n\n def vmax_changed_callback(*args):\n lower = self.vmin_button.get_value()\n if self.vmax_button.get_value() < lower:\n self.vmax_button.set_value(lower)\n\n self.vmin_button.connect('value-changed', vmin_changed_callback)\n self.vmax_button.connect('value-changed', vmax_changed_callback)\n self.dialog.connect('response', self.handle_response)", "def _dsurface_domega(self):\n\n dsdo = 0.\n\n return dsdo", "def dockprep(self, force_rerun=False):\n log.debug('{}: running dock preparation...'.format(self.id))\n\n prep_mol2 = op.join(self.dock_dir, '{}_prep.mol2'.format(self.id))\n prep_py = op.join(self.dock_dir, \"prep.py\")\n\n if ssbio.utils.force_rerun(flag=force_rerun, outfile=prep_mol2):\n with open(prep_py, \"w\") as f:\n f.write('import chimera\\n')\n f.write('from DockPrep import prep\\n')\n f.write('models = chimera.openModels.list(modelTypes=[chimera.Molecule])\\n')\n f.write('prep(models)\\n')\n f.write('from WriteMol2 import writeMol2\\n')\n f.write('writeMol2(models, \"{}\")\\n'.format(prep_mol2))\n\n cmd = 'chimera --nogui {} {}'.format(self.structure_path, prep_py)\n os.system(cmd)\n os.remove(prep_py)\n os.remove('{}c'.format(prep_py))\n\n if ssbio.utils.is_non_zero_file(prep_mol2):\n self.dockprep_path = prep_mol2\n log.debug('{}: successful dockprep execution'.format(self.dockprep_path))\n else:\n log.critical('{}: dockprep failed to run on PDB file'.format(self.structure_path))", "def ValidateNotebookDocking(self, valid):\r\n \r\n return 0", "def __init__(self, parent=None, pltw=None, cpos=None, stguess=None):\n super(pkFitDlg, self).__init__(parent)\n\n self.parent = parent\n self.title = 'Peak 
Fitting tool'\n self.pltw = pltw\n self.cpos = cpos\n self.maxparm = 5\n self.first = True\n self.npeaks = 0\n self.blkno = self.pltw.curvelist[cpos].yvinfo.blkpos\n self.xpos = self.pltw.curvelist[cpos].xvinfo.vidx\n self.ypos = self.pltw.curvelist[cpos].yvinfo.vidx\n self.data = np.vstack((self.pltw.blklst[self.blkno][self.xpos],\n self.pltw.blklst[self.blkno][self.ypos],\n self.pltw.blklst[self.blkno][self.ypos],\n np.zeros(len(self.pltw.blklst[self.blkno][self.xpos]))))\n (self.nvect, self.npt) = self.data.shape\n self.diffshift = abs(self.data[1].min() - getSpan(self.data[1]) * 0.15)\n\n if stguess is None:\n pkdlg = getPkDlg(parent, self)\n pkdlg.setModal(True)\n ret = pkdlg.exec()\n if ret:\n stguess = pkdlg.stguess\n\n if stguess is None:\n self.stguess = None\n self.close()\n return\n else:\n self.stguess = stguess.replace('\\n', ',')\n self.guessToParms(self.stguess)\n\n # Create the layout\n self.createLayout()\n # Connect buttons to callback functions\n self.exeBtn.clicked.connect(self.compute)\n self.okBtn.clicked.connect(self.validate)\n self.cancelBtn.clicked.connect(self.reject)\n self.setWindowTitle(self.title)\n\n self.updateParmsEdit()\n self.compute()\n QTimer.singleShot(5000, self.istest)", "def form_dictionary_by_pd(global_obj) -> dict:\n if isinstance(global_obj, Pd):\n global_obj.form_object()\n\n ddict = {}\n chi2, diffrn_radiation = None, None\n exclude, pd_background = None, None\n pd_instr_reflex_asymmetry, pd_instr_resolution = None, None,\n pd_meas, pd_peak = None, None\n pd_proc, phase = None, None\n range_, refine_ls = None, None\n refln, refln_susceptibility = None, None\n setup = None\n\n l_obj = take_items_by_class(global_obj, (Setup, ))\n if len(l_obj) > 0:\n setup = l_obj[0]\n\n l_obj = take_items_by_class(global_obj, (DiffrnRadiation, ))\n if len(l_obj) > 0:\n diffrn_radiation = l_obj[0]\n\n l_obj = take_items_by_class(global_obj, (ReflnL, ))\n if len(l_obj) > 0:\n refln = l_obj\n\n l_obj = take_items_by_class(global_obj, (ReflnSusceptibilityL, ))\n if len(l_obj) > 0:\n refln_susceptibility = l_obj\n\n\n ddict[\"name\"] = global_obj.get_name()\n if setup is not None:\n ddict[\"magnetic_field\"] = numpy.atleast_1d(setup.field)\n ddict[\"offset_ttheta\"] = numpy.atleast_1d(setup.offset_ttheta)\n ddict[\"wavelength\"] = numpy.atleast_1d(setup.wavelength)\n\n if diffrn_radiation is not None:\n ddict[\"beam_polarization\"] = numpy.atleast_1d(diffrn_radiation.polarization)\n ddict[\"flipper_efficiency\"] = numpy.atleast_1d(diffrn_radiation.efficiency)\n\n if refln is not None:\n for refln_phase in refln:\n phase_name = refln_phase.loop_name\n if (refln_phase.is_attribute(\"index_h\") and refln_phase.is_attribute(\"index_k\") and refln_phase.is_attribute(\"index_l\")):\n index_hkl = numpy.array([refln_phase.index_h, refln_phase.index_k, refln_phase.index_l], dtype=int)\n ddict[f\"index_hkl_{phase_name:}\"] = index_hkl\n if refln_phase.is_attribute(\"f_calc\"):\n f_calc = numpy.array(refln_phase.f_calc, dtype=complex)\n ddict[f\"f_nucl_{phase_name:}\"] = f_calc\n if (refln_phase.is_attribute(\"a_calc\") and refln_phase.is_attribute(\"b_calc\")):\n a_calc = numpy.array(refln_phase.a_calc, dtype=complex)\n b_calc = numpy.array(refln_phase.b_calc, dtype=complex)\n ddict[f\"f_nucl_{phase_name:}\"] = a_calc + 1j*b_calc\n\n if refln_susceptibility is not None:\n for refln_phase in refln_susceptibility:\n phase_name = refln_phase.loop_name\n if (refln_phase.is_attribute(\"index_h\") and refln_phase.is_attribute(\"index_k\") and 
refln_phase.is_attribute(\"index_l\")):\n index_hkl = numpy.array([refln_phase.index_h, refln_phase.index_k, refln_phase.index_l], dtype=int)\n ddict[f\"index_hkl_{phase_name:}\"] = index_hkl\n if refln_phase.is_attribute(\"chi_11_calc\"):\n chi_11 = numpy.array(refln_phase.chi_11_calc, dtype=complex)\n chi_12 = numpy.array(refln_phase.chi_12_calc, dtype=complex)\n chi_13 = numpy.array(refln_phase.chi_13_calc, dtype=complex)\n chi_21 = numpy.array(refln_phase.chi_21_calc, dtype=complex)\n chi_22 = numpy.array(refln_phase.chi_22_calc, dtype=complex)\n chi_23 = numpy.array(refln_phase.chi_23_calc, dtype=complex)\n chi_31 = numpy.array(refln_phase.chi_31_calc, dtype=complex)\n chi_32 = numpy.array(refln_phase.chi_32_calc, dtype=complex)\n chi_33 = numpy.array(refln_phase.chi_33_calc, dtype=complex)\n\n ddict[f\"sft_ccs_{phase_name:}\"] = numpy.stack([\n chi_11, chi_12, chi_13, chi_21, chi_22, chi_23, chi_31, chi_32, chi_33], axis=0)\n\n return ddict", "def __init__(self, parent=None):\n QtGui.QWidget.__init__(self, parent)\n \n self.setWindowTitle('The Visual Climate Data Analysis Tools - (VCDAT)')\n layout = QtGui.QVBoxLayout()\n self.setLayout(layout)\n\n # Init Menu Widget\n self.menuWidget = QMenuWidget(self)\n\n # Init File Widget\n vsplitter = QtGui.QSplitter(QtCore.Qt.Vertical) \n fileWidget = QLabeledWidgetContainer(QCDATFileWidget(),\n 'FILE VARIABLES')\n vsplitter.addWidget(fileWidget)\n\n # Init Defined Variables Widget\n definedVar = QLabeledWidgetContainer(QDefinedVariable(),\n 'DEFINED VARIABLES')\n vsplitter.addWidget(definedVar)\n hsplitter = QtGui.QSplitter(QtCore.Qt.Horizontal)\n hsplitter.addWidget(vsplitter)\n\n # Init Var Plotting Widget\n varView = QLabeledWidgetContainer(QVariableView(),\n 'PLOTTING')\n hsplitter.addWidget(varView)\n hsplitter.setStretchFactor(1, 1)\n layout.addWidget(hsplitter)\n\n # Init guiController\n guiController = GuiController(fileWidget.getWidget(),\n definedVar.getWidget(),\n varView.getWidget())\n guiController.initTeachingCommands()\n self.guiController = guiController # So guicontroller doesn't get garbage collected\n\n # Connect signals between self & GuiController\n self.connect(self, QtCore.SIGNAL('setRecordCommands'),\n guiController.setRecordCommands)\n self.connect(self, QtCore.SIGNAL('viewTeachingCommands'),\n guiController.viewTeachingCommands)\n self.connect(self, QtCore.SIGNAL('closeTeachingCommands'),\n guiController.closeTeachingCommands) \n\n # Connect Signals between QVariableView & QDefinedVariable\n varView.connect(definedVar.getWidget(), QtCore.SIGNAL('selectDefinedVariableEvent'),\n varView.getWidget().selectDefinedVariableEvent)\n varView.connect(definedVar.getWidget(), QtCore.SIGNAL('setupDefinedVariableAxes'),\n varView.getWidget().setupDefinedVariableAxes)\n definedVar.connect(varView.getWidget(), QtCore.SIGNAL('plotPressed'),\n definedVar.getWidget().defineQuickplot)\n definedVar.connect(varView.getWidget(), QtCore.SIGNAL('defineVariable'),\n definedVar.getWidget().defineVariable)\n\n # Connect Signals between QFileWidget & QVariableView\n varView.connect(fileWidget.getWidget(), QtCore.SIGNAL('variableChanged'),\n varView.getWidget().setupDefinedVariableAxes)\n varView.connect(fileWidget.getWidget(), QtCore.SIGNAL('defineVariableEvent'),\n varView.getWidget().defineVariableEvent)", "def print_design(x, D):\n\n N = round(x[0])\n ds = x[1]\n ws = x[2]\n wc = x[3]\n lc = x[4]\n g = x[5]\n\n # compute mass\n M = 2.0*(2.0*wc+ws+ds)*lc*wc*D.rowmc + \\\n (2*lc+2*wc+np.pi*ds)*ds*ws*D.kpf*D.rowwc\n # compute loss at rated 
current\n Prt = (2*lc+2*wc+np.pi*ds)*(N*D.irt) ** 2/(ds*ws*D.kpf*D.sigmawc)\n # compute inductance\n L = D.mu0*lc*wc*N ** 2/(2*g)\n # compute the flux density\n Brt = D.mu0*N*D.irt/(2*g)\n # current density\n Jrt = N*D.irt/(ws*ds*D.kpf)\n print('Design Data')\n print(f'Turns = {N}')\n print(f'Slot depth (m) = {ds}')\n print(f'Slot width (m) = {ws}')\n print(f'Core width (m) = {wc}')\n print(f'Core length (m) = {lc}')\n print(f'Air gap (m) = {g}')\n print(' ')\n print('Design Metrics')\n print(f'Mass (kg) = {M}')\n print(f'Loss at rated current (W) = {Prt}')\n print(' ')\n print('Constrained Quantities')\n print(f'Inductance (H) = {L}')\n print(f'Flux Density at Rated Current (T) = {Brt}')\n print(f'Current Density Rated Current (A/m**2) = {Jrt}')", "def GetToolBarDockOffsets(docks):\r\n\r\n top_left = wx.Size(0, 0)\r\n bottom_right = wx.Size(0, 0)\r\n\r\n for dock in docks:\r\n if dock.toolbar:\r\n dock_direction = dock.dock_direction\r\n if dock_direction == AUI_DOCK_LEFT:\r\n top_left.x += dock.rect.width\r\n bottom_right.x += dock.rect.width\r\n\r\n elif dock_direction == AUI_DOCK_TOP:\r\n top_left.y += dock.rect.height\r\n bottom_right.y += dock.rect.height\r\n\r\n elif dock_direction == AUI_DOCK_RIGHT:\r\n bottom_right.x += dock.rect.width\r\n \r\n elif dock_direction == AUI_DOCK_BOTTOM:\r\n bottom_right.y += dock.rect.height\r\n\r\n return top_left, bottom_right", "def SmartShrink(self, docks, direction):\r\n\r\n sashSize = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n clientSize = self._frame.GetClientSize()\r\n ourDocks = FindDocks(docks, direction, -1, -1)\r\n oppositeDocks = FindOppositeDocks(docks, direction)\r\n oppositeSize = self.GetOppositeDockTotalSize(docks, direction)\r\n ourSize = 0\r\n\r\n for dock in ourDocks:\r\n ourSize += dock.size\r\n\r\n if not dock.toolbar:\r\n ourSize += sashSize\r\n \r\n shrinkSize = ourSize + oppositeSize\r\n\r\n if direction == AUI_DOCK_TOP or direction == AUI_DOCK_BOTTOM:\r\n shrinkSize -= clientSize.y\r\n else:\r\n shrinkSize -= clientSize.x\r\n\r\n if shrinkSize <= 0:\r\n return docks\r\n\r\n # Combine arrays\r\n for dock in oppositeDocks:\r\n ourDocks.append(dock)\r\n \r\n oppositeDocks = []\r\n\r\n for dock in ourDocks:\r\n if dock.toolbar or not dock.resizable:\r\n continue\r\n\r\n dockRange = dock.size - dock.min_size\r\n\r\n if dock.min_size == 0:\r\n dockRange -= sashSize\r\n if direction == AUI_DOCK_TOP or direction == AUI_DOCK_BOTTOM:\r\n dockRange -= caption_size\r\n \r\n if dockRange >= shrinkSize:\r\n \r\n dock.size -= shrinkSize\r\n return docks\r\n \r\n else:\r\n \r\n dock.size -= dockRange\r\n shrinkSize -= dockRange\r\n \r\n return docks", "def DockFixed(self, b=True):\r\n\r\n return self.SetFlag(self.optionDockFixed, b)", "def do_dock6_flexible(self, ligand_path, force_rerun=False):\n log.debug('{}: running DOCK6...'.format(self.id))\n\n ligand_name = os.path.basename(ligand_path).split('.')[0]\n in_name = op.join(self.dock_dir, \"{}_{}_flexdock.in\".format(self.id, ligand_name))\n out_name = op.join(self.dock_dir, \"{}_{}_flexdock.out\".format(self.id, ligand_name))\n\n conformers_out = op.join(self.dock_dir, '{}_{}_flexdock_conformers.mol2'.format(self.id, ligand_name))\n scored_out = op.join(self.dock_dir, '{}_{}_flexdock_scored.mol2'.format(self.id, ligand_name))\n ranked_out = op.join(self.dock_dir, '{}_{}_flexdock_ranked.mol2'.format(self.id, ligand_name))\n\n if ssbio.utils.force_rerun(flag=force_rerun, outfile=ranked_out):\n with open(in_name, \"w\") 
as f:\n dock_text = \"\"\"ligand_atom_file {}\nlimit_max_ligands no\nskip_molecule no\nread_mol_solvation no\ncalculate_rmsd no\nuse_database_filter no\norient_ligand yes\nautomated_matching yes\nreceptor_site_file {}\nmax_orientations 500\ncritical_points no\nchemical_matching no\nuse_ligand_spheres no\nuse_internal_energy yes\ninternal_energy_rep_exp 12\nflexible_ligand yes\nuser_specified_anchor no\nlimit_max_anchors no\nmin_anchor_size 5\npruning_use_clustering yes\npruning_max_orients 100\npruning_clustering_cutoff 100\npruning_conformer_score_cutoff 100\nuse_clash_overlap no\nwrite_growth_tree no\nbump_filter yes\nbump_grid_prefix {}\nscore_molecules yes\ncontact_score_primary no\ncontact_score_secondary no\ngrid_score_primary yes\ngrid_score_secondary no\ngrid_score_rep_rad_scale 1\ngrid_score_vdw_scale 1\ngrid_score_es_scale 1\ngrid_score_grid_prefix {}\nmultigrid_score_secondary no\ndock3.5_score_secondary no\ncontinuous_score_secondary no\ndescriptor_score_secondary no\ngbsa_zou_score_secondary no\ngbsa_hawkins_score_secondary no\nSASA_descriptor_score_secondary no\namber_score_secondary no\nminimize_ligand yes\nminimize_anchor yes\nminimize_flexible_growth yes\nuse_advanced_simplex_parameters no\nsimplex_max_cycles 1\nsimplex_score_converge 0.1\nsimplex_cycle_converge 1.0\nsimplex_trans_step 1.0\nsimplex_rot_step 0.1\nsimplex_tors_step 10.0\nsimplex_anchor_max_iterations 500\nsimplex_grow_max_iterations 500\nsimplex_grow_tors_premin_iterations 0\nsimplex_random_seed 0\nsimplex_restraint_min yes\nsimplex_coefficient_restraint 10.0\natom_model all\nvdw_defn_file {}\nflex_defn_file {}\nflex_drive_file {}\nligand_outfile_prefix {}_{}_flexdock\nwrite_orientations no\nnum_scored_conformers 20\nwrite_conformations yes\ncluster_conformations yes\nrank_ligands yes\n \"\"\".format(ligand_path, op.basename(self.sphsel_path), op.splitext(op.basename(self.grid_path))[0],\n op.splitext(op.basename(self.grid_path))[0], self.amb_file, self.flex1_file, self.flex2_file,\n self.id, ligand_name)\n\n f.write(dock_text)\n\n os.chdir(self.dock_dir)\n cmd = \"dock6 -i {} -o {} -v\".format(in_name, out_name)\n os.system(cmd)\n\n if ssbio.utils.is_non_zero_file(ranked_out):\n self.dock_flexible_outfile = out_name\n self.dock_flexible_conformers_result = conformers_out\n self.dock_flexible_scored_result = scored_out\n log.debug('{}: successful docking!'.format(self.dock_flexible_outfile))\n else:\n log.error('{}+{}: empty DOCK6 ranked file, execution error (or ligand failed to dock)'.format(self.id,\n op.basename(ligand_path)))", "def port2_docking_date(self, port2_docking_date):\n\n self._port2_docking_date = port2_docking_date", "def _get_dc_offset(self):\n # apply this knowledge to reshape the spectroscopic values\n # remember to reshape such that the dimensions are arranged in reverse order (slow to fast)\n spec_vals_nd, success = reshape_to_n_dims(self._sho_spec_vals[self._sho_all_but_forc_inds,\n self._current_sho_spec_slice],\n h5_spec=self._sho_spec_inds[self._sho_all_but_forc_inds,\n self._current_sho_spec_slice])\n # This should result in a N+1 dimensional matrix where the first index contains the actual data\n # the other dimensions are present to easily slice the data\n spec_labels_sorted = np.hstack(('Dim', self.h5_main.spec_dim_labels))\n if self._verbose:\n print('Spectroscopic dimensions sorted by rate of change:')\n print(spec_labels_sorted)\n # slice the N dimensional dataset such that we only get the DC offset for default values of other dims\n fit_dim_pos = 
np.argwhere(spec_labels_sorted == self._fit_dim_name)[0][0]\n # fit_dim_slice = list()\n # for dim_ind in range(spec_labels_sorted.size):\n # if dim_ind == fit_dim_pos:\n # fit_dim_slice.append(slice(None))\n # else:\n # fit_dim_slice.append(slice(0, 1))\n\n fit_dim_slice = [fit_dim_pos]\n for idim, dim in enumerate(spec_labels_sorted[1:]):\n if dim == self._fit_dim_name:\n fit_dim_slice.append(slice(None))\n fit_dim_slice[0] = idim\n elif dim in ['FORC', 'FORC_repeat', 'FORC_Cycle']:\n continue\n else:\n fit_dim_slice.append(slice(0, 1))\n\n if self._verbose:\n print('slice to extract Vdc:')\n print(fit_dim_slice)\n\n self.fit_dim_vec = np.squeeze(spec_vals_nd[tuple(fit_dim_slice)])\n\n return", "def setup_evolution(self, evo_data):\n self.popup = tk.Tk()\n ws = self.popup.winfo_screenwidth()\n hs = self.popup.winfo_screenheight()\n w = 900\n h = 700\n x = (ws / 2) - (w / 2)\n y = (hs / 3) - (h / 3)\n self.popup.geometry('%dx%d+%d+%d' % (w, h, x, y))\n self.popup.wm_title(self.lang.VM_title)\n label_id = ttk.Label(self.popup, text=self.lang.VM_id + str(evo_data[\"id\"]), font=FONT_TITOL)\n label_id.pack(pady=0)\n label_location = ttk.Label(self.popup, text=self.lang.VM_location + str(evo_data[\"location\"]), font=FONT_TITOL)\n label_location.pack(pady=0)\n width = 0.3\n f = Figure(figsize=(7, 3), dpi=100)\n\n ax = f.add_subplot(121)\n ax.set_ylabel(self.lang.VM_cm)\n ax.set_title(self.lang.VM_perimeter_title)\n #ax.xticks(rotation=90)\n ax.bar(evo_data[\"date\"], evo_data[\"perimeter\"], width)\n\n ax1 = f.add_subplot(122)\n ax1.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax1.set_title(self.lang.VM_perimeter_area)\n ax1.bar(evo_data[\"date\"], evo_data[\"area_total\"], width)\n\n f.autofmt_xdate()\n f.tight_layout(pad=1, w_pad=0, h_pad=0)\n\n canvas = FigureCanvasTkAgg(f, master=self.popup)\n canvas.draw()\n canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1)\n\n f1 = Figure(figsize=(7, 3), dpi=100)\n\n ax2 = f1.add_subplot(131)\n ax2.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax2.set_title(self.lang.VM_granulation)\n ax2.bar(evo_data[\"date\"], evo_data[\"granulation\"], width, color=\"orange\")\n\n ax3 = f1.add_subplot(132)\n ax3.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax3.set_title(self.lang.VM_slough)\n ax3.bar(evo_data[\"date\"], evo_data[\"slough\"], width, color=\"orange\")\n\n ax4 = f1.add_subplot(133)\n ax4.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax4.set_title(self.lang.VM_necrosis)\n ax4.bar(evo_data[\"date\"], evo_data[\"necrosis\"], width, color=\"orange\")\n\n f1.autofmt_xdate()\n f1.tight_layout(pad=1, w_pad=0, h_pad=0)\n\n canvas1 = FigureCanvasTkAgg(f1, master=self.popup)\n canvas1.draw()\n canvas1.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1)", "def docking_vina(self, ligand_file, docking_pdbqt_file, docking_log_file):\n\n run_line = '%s' % self.docking_program\n run_line += ' --config %s' % self.dock_config_file\n run_line += ' --ligand %s' % ligand_file\n run_line += ' --out %s' % docking_pdbqt_file\n if self.output_save:\n run_line += ' --log %s' % (docking_log_file)\n e = None\n try:\n result = subprocess.check_output(run_line.split(),\n stderr=subprocess.STDOUT,\n timeout=self.timeout_dock,\n universal_newlines=True)\n except Exception as e:\n return [99.999], e\n\n result_lines = result.split('\\n')\n\n check_result = False\n affinity_list = list()\n for result_line in result_lines:\n if result_line.startswith('-----+'):\n check_result = True\n continue\n if not check_result:\n 
continue\n if result_line.startswith('Writing output'):\n break\n if result_line.startswith('Refine time'):\n break\n lis = result_line.strip().split()\n if not lis[0].isdigit():\n break\n# mode = int(lis[0])\n affinity = float(lis[1])\n affinity_list += [affinity]\n if len(affinity_list) == 0:\n e = 'WARNING: Could not find any conformations.'\n return [99.999], e\n return affinity_list, e", "def printDesignVariables(self):\n print(\"-\" * 85)\n print(\"{:>30}{:>20}{:>20}\".format(\"CSM Design Parameter\", \"Name\", \"Value\"))\n print(\"-\" * 85)\n for dvName in self.DVs:\n DV = self.DVs[dvName]\n print(f\"{DV.csmDesPmtr:>30}{DV.name:>20}{DV.value:>20}\")", "def port2_docked_time(self, port2_docked_time):\n\n self._port2_docked_time = port2_docked_time", "def plot_prec_value2(self):\n self.query_dict={'code':code2.value,'exchange':exchange2.value,\\\n 'structure':struct2.value,'element':element2.value,'properties':prop2.value}\n print ('POSTING', self.query_dict)\n self.query_api(endpoint='evk')\n\n layout_doc.children[4].children[1] = self.plot_pade_figure()", "def __init__(self):\r\n\r\n object.__init__(self)\r\n \r\n self.dock_direction = 0\r\n self.dock_layer = 0\r\n self.dock_row = 0\r\n self.size = 0\r\n self.min_size = 0\r\n self.resizable = True\r\n self.fixed = False\r\n self.toolbar = False\r\n self.rect = wx.Rect()\r\n self.panes = []", "def build_period_rdi_chart(nuts_totals_df, start_date=None,\n end_date=None, charts_label=None,\n elem_fig_id=None,\n vits_fig_id=None,\n macros_fig_id=None):\n # calc num days\n if start_date is not None and end_date is not None:\n delta = end_date - start_date\n num_days = float(delta.days)\n print(f'num days: {num_days}')\n else:\n num_days = 1.\n\n usr_life_stg = ''\n usr_type = ''\n usr_age = ''\n usr_active_lvl = \"\"\n if current_user.is_authenticated:\n usr_life_stg = current_user.lifestage_grp\n usr_type = current_user.person_type\n usr_age = current_user.age\n usr_active_lvl = current_user.active_level\n\n # df of nuts by category with field values as %\n elems_df = pd.DataFrame(columns=list(rdi_elems_dict.keys()))\n vits_df = pd.DataFrame(columns=list(rdi_vits_dict.keys()))\n macros_df = pd.DataFrame(columns=list(rdi_macros_dict.keys()))\n\n # fill row 0 of each nut_type df with percentages\n for idx, row in nuts_totals_df.iterrows():\n # todo: need to process and take out brackets, extra words\n cnf_nut = row['Name'].lower()\n cnf_nut = preprocess_cnf_nuts(cnf_nut)\n cnf_amt = float(row['Value'])\n # todo: take out micro symbol from units but not used as units\n # taken from dicts_arrs in def find_type\n cnf_units = row['Units']\n if '\\xb5g' in cnf_units:\n cnf_units = cnf_units.replace(\"\\xb5g\", \"ug\")\n nut_type, rdi_nut, multiplier = find_type(cnf_nut, cnf_elems_dicts)\n if nut_type == \"\":\n nut_type, rdi_nut, multiplier = find_type(cnf_nut, cnf_vits_dicts)\n if nut_type == \"\":\n nut_type, rdi_nut, multiplier = find_type(cnf_nut, cnf_macros_dicts)\n\n # get start and exclusive end idx of rdi_df\n start_idx, end_idx = get_lifestage_idxs(usr_type)\n if nut_type == 'element':\n elems_df = fill_nut_df(nut_type, start_idx, end_idx, usr_life_stg,\n cnf_nut, cnf_amt, multiplier,\n elems_df,\n usr_type, usr_age, usr_active_lvl, num_days)\n\n elif nut_type == 'vitamin':\n vits_df = fill_nut_df(nut_type, start_idx, end_idx, usr_life_stg,\n cnf_nut, cnf_amt, multiplier,\n vits_df,\n usr_type, usr_age, usr_active_lvl, num_days)\n\n elif nut_type == 'macronutrient':\n macros_df = fill_nut_df(nut_type, start_idx, end_idx, 
usr_life_stg,\n cnf_nut, cnf_amt, multiplier,\n macros_df,\n usr_type, usr_age, usr_active_lvl, num_days)\n\n # make bar charts and html.Div containing them, return\n # style chart\n elem_colors = color_bars(elems_df)\n vits_colors = color_bars(vits_df)\n macros_colors = color_bars(macros_df)\n\n fig_elems = go.Figure(data=[go.Bar(\n x=list(elems_df.columns),\n y=list(elems_df.iloc[0]),\n marker_color=elem_colors\n )])\n fig_elems.update_layout(title_text=f'elements for{charts_label}')\n fig_vits = go.Figure(data=[go.Bar(x=list(vits_df.columns),\n y=list(vits_df.iloc[0]),\n marker_color=vits_colors)])\n fig_vits.update_layout(title_text=f'vitamins for{charts_label}')\n fig_macros = go.Figure(data=[go.Bar(x=list(macros_df.columns),\n y=list(macros_df.iloc[0]),\n marker_color=macros_colors)])\n fig_macros.update_layout(title_text=f\"macronutrients for{charts_label}\")\n\n figs_div = html.Div([\n dcc.Graph(\n figure=fig_elems,\n id=elem_fig_id\n ),\n dcc.Graph(\n figure=fig_vits,\n id=vits_fig_id\n ),\n dcc.Graph(\n figure=fig_macros,\n id=macros_fig_id\n )\n ])\n return figs_div", "def is_dock_msg(msg):\n return msg & 0xF0 == Dock.BASE", "def data(self):\n dico = {}\n for d_ in [\"flux\",\"var\",\"lbda\",\"mjd\",\"bandname\",\"zp\",\"zpsys\"]:\n dico[d_] = self.get(d_)\n return dico", "def GetOppositeDockTotalSize(self, docks, direction):\r\n \r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n minSizeMax = 0\r\n result = sash_size\r\n vertical = False\r\n\r\n if direction in [AUI_DOCK_TOP, AUI_DOCK_BOTTOM]:\r\n vertical = True\r\n\r\n # Get minimum size of the most inner area\r\n for tmpDock in docks:\r\n \r\n if tmpDock.dock_layer != 0:\r\n continue\r\n\r\n if tmpDock.dock_direction != AUI_DOCK_CENTER and tmpDock.IsVertical() != vertical:\r\n continue\r\n\r\n for tmpPane in tmpDock.panes:\r\n \r\n minSize = pane_border_size*2 - sash_size\r\n\r\n if vertical:\r\n minSize += tmpPane.min_size.y + caption_size\r\n else:\r\n minSize += tmpPane.min_size.x\r\n\r\n if minSize > minSizeMax:\r\n minSizeMax = minSize\r\n \r\n result += minSizeMax\r\n\r\n # Get opposite docks\r\n oppositeDocks = FindOppositeDocks(docks, direction)\r\n\r\n # Sum size of the opposite docks and their sashes\r\n for dock in oppositeDocks:\r\n result += dock.size\r\n # if it's not a toolbar add the sash_size too\r\n if not dock.toolbar:\r\n result += sash_size\r\n \r\n return result", "def PMTandPiezoPlot(datadir,run,event,gain): \n en = event\n mu = gain\n e = sbc.DataHandling.GetSBCEvent.GetEvent(datadir+'/'+run,en)\n print(e[\"fastDAQ\"].keys())\n cgate = e[\"fastDAQ\"][\"CAMgate\"]\n dcam = np.diff(cgate)\n \n p0=e[\"fastDAQ\"][\"Piezo1\"]\n p1 = e[\"fastDAQ\"][\"Piezo2\"]\n fdt = e[\"fastDAQ\"][\"time\"]\n runreconpath = \"/pnfs/coupp/persistent/grid_output/SBC-17/output/%s/\"%run\n pmtdiffs = []\n diffs = []\n \n camOnTimes = [fdt[i] for i in range(len(dcam)) if dcam[i] < -0.5]\n camOffTimes = [fdt[i] for i in range(len(dcam)) if dcam[i] > 0.5]\n print(len(camOnTimes))\n print(len(camOffTimes))\n \n acousticfilename = runreconpath+\"AcousticAnalysis_%s.bin\"%run\n a = sbc.DataHandling.ReadBinary.ReadBlock(acousticfilename)\n bubt0 = a[\"bubble_t0\"]\n \n pmttracetime = e[\"PMTtraces\"][\"t0_sec\"][:,0]+e[\"PMTtraces\"][\"t0_frac\"][:,0]\n d=sbc.AnalysisModules.PMTfastDAQalignment.PMTandFastDAQalignment(e)\n pmtalign = d[\"PMT_trigt0_sec\"]+d[\"PMT_trigt0_frac\"]\n 
tracetimes = pmttracetime - pmtalign\n at0 = bubt0[en,0]\n at0_1 = bubt0[en,1]\n \n allxyzfname = \"/pnfs/coupp/persistent/grid_output/SBC-17/output/SimpleXYZ_all.bin\"\n xyzf = sbc.DataHandling.ReadBinary.ReadBlock(allxyzfname)\n indices = [i for i,x in enumerate(xyzf[\"runid\"]) if str(x[0])+\"_\"+str(x[1]) == run]\n xyz_reconstructed = True\n if len(indices) > 0:\n runposreco = {\"ev\":[xyzf[\"ev\"][indices]],\"x\":[xyzf[\"bubX\"][indices]],\n \"y\":[xyzf[\"bubY\"][indices]],\"z\":[xyzf[\"bubZ\"][indices]]}\n z = runposreco[\"z\"][0][int(int(en))]\n else:\n print(\"no handscan?\")\n z = 1.5\n xyz_reconstructed = False\n lag_expected = (-23.387649*z - 261.020495)*1e-6 # fit from other analysis\n t0_expected_p0 = at0 + lag_expected\n t0_expected_p1 = at0_1 + lag_expected\n \n i=0\n candidates = []\n candidate_times=[]\n for t in (tracetimes-at0):\n \n if t<0.2 and t>-0.2:\n lastCamOff = 0\n for k in range(len(camOffTimes)):\n if t+at0 > camOffTimes[k]:\n lastCamOff = camOffTimes[k]\n elif t+at0 < camOffTimes[k]:\n break\n if t+at0-lastCamOff > 25e-6:\n \n pmtdiffs.append(t)\n trace = np.fabs(e[\"PMTtraces\"][\"traces\"][i][0])\n if max(trace) == 128:\n trace = pi.stitchTraces(trace,np.fabs(e[\"PMTtraces\"][\"traces\"][i][1]))\n dt = e[\"PMTtraces\"][\"dt\"][i][0]\n #baseline = np.mean(trace[0:50])\n #trace = trace - baseline\n [phe,n,totInt,pktimes] = pi.SBC_pulse_integrator_bressler(trace,dt)\n \n if phe != None:\n phe /= mu\n candidates.append(phe)\n candidate_times.append(t)\n i+=1\n candidate_phe = 0\n the_index = 0\n i=0\n near_trace_indices = []\n for t in candidate_times:\n if t > -500e-6 and t <0:\n near_trace_indices.append(list(tracetimes-at0).index(t))\n if candidates[i]>candidate_phe:\n candidate_phe = candidates[i]\n the_index = i\n i+=1\n \n if len(candidates) != 0:\n if max(candidates)>0:\n diffs.append(candidate_times[candidates.index(max(candidates))])\n fig,ax1 = plt.subplots()\n ax2 = ax1.twinx()\n ax1.plot(fdt,p0,'b',alpha=0.6, label = 'piezo 0')\n ax1.plot(fdt,p1,'k',alpha=0.2, label= 'piezo 1')\n for i in range(len(candidates)):\n if i == the_index:\n ax2.plot([candidate_times[i]+at0,candidate_times[i]+at0],[0,candidates[i]],'r',lw=4)\n else:\n ax2.plot([candidate_times[i]+at0,candidate_times[i]+at0],[0,candidates[i]],'y',lw=4)\n #ax2.plot([min(candidate_times),max(candidate_times)],[0,0],linewidth=2)\n ax1.plot([at0,at0],[-0.5,0.5],'b',linewidth=2, label = 'acoustic t0, p0')\n ax1.plot([at0_1,at0_1],[-0.5,0.5],'k',linewidth=2, label = 'acoustic t0, p1')\n \"\"\"\n if xyz_reconstructed:\n ax1.plot([t0_expected_p0,t0_expected_p0],[-0.5,0.5],'b:',linewidth=2, label = 'expected PMT t0, p0')\n ax1.plot([t0_expected_p1,t0_expected_p1],[-0.5,0.5],'k:',linewidth=2, label = 'expected PMT t0, p1')\n else:\n ax1.plot([t0_expected_p0,t0_expected_p0],[-0.5,0.5],'b:',linewidth=2, label = 'expected PMT t0, p0, center of chamber')\n ax1.plot([t0_expected_p1,t0_expected_p1],[-0.5,0.5],'k:',linewidth=2, label = 'expected PMT t0, p1, center of chamber')\n \"\"\"\n ax1.plot(fdt,cgate,'c')\n ax1.plot(fdt[:-1],dcam,'m')\n ax2.set_ylabel('pmt signal (phe)',fontsize=20)\n ax1.set_xlabel('time (s)',fontsize=20)\n ax1.set_ylabel('Acoustic signa(V)',fontsize=20)\n ax1.set_ylim([min(p1),max(p1)])\n ax2.set_xlim([-0.1,0.1])\n #ax2.set_ylim([0,5])\n ax1.legend()\n plt.show\n \n for j in near_trace_indices:\n trace = e[\"PMTtraces\"][\"traces\"][j][0]\n dt = e[\"PMTtraces\"][\"dt\"]\n dt_tr = dt[j][0]\n tPMT = np.arange(len(trace))*dt_tr\n plt.figure()\n plt.plot(tPMT,trace)\n plt.xlabel(\"t 
(s)\")\n plt.ylabel(\"PMT signal\")\n plt.show\n \n plt.figure()\n plt.plot(e[\"fastDAQ\"][\"time\"],e[\"fastDAQ\"][\"VetoCoinc\"])\n plt.ylabel(\"Veto Coincidence signal\",fontsize=18)\n plt.xlabel(\"time (s)\")\n plt.show", "def __init__(\n self, ctx, coll_j_range=Range('J', 0, Symbol('Jmax') + 1),\n coll_m_range=Range('M'),\n coll_j_dumms=tuple(\n Symbol('J{}'.format(i)) for i in range(1, 30)\n ),\n coll_m_dumms=tuple(\n Symbol('M{}'.format(i)) for i in range(1, 30)\n ),\n tilde_range=Range(r'\\tilde{Q}', 0, Symbol('Ntilde')),\n form_tilde=form_tilde,\n m_range=Range('m'), form_m=form_m, **kwargs\n ):\n super().__init__(ctx, **kwargs)\n\n # Convenient names for quantum number access functions inside drudge\n # scripts.\n self.set_name(\n n_=NOf, NOf=NOf, l_=LOf, LOf=LOf, j_=JOf, JOf=JOf,\n tilde_=TildeOf, TildeOf=TildeOf, m_=MOf, MOf=MOf,\n pi_=PiOf, PiOf=PiOf\n )\n\n self.coll_j_range = coll_j_range\n self.coll_m_range = coll_m_range\n self.coll_j_dumms = coll_j_dumms\n self.coll_m_dumms = coll_m_dumms\n self.set_dumms(coll_j_range, coll_j_dumms)\n self.set_dumms(coll_m_range, coll_m_dumms)\n\n self.tilde_range = tilde_range\n self.form_tilde = form_tilde\n self.tilde_dumms = tuple(form_tilde(i) for i in self.qp_dumms)\n self.set_dumms(tilde_range, self.tilde_dumms)\n\n self.m_range = m_range\n self.form_m = form_m\n self.m_dumms = tuple(form_m(i) for i in self.qp_dumms)\n self.set_dumms(m_range, self.m_dumms)\n\n self.add_resolver_for_dumms()\n\n # Add utility about CG coefficients and related things.\n self.set_name(\n CG=CG, Wigner3j=Wigner3j, Wigner6j=Wigner6j, Wigner9j=Wigner9j\n )\n\n self._am_sum_simplifiers = BCastVar(self.ctx, {\n # TODO: Add more simplifications here.\n 2: [_sum_2_3j_to_delta],\n 5: [_sum_4_3j_to_6j]\n })\n self.set_tensor_method('simplify_am', self.simplify_am)\n\n # All expressions for J/j, for merging of simple terms with factors in\n # J/j-hat style.\n self._j_exprs = frozenset(itertools.chain(self.coll_j_dumms, (\n JOf(i) for i in self.tilde_dumms\n )))\n\n # For angular momentum coupling.\n self.set_tensor_method('do_amc', self.do_amc)\n\n # Special simplification routines.\n self.set_tensor_method('simplify_pono', self.simplify_pono)\n self.set_tensor_method('deep_simplify', self.deep_simplify)\n self.set_tensor_method('merge_j', self.merge_j)", "def DoDropFloatingPane(self, docks, panes, target, pt):\r\n \r\n screenPt = self._frame.ClientToScreen(pt)\r\n paneInfo = self.PaneHitTest(panes, pt)\r\n\r\n if paneInfo.IsMaximized():\r\n return False, target\r\n\r\n if paneInfo.window is None:\r\n return False, target\r\n\r\n # search the dock guides.\r\n # reverse order to handle the center first.\r\n for i in xrange(len(self._guides)-1, -1, -1):\r\n guide = self._guides[i]\r\n\r\n # do hit testing on the guide\r\n dir = guide.host.HitTest(screenPt.x, screenPt.y)\r\n\r\n if dir == -1: # point was outside of the dock guide\r\n continue\r\n\r\n if dir == wx.ALL: # target is a single dock guide\r\n return self.DoDropLayer(docks, target, guide.dock_direction)\r\n \r\n elif dir == wx.CENTER:\r\n\r\n if not target.IsNotebookDockable():\r\n continue\r\n if not paneInfo.IsNotebookDockable() and not paneInfo.IsNotebookControl():\r\n continue\r\n\r\n if not paneInfo.HasNotebook():\r\n \r\n # Add a new notebook pane with the original as a tab...\r\n self.CreateNotebookBase(panes, paneInfo)\r\n \r\n # Add new item to notebook\r\n target.NotebookPage(paneInfo.notebook_id)\r\n \r\n else:\r\n \r\n drop_pane = False\r\n drop_row = False\r\n\r\n insert_dir = 
paneInfo.dock_direction\r\n insert_layer = paneInfo.dock_layer\r\n insert_row = paneInfo.dock_row\r\n insert_pos = paneInfo.dock_pos\r\n\r\n if insert_dir == AUI_DOCK_CENTER:\r\n \r\n insert_layer = 0\r\n if dir == wx.LEFT:\r\n insert_dir = AUI_DOCK_LEFT\r\n elif dir == wx.UP:\r\n insert_dir = AUI_DOCK_TOP\r\n elif dir == wx.RIGHT:\r\n insert_dir = AUI_DOCK_RIGHT\r\n elif dir == wx.DOWN:\r\n insert_dir = AUI_DOCK_BOTTOM\r\n \r\n if insert_dir == AUI_DOCK_LEFT:\r\n \r\n drop_pane = (dir == wx.UP or dir == wx.DOWN)\r\n drop_row = (dir == wx.LEFT or dir == wx.RIGHT)\r\n if dir == wx.RIGHT:\r\n insert_row += 1\r\n elif dir == wx.DOWN:\r\n insert_pos += 1\r\n \r\n elif insert_dir == AUI_DOCK_RIGHT:\r\n \r\n drop_pane = (dir == wx.UP or dir == wx.DOWN)\r\n drop_row = (dir == wx.LEFT or dir == wx.RIGHT)\r\n if dir == wx.LEFT:\r\n insert_row += 1\r\n elif dir == wx.DOWN:\r\n insert_pos += 1\r\n \r\n elif insert_dir == AUI_DOCK_TOP:\r\n \r\n drop_pane = (dir == wx.LEFT or dir == wx.RIGHT)\r\n drop_row = (dir == wx.UP or dir == wx.DOWN)\r\n if dir == wx.DOWN:\r\n insert_row += 1\r\n elif dir == wx.RIGHT:\r\n insert_pos += 1\r\n \r\n elif insert_dir == AUI_DOCK_BOTTOM:\r\n \r\n drop_pane = (dir == wx.LEFT or dir == wx.RIGHT)\r\n drop_row = (dir == wx.UP or dir == wx.DOWN)\r\n if dir == wx.UP:\r\n insert_row += 1\r\n elif dir == wx.RIGHT:\r\n insert_pos += 1\r\n \r\n if paneInfo.dock_direction == AUI_DOCK_CENTER:\r\n insert_row = GetMaxRow(panes, insert_dir, insert_layer) + 1\r\n\r\n if drop_pane:\r\n return self.DoDropPane(panes, target, insert_dir, insert_layer, insert_row, insert_pos)\r\n\r\n if drop_row:\r\n return self.DoDropRow(panes, target, insert_dir, insert_layer, insert_row)\r\n \r\n return True, target\r\n \r\n return False, target", "def init_widget(self):\n super(WxDockPane, self).init_widget()\n d = self.declaration\n self.set_title(d.title)\n self.set_title_bar_visible(d.title_bar_visible)\n self.set_title_bar_orientation(d.title_bar_orientation)\n self.set_closable(d.closable)\n self.set_movable(d.movable)\n self.set_floatable(d.floatable)\n self.set_floating(d.floating)\n self.set_dock_area(d.dock_area)\n self.set_allowed_dock_areas(d.allowed_dock_areas)\n widget = self.widget\n widget.Bind(EVT_DOCK_PANE_FLOATED, self.on_floated)\n widget.Bind(EVT_DOCK_PANE_DOCKED, self.on_docked)\n widget.Bind(EVT_DOCK_PANE_CLOSED, self.on_closed)", "def phast_cmmd(self):\n temp = '{prog} -R {rho} -C {ecov} -E {elen} -N {chrom} -i MAF {maf} {model} > {wig}\\n'.format(**self.dict)\n return temp.format(fnum=self.fnum)", "def __init__(self, parent, direction=0):\r\n\r\n self._direction = direction\r\n\r\n style = wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP | \\\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER\r\n\r\n # Use of FRAME_SHAPED on wxMac causes the frame to be visible\r\n # breaking the docking hints.\r\n if wx.Platform != '__WXMAC__':\r\n style |= wx.FRAME_SHAPED\r\n\r\n AuiDockingGuide.__init__(self, parent, style=style, name=\"auiSingleDockTarget\")\r\n \r\n self.Hide()\r\n\r\n useAero = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_AERO_DOCKING_GUIDES\r\n useWhidbey = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_WHIDBEY_DOCKING_GUIDES\r\n \r\n self._useAero = useAero or useWhidbey\r\n self._valid = True\r\n \r\n if useAero:\r\n sizeX, sizeY = aeroguideSizeX, aeroguideSizeY\r\n elif useWhidbey:\r\n sizeX, sizeY = whidbeySizeX, whidbeySizeY\r\n else:\r\n sizeX, sizeY = guideSizeX, guideSizeY\r\n\r\n if direction not in [wx.TOP, wx.BOTTOM]:\r\n sizeX, sizeY = sizeY, sizeX\r\n\r\n if 
self._useAero:\r\n self.CreateShapesWithStyle(useWhidbey)\r\n \r\n if wx.Platform == \"__WXGTK__\":\r\n self.Bind(wx.EVT_WINDOW_CREATE, self.SetGuideShape)\r\n else:\r\n self.SetGuideShape()\r\n \r\n self.SetSize(self.region.GetBox().GetSize())\r\n else:\r\n self.SetSize((sizeX, sizeY))\r\n \r\n self.rect = wx.Rect(0, 0, sizeX, sizeY)\r\n\r\n if self._useAero:\r\n useAero = (useWhidbey and [2] or [1])[0]\r\n else:\r\n useAero = 0\r\n \r\n self.target = AuiDockingGuideWindow(self, self.rect, direction, False, useAero)", "def set_dock_area(self, dock_area):\n self.widget.SetDockArea(_DOCK_AREA_MAP[dock_area])", "def _d_converter(self):\n units = {'um':1e-6, 'mm':1e-3, 'inch':2.54e-2, 'in':2.54e-2,\\\n 'micron':1e-6, 'mil':2.54e-5, 'm':1.0}\n for i in self.stack:\n i.thickness = i.thickness*units[i.units]\n return", "def process_meter_message(self, d):\n dpid = int(d.get(\"dpid\", 0))\n dp = self.dpset.get(dpid)\n if not dp:\n return \"Datapath does not exist!\"\n\n ofproto = dp.ofproto\n parser = dp.ofproto_parser\n\n command = {\n 'add': ofproto.OFPMC_ADD,\n 'mod': ofproto.OFPMC_MODIFY,\n 'del': ofproto.OFPMC_DELETE,\n }\n cmd = command.get(d[\"operation\"], ofproto.OFPMC_ADD)\n\n meter_id = d[\"meter_id\"]\n\n flags = 0\n bands = []\n if \"flags\" in d: # Ryu's format\n print(d['flags'])\n for f in d['flags']:\n flags += 0x01 if f == 'KBPS' else 0\n flags += 0x02 if f == 'PKTPS' else 0\n flags += 0x04 if f == 'BURST' else 0\n flags += 0x08 if f == 'STATS' else 0\n\n for band in d[\"bands\"]:\n if band['type'] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band['rate'],\n burst_size=band['burst_size'])]\n elif band['type'] == 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band['rate'],\n burst_size=band['burst_size'], prec_level=band['prec_level'])]\n\n else: # FlowManager's format\n flags += 0x01 if d['OFPMF_KBPS'] else 0\n flags += 0x02 if d['OFPMF_PKTPS'] else 0\n flags += 0x04 if d['OFPMF_BURST'] else 0\n flags += 0x08 if d['OFPMF_STATS'] else 0\n\n # Flags must have KBPS or PKTPS\n flags = flags if (flags & 0x03) else (flags | 0x01)\n\n for band in d[\"bands\"]:\n #mtype = type_convert.get(band[0])\n if band[0] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band[1],\n burst_size=band[2])]\n elif band[0] == 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band[1],\n burst_size=band[2], prec_level=band[3])]\n\n # TODO: catch some errors\n meter_mod = parser.OFPMeterMod(dp, cmd, flags, meter_id, bands)\n try:\n dp.send_msg(meter_mod)\n except KeyError as e:\n return e.__repr__()\n except Exception as e:\n return e.__repr__()\n\n return \"Message sent successfully.\"", "def get_measure_par(input_object):\r\n input_object.measurement_strategy = ui.measurement_strategy.currentIndex()\r\n input_object.len_total = ui.total_length.text()\r\n input_object.frequency = ui.frequency.text()\r\n input_object.num_of_mea = ui.num_of_mea.text()\r\n input_object.len_step = ui.length_step.text()\r\n input_object.time_step = ui.time_step.text()\r\n input_object.temperature = ui.temperature.text()\r\n input_object.humidity = ui.humidity.text()\r\n input_object.na_average_factor = ui.na_average_facotr.value()\r\n input_object.multi_measure = ui.multi_measure.value()\r\n if ui.NA_state.text().strip() != '':\r\n input_object.na_state = ui.NA_state.text().strip()\r\n else:\r\n input_object.na_state = None", "def calc_spindle_psd_i(self, psd_bandwidth, zpad=False, zpad_len=3):\n \n print('Calculating power spectra (this may take a few minutes)...')\n 
self.metadata['spindle_analysis']['psd_dtype'] = 'raw_individual'\n self.metadata['spindle_analysis']['psd_method'] = 'multitaper'\n self.metadata['spindle_analysis']['psd_bandwidth'] = psd_bandwidth\n self.metadata['spindle_analysis']['zeropad'] = zpad\n self.metadata['spindle_analysis']['zeropad_len_sec'] = zpad_len\n sf = self.metadata['analysis_info']['s_freq']\n \n spindles_zpad = {}\n spindle_psd = {}\n spindle_multitaper_calcs = {}\n for chan in self.spindles:\n spindles_zpad[chan] = {}\n spindle_psd[chan] = {}\n # waveform resolution is dependent on length of signal, regardless of zero-padding\n spindle_multitaper_calcs[chan] = pd.DataFrame(columns=['spin_samples', 'spin_seconds', 'zpad_samples', 'zpad_seconds', 'waveform_resoultion_Hz', \n 'psd_resolution_Hz', 'N_taper_len', 'W_bandwidth', 'K_tapers'])\n spindle_multitaper_calcs[chan].index.name = 'spindle_num'\n \n if len(self.spindles[chan]) > 0:\n for x in self.spindles[chan]:\n # subtract mean to zero-center spindle for zero-padding\n data = self.spindles[chan][x].Raw.values - np.mean(self.spindles[chan][x].Raw.values)\n zpad_samples=0\n zpad_seconds=0\n tx=0\n \n # option to zero-pad the spindle\n if zpad:\n total_len = zpad_len*sf\n zpad_samples = total_len - len(data)\n zpad_seconds = zpad_samples/sf\n if zpad_samples > 0:\n padding = np.repeat(0, zpad_samples)\n data_pad = np.append(data, padding)\n else:\n spin_len = len(data)/sf\n print(f'Spindle {chan}:{x} length {spin_len} seconds longer than pad length {zpad_len}')\n data_pad = data\n \n # or leave as-is\n else:\n data_pad = data\n \n # record PS params [K = 2NW-1]\n spin_samples = len(data)\n spin_seconds = len(data)/sf\n waveform_res = 1/spin_seconds\n psd_res = 1/(len(data_pad)/sf)\n N_taper_len = len(data_pad)/sf\n W_bandwidth = psd_bandwidth\n K_tapers = int((2*N_taper_len*W_bandwidth)-1)\n spindle_multitaper_calcs[chan].loc[x] = [spin_samples, spin_seconds, zpad_samples, zpad_seconds, waveform_res, psd_res, N_taper_len, W_bandwidth, K_tapers]\n\n # calculate power spectrum\n try:\n pwr, freqs = psd_array_multitaper(data_pad, sf, adaptive=True, bandwidth=psd_bandwidth, fmax=25, \n normalization='full', verbose=0)\n except ValueError:\n print(f'Specified bandwidth too small for data length. Skipping spindle {chan}:{x}.')\n continue\n \n # convert to series & add to dict\n psd = pd.Series(pwr, index=freqs)\n spindle_psd[chan][x] = psd\n spindles_zpad[chan][x] = data_pad\n \n self.spindles_zpad = spindles_zpad\n self.spindle_multitaper_calcs = spindle_multitaper_calcs\n self.spindle_psd_i = spindle_psd\n print('Done. \\nSpectra stored in obj.spindle_psd_i. Calculations stored in obj.spindle_multitaper_calcs. Zero-padded spindle data in obj.spindles_zpad.\\n')" ]
[ "0.5546065", "0.5520014", "0.55192053", "0.53659904", "0.536205", "0.53281355", "0.53086746", "0.5241043", "0.5237947", "0.5195551", "0.51808137", "0.51035964", "0.50467604", "0.4982178", "0.49796942", "0.4977867", "0.49674752", "0.49330243", "0.49260616", "0.49172583", "0.49066922", "0.49011707", "0.48575395", "0.48568487", "0.48416606", "0.4838488", "0.48306945", "0.4828391", "0.48141962", "0.47997794", "0.4781479", "0.4746394", "0.47235575", "0.47103587", "0.47065914", "0.4676469", "0.46476442", "0.46258953", "0.46018448", "0.45859197", "0.45851326", "0.4584375", "0.4580987", "0.456227", "0.4557806", "0.45559666", "0.455524", "0.45262834", "0.4521255", "0.45201454", "0.45164013", "0.45101923", "0.44918126", "0.44907203", "0.44884565", "0.44831708", "0.4480437", "0.44774944", "0.44763723", "0.4475722", "0.4473817", "0.44714925", "0.44710246", "0.44702834", "0.4468506", "0.4467585", "0.44625616", "0.44560966", "0.44539663", "0.44451824", "0.44378573", "0.4428824", "0.44238406", "0.4423143", "0.44169682", "0.4415397", "0.44078583", "0.44075754", "0.44011894", "0.43994284", "0.43955362", "0.43890262", "0.43800965", "0.43800455", "0.43733674", "0.43661538", "0.43642983", "0.436142", "0.43550488", "0.4353995", "0.4351983", "0.43516138", "0.43495935", "0.43476406", "0.43473503", "0.43447092", "0.4343812", "0.4341129", "0.434018", "0.43363836", "0.4331595" ]
0.0
-1
Form dock_pd. Based on dock_proc dock_meas dock_chi2 dock_refine_ls dock_peak
def action_tof(obj: TOF, thread: QtCore.QThread): w_actions = [] # f_meas = obj.is_attribute("pd_meas") # f_chi2 = obj.is_attribute("chi2") # f_phase = obj.is_attribute("phase") # l_pd_peak = [] # if f_phase: # phase = obj.phase # for item in phase.items: # try: # pd_peak = getattr(obj, f"pd_peak_{item.label.lower():}") # l_pd_peak.append(pd_peak) # except AttributeError: # pass # f_setup = obj.is_attribute("setup") # f_pd_instr_resolution = obj.is_attribute("pd_instr_resolution") # f_pd_background = obj.is_attribute("pd_background") # f_range = obj.is_attribute("range") # if not(f_chi2 & f_meas & f_setup & f_pd_instr_resolution & f_phase & # f_pd_background & f_range): # if not f_chi2: # qtb_1 = QtWidgets.QToolButton() # qtb_1.setText("Add chi2") # qtb_1.clicked.connect(lambda: add_items(obj, [Chi2()], thread)) # w_actions.append(qtb_1) return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def CalculateDockSizerLimits(self, dock):\r\n\r\n docks, panes = CopyDocksAndPanes2(self._docks, self._panes)\r\n\r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n opposite_size = self.GetOppositeDockTotalSize(docks, dock.dock_direction)\r\n\r\n for tmpDock in docks:\r\n \r\n if tmpDock.dock_direction == dock.dock_direction and \\\r\n tmpDock.dock_layer == dock.dock_layer and \\\r\n tmpDock.dock_row == dock.dock_row:\r\n \r\n tmpDock.size = 1\r\n break\r\n \r\n sizer, panes, docks, uiparts = self.LayoutAll(panes, docks, [], True, False)\r\n client_size = self._frame.GetClientSize()\r\n sizer.SetDimension(0, 0, client_size.x, client_size.y)\r\n sizer.Layout()\r\n\r\n for part in uiparts:\r\n \r\n part.rect = wx.RectPS(part.sizer_item.GetPosition(), part.sizer_item.GetSize())\r\n if part.type == AuiDockUIPart.typeDock:\r\n part.dock.rect = part.rect\r\n \r\n sizer.Destroy()\r\n new_dock = None\r\n\r\n for tmpDock in docks:\r\n if tmpDock.dock_direction == dock.dock_direction and \\\r\n tmpDock.dock_layer == dock.dock_layer and \\\r\n tmpDock.dock_row == dock.dock_row:\r\n \r\n new_dock = tmpDock\r\n break\r\n \r\n partnerDock = self.GetPartnerDock(dock)\r\n\r\n if partnerDock:\r\n partnerRange = partnerDock.size - partnerDock.min_size\r\n if partnerDock.min_size == 0:\r\n partnerRange -= sash_size\r\n if dock.IsHorizontal():\r\n partnerRange -= caption_size\r\n \r\n direction = dock.dock_direction\r\n \r\n if direction == AUI_DOCK_LEFT:\r\n minPix = new_dock.rect.x + new_dock.rect.width\r\n maxPix = dock.rect.x + dock.rect.width\r\n maxPix += partnerRange\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n minPix = new_dock.rect.y + new_dock.rect.height\r\n maxPix = dock.rect.y + dock.rect.height\r\n maxPix += partnerRange\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n minPix = dock.rect.x - partnerRange - sash_size\r\n maxPix = new_dock.rect.x - sash_size\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n minPix = dock.rect.y - partnerRange - sash_size\r\n maxPix = new_dock.rect.y - sash_size\r\n\r\n return minPix, maxPix\r\n \r\n direction = new_dock.dock_direction\r\n \r\n if direction == AUI_DOCK_LEFT:\r\n minPix = new_dock.rect.x + new_dock.rect.width\r\n maxPix = client_size.x - opposite_size - sash_size\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n minPix = new_dock.rect.y + new_dock.rect.height\r\n maxPix = client_size.y - opposite_size - sash_size\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n minPix = opposite_size\r\n maxPix = new_dock.rect.x - sash_size\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n minPix = opposite_size\r\n maxPix = new_dock.rect.y - sash_size\r\n\r\n return minPix, maxPix", "def dock_complex(pose):\n #PyMOL observer assuming the initial call was already made prior to this line.\n AddPyMolObserver_to_energies(pose, True)\n # defining scoring functions (DNA + specific to structures of interests)\n fa_score = get_fa_scorefxn()\n dna_score = create_score_function('dna')\n dna_score.set_weight(fa_elec, 1)\n # movemap minimization / fast relax\n mm = MoveMap()\n mm.set_bb_true_range(\"enter beginning region\", \"enter ending region\")#min in this motif only\n relax = FastRelax()\n relax.set_scorefxn(scorefxn)\n relax.apply(pose)\n # defining specific complex docking protocol\n docking = DockMCMProtocol()\n docking.set_scorefxn(dna_score)\n docking.set_scorefxn_pack(fa_score)\n docking.set_partners(\"B_ACD\")\n # scoring pre and post docking\n dna_init = dna_score(pose)\n fa_init = fa_score(pose)\n # 
dockng occurs here\n docking.apply(pose)\n # scoring post docking.\n dna_final = dna_score(pose)\n fa_final = fa_score(pose)\n return [fa_init, fa_final, dna_init, dna_final]\n #raise Exception(\"Complex docking not implemented\")", "def dock(self, ligands, run_count=1, cpu=8,\n exhaustiveness=10, write_dir=os.getcwd()):\n\n long = '{0}{1}_{2}_out.csv'.format(\n write_dir, self.name, self.id)\n trimmed = '{0}{1}_{2}_trimmed.csv'.format(\n write_dir, self.name, self.id)\n short = '{0}{1}_{2}_summary.csv'.format(\n write_dir, self.name, self.id)\n if not os.path.exists(write_dir):\n os.makedirs(write_dir)\n os.chdir(write_dir)\n\n '''Long table format, all outputs'''\n if os.path.exists(long):\n long_df = pd.read_csv(long)\n else:\n long_df = pd.DataFrame(\n columns=['Date_time', 'Exhaustiveness', 'Run_number',\n 'Receptor', 'Ligand', 'Rank', 'Affinity',\n 'Dist_rmsd_l.b.', 'Dist_rmsd_u.b.', 'species',\n 'defaultLea', 'defaultLigand'\n ]\n )\n long_df.to_csv(long, index=False, mode='w')\n\n '''trimmed table is highest ranking binding from each dock iteration only'''\n if os.path.exists(trimmed):\n trim_df = pd.read_csv(trimmed)\n else:\n trim_df = long_df\n trim_df.to_csv(trimmed, index=False, mode='w')\n\n '''short table format with summary statistics of all ligand docks'''\n if os.path.exists(short):\n short_df = pd.read_csv(short, header=None,\n names=['', 'dGmean', 'dGsd', 'KDmean', 'KDsd'])\n short_df.to_csv(short, header=False, index=False, mode='w')\n else:\n short_df = pd.DataFrame(\n columns=['', 'dGmean', 'dGsd', 'KDmean', 'KDsd'],\n data=[['', self.id],\n ['type', self.type],\n ['species', self.species],\n ['lipids', ', '.join(self.lipid_patterns)],\n [],\n ['', 'dG mean', 'dG sd', 'KD mean', 'KD sd'],\n ['ref', np.nan, np.nan, np.nan, np.nan]],\n )\n\n for mol in ligands:\n\n '''if you need cores for other things while docks are running'''\n # cpu_lock = time.localtime(time.time()).tm_hour\n # if 21 > cpu_lock > 8:\n # cpu = 6\n\n '''vinaDock.py init and output long table'''\n dock_output = self.annotate_long(\n Docker(receptor=self.pdb,\n ligand=mol,\n log_path='{0}\\\\{1}_{2}_log.txt'.format(\n write_dir, self.id, mol.name),\n box=self.box, run_count=run_count,\n exhaustiveness=exhaustiveness,\n cpu=cpu\n ).run()\n )\n\n '''collecting output and doing math to write to short table'''\n long_df = long_df.append(dock_output)\n trim = dock_output.loc[dock_output['Rank'] == '1']\n trim_df = trim_df.append(trim)\n\n dg = np.mean(trim_df.loc[\n trim_df['Ligand'] == mol.name, 'Affinity'\n ].astype(float)), \\\n np.std(trim_df.loc[\n trim_df['Ligand'] == mol.name, 'Affinity'\n ].astype(float))\n kd = KD(dg[0]), \\\n KD(dg[0]) * (-dg[1] / dg[0])\n\n short_df.loc[6, 'dGmean'] = (\n np.mean(trim_df.loc[\n trim_df['defaultLigand'] == 1, 'Affinity'].astype(float))\n )\n short_df.loc[6, 'dGsd'] = (\n np.std(trim_df.loc[\n trim_df['defaultLigand'] == 1, 'Affinity'].astype(float))\n )\n short_df.loc[6, 'KDmean'] = KD(short_df.loc[6, 'dGmean'])\n short_df.loc[6, 'KDsd'] = short_df.loc[6, 'KDmean'] * \\\n -(short_df.loc[6, 'dGsd'] / short_df.loc[6, 'dGmean'])\n short_df = short_df.append(\n pd.DataFrame(\n columns=['', 'dGmean', 'dGsd', 'KDmean', 'KDsd'],\n data=[[mol.name, dg[0], dg[1], kd[0], kd[1]]]\n ), ignore_index=True\n )\n\n dock_output.to_csv(long, index=False, header=False, mode='a')\n trim.to_csv(trimmed, index=False, header=False, mode='a')\n short_df.to_csv(short, index=False, header=False, mode='w')\n\n return long_df, trim_df, short_df", "def GetDock(self):\n return self.dock", 
"def overviewCommand(self):\n plt.figure(11)\n plt.clf()\n ax = plt.subplot(211)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET'),\n color='r', label='FUOFFSET',\n linewidth=1, alpha=1) \n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='r', linewidth=3, alpha=0.5,\n label=self.DLtrack+'-PSP')\n plt.legend()\n plt.subplot(212, sharex=ax)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET')-\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='k', label='$\\Delta$',\n linewidth=1, alpha=1) \n \n signal = self.raw['OPDC'].data.field('FUOFFSET')\n plt.figure(12)\n plt.clf()\n ax2 = plt.subplot(111)\n Fs = 1e6/np.diff(self.raw['OPDC'].data.field('TIME')).mean()\n print Fs\n ax2.psd(signal[:50000], NFFT=5000, Fs=Fs, label='FUOFFSET',scale_by_freq=0)\n plt.legend()", "def test_vs_docking():\n vs = virtualscreening(n_cpu=-1)\n vs.load_ligands('sdf', os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'))\n vs.dock(engine='autodock_vina',\n protein=os.path.join(test_data_dir, 'data/dude/xiap/receptor_rdkit.pdb'),\n auto_ligand=os.path.join(test_data_dir, 'data/dude/xiap/crystal_ligand.sdf'),\n exhaustiveness=1,\n seed=0)\n mols = list(vs.fetch())\n assert_equal(len(mols), 3)\n mol_data = mols[0].data\n assert_in('vina_affinity', mol_data)\n assert_in('vina_rmsd_lb', mol_data)\n assert_in('vina_rmsd_ub', mol_data)", "def dock(rec_outpath, reorder_outpath, init='dock_init'):\n init = eval(init)\n receptor = os.path.basename(rec_outpath).split('_')[0]\n dock_dir = os.path.join(init.data_dir, init.dock_folder) \n rec_path = os.path.join(init.data_dir, rec_outpath)\n reorder_path = os.path.join(init.data_dir, reorder_outpath)\n\n dock_name = os.path.basename(rec_path).replace('receptor','dock')\n out_path = os.path.join(dock_dir, receptor, dock_name)\n\n\n\n if not os.path.exists(os.path.dirname(out_path)):\n os.makedirs(os.path.dirname(out_path))\n\n kw = {\n 'receptor': rec_path,\n 'ligand': reorder_path,\n 'autobox_ligand':reorder_path,\n 'out':out_path\n }\n\n cmd = init._make_command(**kw)\n cl = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n cl.wait()\n\n return [[rec_outpath, reorder_outpath, os.path.join(init.dock_folder, receptor, dock_name)]]", "def dockControl(*args, allowedArea: Union[AnyStr, List[AnyStr], bool]=\"all\", annotation:\n Union[AnyStr, bool]=\"\", area: Union[AnyStr, bool]=\"\", backgroundColor:\n Union[List[float, float, float], bool]=None, closeCommand: Script=None,\n content: Union[AnyStr, bool]=\"\", defineTemplate: AnyStr=\"\", docTag:\n Union[AnyStr, bool]=\"\", dockStation: AnyStr=\"\", dragCallback: Script=None,\n dropCallback: Script=None, enable: bool=True, enableBackground: bool=True,\n enableKeyboardFocus: bool=True, enablePopupOption: bool=True, exists: bool=True,\n fixedHeight: bool=True, fixedWidth: bool=True, floatChangeCommand: Script=None,\n floating: bool=True, fullPathName: bool=True, height: Union[int, bool]=0,\n highlightColor: Union[List[float, float, float], bool]=None, isObscured:\n bool=True, label: Union[AnyStr, bool]=\"\", manage: bool=True, moveable:\n bool=True, noBackground: bool=True, numberOfPopupMenus: bool=True, parent:\n Union[AnyStr, bool]=\"\", popupMenuArray: bool=True, preventOverride: bool=True,\n r: bool=True, retain: bool=True, sizeable: bool=True, splitLayout: AnyStr=\"\",\n state: 
Union[AnyStr, bool]=\"\", statusBarMessage: AnyStr=\"\", useTemplate:\n AnyStr=\"\", visible: bool=True, visibleChangeCommand: Union[Script, bool]=None,\n width: Union[int, bool]=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def LayoutAll(self, panes, docks, uiparts, spacer_only=False, oncheck=True):\r\n \r\n container = wx.BoxSizer(wx.VERTICAL)\r\n\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n cli_size = self._frame.GetClientSize()\r\n \r\n # empty all docks out\r\n for dock in docks:\r\n dock.panes = []\r\n if dock.fixed:\r\n # always reset fixed docks' sizes, because\r\n # the contained windows may have been resized\r\n dock.size = 0\r\n \r\n dock_count = len(docks)\r\n \r\n # iterate through all known panes, filing each\r\n # of them into the appropriate dock. If the\r\n # pane does not exist in the dock, add it\r\n for p in panes:\r\n\r\n # don't layout hidden panes.\r\n if p.IsShown():\r\n \r\n # find any docks with the same dock direction, dock layer, and\r\n # dock row as the pane we are working on\r\n arr = FindDocks(docks, p.dock_direction, p.dock_layer, p.dock_row)\r\n\r\n if arr:\r\n dock = arr[0]\r\n\r\n else:\r\n # dock was not found, so we need to create a new one\r\n d = AuiDockInfo()\r\n d.dock_direction = p.dock_direction\r\n d.dock_layer = p.dock_layer\r\n d.dock_row = p.dock_row\r\n docks.append(d)\r\n dock = docks[-1]\r\n\r\n if p.HasFlag(p.needsRestore) and not p.HasFlag(p.wasMaximized):\r\n \r\n isHor = dock.IsHorizontal()\r\n sashSize = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n\r\n # get the sizes of any docks that might \r\n # overlap with our restored dock\r\n\r\n # make list of widths or heights from the size in the dock rects\r\n sizes = [d.rect[2:][isHor] for \\\r\n d in docks if d.IsOk() and \\\r\n (d.IsHorizontal() == isHor) and \\\r\n not d.toolbar and \\\r\n d.dock_direction != AUI_DOCK_CENTER]\r\n \r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n\r\n # set max size allowing for sashes and absolute minimum\r\n maxsize = frameRect[2:][isHor] - sum(sizes) - (len(sizes)*10) - (sashSize*len(sizes))\r\n dock.size = min(p.previousDockSize,maxsize)\r\n\r\n else:\r\n dock.size = 0\r\n\r\n if p.HasFlag(p.wasMaximized):\r\n self.MaximizePane(p, savesizes=False)\r\n p.SetFlag(p.wasMaximized, False)\r\n\r\n if p.HasFlag(p.needsRestore):\r\n if p.previousDockPos is not None:\r\n DoInsertPane(dock.panes, dock.dock_direction, dock.dock_layer, dock.dock_row, p.previousDockPos)\r\n p.dock_pos = p.previousDockPos\r\n p.previousDockPos = None\r\n p.SetFlag(p.needsRestore, False)\r\n\r\n if p.IsDocked():\r\n # remove the pane from any existing docks except this one\r\n docks = RemovePaneFromDocks(docks, p, dock)\r\n\r\n # pane needs to be added to the dock,\r\n # if it doesn't already exist \r\n if not FindPaneInDock(dock, p.window):\r\n dock.panes.append(p)\r\n else:\r\n # remove the pane from any existing docks\r\n docks = RemovePaneFromDocks(docks, p)\r\n \r\n # remove any empty docks\r\n docks = [dock for dock in docks if dock.panes]\r\n\r\n dock_count = len(docks)\r\n # configure the docks further\r\n for ii, dock in enumerate(docks):\r\n # sort the dock pane array by the pane's\r\n # dock position (dock_pos), in ascending order\r\n dock.panes.sort(PaneSortFunc)\r\n dock_pane_count = len(dock.panes)\r\n \r\n # for newly created docks, set up their initial size\r\n if dock.size == 0:\r\n size = 0\r\n for pane in 
dock.panes:\r\n pane_size = pane.best_size\r\n if pane_size == wx.Size(-1, -1):\r\n pane_size = pane.min_size\r\n if pane_size == wx.Size(-1, -1) and pane.window:\r\n pane_size = pane.window.GetSize()\r\n if dock.IsHorizontal():\r\n size = max(pane_size.y, size)\r\n else:\r\n size = max(pane_size.x, size)\r\n \r\n # add space for the border (two times), but only\r\n # if at least one pane inside the dock has a pane border\r\n for pane in dock.panes:\r\n if pane.HasBorder():\r\n size = size + pane_border_size*2\r\n break\r\n \r\n # if pane is on the top or bottom, add the caption height,\r\n # but only if at least one pane inside the dock has a caption\r\n if dock.IsHorizontal():\r\n for pane in dock.panes:\r\n if pane.HasCaption() and not pane.HasCaptionLeft():\r\n size = size + caption_size\r\n break\r\n else:\r\n for pane in dock.panes:\r\n if pane.HasCaptionLeft() and not pane.HasCaption():\r\n size = size + caption_size\r\n break\r\n \r\n # new dock's size may not be more than the dock constraint\r\n # parameter specifies. See SetDockSizeConstraint()\r\n max_dock_x_size = int(self._dock_constraint_x*float(cli_size.x))\r\n max_dock_y_size = int(self._dock_constraint_y*float(cli_size.y))\r\n if cli_size <= wx.Size(20, 20):\r\n max_dock_x_size = 10000\r\n max_dock_y_size = 10000\r\n\r\n if dock.IsHorizontal():\r\n size = min(size, max_dock_y_size)\r\n else:\r\n size = min(size, max_dock_x_size)\r\n\r\n # absolute minimum size for a dock is 10 pixels\r\n if size < 10:\r\n size = 10\r\n\r\n dock.size = size\r\n\r\n # determine the dock's minimum size\r\n plus_border = False\r\n plus_caption = False\r\n plus_caption_left = False\r\n dock_min_size = 0\r\n for pane in dock.panes:\r\n if pane.min_size != wx.Size(-1, -1):\r\n if pane.HasBorder():\r\n plus_border = True\r\n if pane.HasCaption():\r\n plus_caption = True\r\n if pane.HasCaptionLeft():\r\n plus_caption_left = True\r\n if dock.IsHorizontal():\r\n if pane.min_size.y > dock_min_size:\r\n dock_min_size = pane.min_size.y\r\n else:\r\n if pane.min_size.x > dock_min_size:\r\n dock_min_size = pane.min_size.x\r\n \r\n if plus_border:\r\n dock_min_size += pane_border_size*2\r\n if plus_caption and dock.IsHorizontal():\r\n dock_min_size += caption_size\r\n if plus_caption_left and dock.IsVertical():\r\n dock_min_size += caption_size\r\n \r\n dock.min_size = dock_min_size\r\n \r\n # if the pane's current size is less than it's\r\n # minimum, increase the dock's size to it's minimum\r\n if dock.size < dock.min_size:\r\n dock.size = dock.min_size\r\n\r\n # determine the dock's mode (fixed or proportional)\r\n # determine whether the dock has only toolbars\r\n action_pane_marked = False\r\n dock.fixed = True\r\n dock.toolbar = True\r\n for pane in dock.panes:\r\n if not pane.IsFixed():\r\n dock.fixed = False\r\n if not pane.IsToolbar():\r\n dock.toolbar = False\r\n if pane.HasFlag(AuiPaneInfo.optionDockFixed):\r\n dock.fixed = True\r\n if pane.HasFlag(AuiPaneInfo.actionPane):\r\n action_pane_marked = True\r\n\r\n # if the dock mode is proportional and not fixed-pixel,\r\n # reassign the dock_pos to the sequential 0, 1, 2, 3\r\n # e.g. remove gaps like 1, 2, 30, 500\r\n if not dock.fixed:\r\n for jj in xrange(dock_pane_count):\r\n pane = dock.panes[jj]\r\n pane.dock_pos = jj\r\n \r\n # if the dock mode is fixed, and none of the panes\r\n # are being moved right now, make sure the panes\r\n # do not overlap each other. 
If they do, we will\r\n # adjust the panes' positions\r\n if dock.fixed and not action_pane_marked:\r\n pane_positions, pane_sizes = self.GetPanePositionsAndSizes(dock)\r\n offset = 0\r\n for jj in xrange(dock_pane_count):\r\n pane = dock.panes[jj]\r\n pane.dock_pos = pane_positions[jj]\r\n amount = pane.dock_pos - offset\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n pane.dock_pos += -amount\r\n\r\n offset += pane_sizes[jj]\r\n dock.panes[jj] = pane\r\n\r\n if oncheck:\r\n self._docks[ii] = dock \r\n\r\n # shrink docks if needed \r\n## docks = self.SmartShrink(docks, AUI_DOCK_TOP)\r\n## docks = self.SmartShrink(docks, AUI_DOCK_LEFT)\r\n\r\n if oncheck:\r\n self._docks = docks\r\n \r\n # discover the maximum dock layer\r\n max_layer = 0\r\n dock_count = len(docks)\r\n \r\n for ii in xrange(dock_count):\r\n max_layer = max(max_layer, docks[ii].dock_layer)\r\n\r\n # clear out uiparts\r\n uiparts = []\r\n\r\n # create a bunch of box sizers,\r\n # from the innermost level outwards.\r\n cont = None\r\n middle = None\r\n\r\n if oncheck:\r\n docks = self._docks\r\n \r\n for layer in xrange(max_layer+1):\r\n # find any docks in this layer\r\n arr = FindDocks(docks, -1, layer, -1)\r\n # if there aren't any, skip to the next layer\r\n if not arr:\r\n continue\r\n\r\n old_cont = cont\r\n\r\n # create a container which will hold this layer's\r\n # docks (top, bottom, left, right)\r\n cont = wx.BoxSizer(wx.VERTICAL)\r\n\r\n # find any top docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_TOP, layer, -1)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(cont, row, uiparts, spacer_only)\r\n \r\n # fill out the middle layer (which consists\r\n # of left docks, content area and right docks)\r\n \r\n middle = wx.BoxSizer(wx.HORIZONTAL)\r\n\r\n # find any left docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_LEFT, layer, -1)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n # add content dock (or previous layer's sizer\r\n # to the middle\r\n if not old_cont:\r\n # find any center docks\r\n arr = FindDocks(docks, AUI_DOCK_CENTER, -1, -1)\r\n if arr:\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n elif not self._has_maximized:\r\n # there are no center docks, add a background area\r\n sizer_item = middle.Add((1, 1), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.pane = None\r\n part.dock = None\r\n part.button = None\r\n part.cont_sizer = middle\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n else:\r\n middle.Add(old_cont, 1, wx.EXPAND)\r\n \r\n # find any right docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_RIGHT, layer, -1, reverse=True)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(middle, row, uiparts, spacer_only)\r\n \r\n if len(middle.GetChildren()) > 0:\r\n cont.Add(middle, 1, wx.EXPAND)\r\n\r\n # find any bottom docks in this layer\r\n arr = FindDocks(docks, AUI_DOCK_BOTTOM, layer, -1, reverse=True)\r\n for row in arr:\r\n uiparts = self.LayoutAddDock(cont, row, uiparts, spacer_only)\r\n\r\n if not cont:\r\n # no sizer available, because there are no docks,\r\n # therefore we will create a simple background area\r\n cont = wx.BoxSizer(wx.VERTICAL)\r\n sizer_item = cont.Add((1, 1), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.pane = None\r\n part.dock = None\r\n part.button = None\r\n part.cont_sizer = middle\r\n part.sizer_item = sizer_item\r\n 
uiparts.append(part)\r\n\r\n if oncheck:\r\n self._uiparts = uiparts\r\n self._docks = docks\r\n\r\n container.Add(cont, 1, wx.EXPAND)\r\n\r\n if oncheck:\r\n return container\r\n else:\r\n return container, panes, docks, uiparts", "def config_pbc_md(self):\n\n self._config_md()\n self.title = \"PBC MD Simulation\"\n self.cntrl[\"cut\"] = 8.0\n self.cntrl[\"igb\"] = 0\n self.cntrl[\"iwrap\"] = 1\n self.cntrl[\"ntp\"] = 1\n self.cntrl[\"barostat\"] = 2", "def dock_simple(pose, dock_partners, foldtree):\n assert isinstance(dock_partners, str)\n # setup foldtree\n if foldtree is not None:\n assert isinstance(foldtree, str)\n setup_foldtree(pose, foldtree, Vector1([1]))\n # specify scoring functions\n fa_score = get_fa_scorefxn()\n dna_score = create_score_function('dna')\n dna_score.set_weight(fa_elec, 1)\n # specify docking protocol\n docking = DockMCMProtocol()\n docking.set_scorefxn(dna_score)\n docking.set_scorefxn_pack(fa_score)\n docking.set_partners(dock_partners)\n # obtain initial and final scores after docking\n dna_init = dna_score(pose)\n fa_init = fa_score(pose)\n docking.apply(pose)\n dna_final = dna_score(pose)\n fa_final = fa_score(pose)\n return [fa_init, fa_final, dna_init, dna_final]", "def docking_rdock(self, ligand_file, docking_file, docking_log_file):\n\n docking_prefix = '.'.join(docking_file.strip().split('.')[:-1])\n run_line = '%s' % self.docking_program\n run_line += ' -r %s' % self.dock_config_file\n run_line += ' -p dock.prm'\n run_line += ' -n %d' % self.exhaustiveness\n run_line += ' -i %s' % ligand_file\n run_line += ' -o %s' % docking_prefix\n\n# run_line2 = 'sdsort -n -fSCORE %s.sd' % (docking_prefix)\n run_line2 = 'sdsort -n -fSCORE.INTER %s.sd' % (docking_prefix)\n\n e = None\n try:\n result = subprocess.check_output(run_line.split(),\n stderr=subprocess.STDOUT,\n timeout=self.timeout_dock,\n universal_newlines=True)\n if self.output_save:\n fp = open(docking_log_file, 'w')\n fp.write(result)\n fp.close()\n\n result2 = subprocess.check_output(run_line2.split(),\n universal_newlines=True)\n fp = open(docking_file, 'w')\n fp.write(result2)\n fp.close()\n\n except Exception as e:\n return [99.999], e\n\n affinity_list = list()\n out_lines = result2.split('\\n')\n check_score = False\n for line in out_lines:\n if line[0:16] == '> <SCORE.INTER>':\n# if line[0:10] == '> <SCORE>':\n check_score = True\n continue\n if check_score is True:\n affinity = float(line)\n affinity_list += [affinity]\n check_score = False\n continue\n if len(affinity_list) == 0:\n e = 'WARNING: Could not find any conformations.'\n return [99.999], e\n return affinity_list, e", "def SetDockPos(self, source):\r\n \r\n self.dock_direction = source.dock_direction\r\n self.dock_layer = source.dock_layer\r\n self.dock_row = source.dock_row\r\n self.dock_pos = source.dock_pos\r\n self.dock_proportion = source.dock_proportion\r\n self.floating_pos = wx.Point(*source.floating_pos)\r\n self.floating_size = wx.Size(*source.floating_size)\r\n self.rect = wx.Rect(*source.rect)\r\n \r\n return self", "def GetDockPixelOffset(self, test):\r\n\r\n # the only way to accurately calculate the dock's\r\n # offset is to actually run a theoretical layout\r\n docks, panes = CopyDocksAndPanes2(self._docks, self._panes)\r\n panes.append(test)\r\n\r\n sizer, panes, docks, uiparts = self.LayoutAll(panes, docks, [], True, False)\r\n client_size = self._frame.GetClientSize()\r\n sizer.SetDimension(0, 0, client_size.x, client_size.y)\r\n sizer.Layout()\r\n\r\n for part in uiparts:\r\n pos = 
part.sizer_item.GetPosition()\r\n size = part.sizer_item.GetSize()\r\n part.rect = wx.RectPS(pos, size)\r\n if part.type == AuiDockUIPart.typeDock:\r\n part.dock.rect = part.rect\r\n\r\n sizer.Destroy()\r\n\r\n for dock in docks:\r\n if test.dock_direction == dock.dock_direction and \\\r\n test.dock_layer == dock.dock_layer and \\\r\n test.dock_row == dock.dock_row:\r\n \r\n if dock.IsVertical():\r\n return dock.rect.y\r\n else:\r\n return dock.rect.x\r\n \r\n return 0", "def GetPartnerDock(self, dock):\r\n\r\n for layer in xrange(dock.dock_layer, -1, -1):\r\n \r\n bestDock = None\r\n\r\n for tmpDock in self._docks:\r\n \r\n if tmpDock.dock_layer != layer:\r\n continue\r\n \r\n if tmpDock.dock_direction != dock.dock_direction:\r\n continue\r\n\r\n if tmpDock.dock_layer < dock.dock_layer:\r\n \r\n if not bestDock or tmpDock.dock_row < bestDock.dock_row:\r\n bestDock = tmpDock\r\n \r\n elif tmpDock.dock_row > dock.dock_row:\r\n \r\n if not bestDock or tmpDock.dock_row > bestDock.dock_row:\r\n bestDock = tmpDock\r\n \r\n if bestDock:\r\n return bestDock\r\n \r\n return None", "def SavePerspective(self):\r\n\r\n result = \"layout2|\"\r\n\r\n for pane in self._panes:\r\n result += self.SavePaneInfo(pane) + \"|\"\r\n \r\n for dock in self._docks:\r\n result = result + (\"dock_size(%d,%d,%d)=%d|\")%(dock.dock_direction,\r\n dock.dock_layer,\r\n dock.dock_row,\r\n dock.size)\r\n return result", "def test_plants_docking(self):\n self.workdir = prepare_work_dir(__rootpath__, create=True)\n settings['workdir'] = self.workdir\n settings['bindingsite_center'] = [7.79934, 9.49666, 3.39229]\n settings['exec_path'] = exec_path\n\n plants = PlantsDocking(**settings)\n self.assertTrue(plants.run(self.protein, self.ligand))\n\n outputfiles = glob.glob('{0}/_entry_00001_conf_*.mol2'.format(self.workdir))\n self.assertEqual(len(outputfiles), plants.config['cluster_structures'])\n self.assertEqual(len(outputfiles), len(plants.results()))", "def CopyDocksAndPanes2(src_docks, src_panes):\r\n \r\n dest_docks = []\r\n\r\n for ii in xrange(len(src_docks)):\r\n dest_docks.append(AuiDockInfo())\r\n dest_docks[ii].dock_direction = src_docks[ii].dock_direction\r\n dest_docks[ii].dock_layer = src_docks[ii].dock_layer\r\n dest_docks[ii].dock_row = src_docks[ii].dock_row\r\n dest_docks[ii].size = src_docks[ii].size\r\n dest_docks[ii].min_size = src_docks[ii].min_size\r\n dest_docks[ii].resizable = src_docks[ii].resizable\r\n dest_docks[ii].fixed = src_docks[ii].fixed\r\n dest_docks[ii].toolbar = src_docks[ii].toolbar\r\n dest_docks[ii].panes = src_docks[ii].panes\r\n dest_docks[ii].rect = wx.Rect(*src_docks[ii].rect)\r\n\r\n dest_panes = []\r\n\r\n for ii in xrange(len(src_panes)):\r\n dest_panes.append(AuiPaneInfo())\r\n dest_panes[ii].name = src_panes[ii].name\r\n dest_panes[ii].caption = src_panes[ii].caption\r\n dest_panes[ii].window = src_panes[ii].window\r\n dest_panes[ii].frame = src_panes[ii].frame\r\n dest_panes[ii].state = src_panes[ii].state\r\n dest_panes[ii].dock_direction = src_panes[ii].dock_direction\r\n dest_panes[ii].dock_layer = src_panes[ii].dock_layer\r\n dest_panes[ii].dock_row = src_panes[ii].dock_row\r\n dest_panes[ii].dock_pos = src_panes[ii].dock_pos\r\n dest_panes[ii].best_size = wx.Size(*src_panes[ii].best_size)\r\n dest_panes[ii].min_size = wx.Size(*src_panes[ii].min_size)\r\n dest_panes[ii].max_size = wx.Size(*src_panes[ii].max_size)\r\n dest_panes[ii].floating_pos = wx.Point(*src_panes[ii].floating_pos)\r\n dest_panes[ii].floating_size = wx.Size(*src_panes[ii].floating_size)\r\n 
dest_panes[ii].dock_proportion = src_panes[ii].dock_proportion\r\n dest_panes[ii].buttons = src_panes[ii].buttons\r\n dest_panes[ii].rect = wx.Rect(*src_panes[ii].rect)\r\n dest_panes[ii].icon = src_panes[ii].icon\r\n dest_panes[ii].notebook_id = src_panes[ii].notebook_id\r\n dest_panes[ii].transparent = src_panes[ii].transparent\r\n dest_panes[ii].snapped = src_panes[ii].snapped\r\n dest_panes[ii].minimize_mode = src_panes[ii].minimize_mode\r\n\r\n for ii in xrange(len(dest_docks)):\r\n dock = dest_docks[ii]\r\n for jj in xrange(len(dock.panes)):\r\n for kk in xrange(len(src_panes)):\r\n if dock.panes[jj] == src_panes[kk]:\r\n dock.panes[jj] = dest_panes[kk]\r\n\r\n dest_docks[ii] = dock\r\n \r\n return dest_docks, dest_panes", "def __init__(self, structure_id, pdb_file, amb_file, flex1_file, flex2_file, root_dir=None):\n\n super(DOCK, self).__init__(id=structure_id, description='DOCK6 preparation')\n self._root_dir = None\n self.structure_path = pdb_file\n\n if root_dir:\n self.root_dir = root_dir\n else:\n self.root_dir = self.structure_dir\n\n self.dockprep_path = None\n self.receptormol2_path = None\n self.receptorpdb_path = None\n self.dms_path = None\n self.sphgen_path = None\n self.bindingsite_path = None\n self.sphsel_path = None\n self.box_path = None\n self.grid_path = None\n\n self.dock_flexible_outfile = None\n self.dock_flexible_scored_result = None\n self.dock_flexible_conformers_result = None\n\n self.amb_file = amb_file\n self.flex1_file = flex1_file\n self.flex2_file = flex2_file\n\n log.debug('{}: created DOCK6 project folder at {}'.format(structure_id, self.dock_dir))", "def UpdateDockingGuides(self, paneInfo):\r\n\r\n if len(self._guides) == 0:\r\n self.CreateGuideWindows()\r\n\r\n captionSize = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n frameRect = GetInternalFrameRect(self._frame, self._docks)\r\n mousePos = wx.GetMousePosition()\r\n\r\n for indx, guide in enumerate(self._guides):\r\n \r\n pt = wx.Point()\r\n guide_size = guide.host.GetSize()\r\n if not guide.host:\r\n raise Exception(\"Invalid docking host\")\r\n\r\n direction = guide.dock_direction\r\n\r\n if direction == AUI_DOCK_LEFT:\r\n pt.x = frameRect.x + guide_size.x / 2 + 16\r\n pt.y = frameRect.y + frameRect.height / 2\r\n\r\n elif direction == AUI_DOCK_TOP:\r\n pt.x = frameRect.x + frameRect.width / 2\r\n pt.y = frameRect.y + guide_size.y / 2 + 16\r\n\r\n elif direction == AUI_DOCK_RIGHT:\r\n pt.x = frameRect.x + frameRect.width - guide_size.x / 2 - 16\r\n pt.y = frameRect.y + frameRect.height / 2\r\n\r\n elif direction == AUI_DOCK_BOTTOM:\r\n pt.x = frameRect.x + frameRect.width / 2\r\n pt.y = frameRect.y + frameRect.height - guide_size.y / 2 - 16\r\n\r\n elif direction == AUI_DOCK_CENTER:\r\n rc = paneInfo.window.GetScreenRect()\r\n pt.x = rc.x + rc.width / 2\r\n pt.y = rc.y + rc.height / 2\r\n if paneInfo.HasCaption():\r\n pt.y -= captionSize / 2\r\n elif paneInfo.HasCaptionLeft():\r\n pt.x -= captionSize / 2\r\n\r\n # guide will be centered around point 'pt'\r\n targetPosition = wx.Point(pt.x - guide_size.x / 2, pt.y - guide_size.y / 2)\r\n\r\n if guide.host.GetPosition() != targetPosition:\r\n guide.host.Move(targetPosition)\r\n \r\n guide.host.AeroMove(targetPosition)\r\n\r\n if guide.dock_direction == AUI_DOCK_CENTER:\r\n guide.host.ValidateNotebookDocking(paneInfo.IsNotebookDockable())\r\n\r\n guide.host.UpdateDockGuide(mousePos)\r\n \r\n paneInfo.window.Lower()", "def Dock(self):\r\n\r\n if self.IsNotebookPage():\r\n self.notebook_id = -1\r\n self.dock_direction = AUI_DOCK_NONE\r\n \r\n 
return self.SetFlag(self.optionFloating, False)", "def port1_docked_time(self, port1_docked_time):\n\n self._port1_docked_time = port1_docked_time", "def port1_docked_time(self):\n return self._port1_docked_time", "def port1_docking_date(self, port1_docking_date):\n\n self._port1_docking_date = port1_docking_date", "def makeDPartial( name\n , config\n , DecayDescriptor\n , inputSel\n ) :\n\n _Kcuts1 = \"~ISMUON & (PT > %(DaugPtLoose)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2Loose)s)\" % locals()['config']\n _KcutsPIDK = \" & (PIDK > %(HighPIDK)s)\" % locals()['config']\n _Kcuts2 = \" & (ISLONG) & (P > %(DaugPLoose)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2Loose)s)\" % locals()['config']\n _Kcuts = _Kcuts1 + _KcutsPIDK + _Kcuts2\n _Picuts1 = \"~ISMUON & (PT > %(DaugPtMin)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2)s)\" % locals()['config']\n _PicutsPIDK = \" & (PIDK < %(LowPIDK)s)\" % locals()['config']\n _Picuts2 = \" & (ISLONG) & (P > %(DaugP)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2)s)\" % locals()['config']\n _Picuts = _Picuts1 + _PicutsPIDK + _Picuts2\n _dauCuts = { 'K+': _Kcuts, 'pi+': _Picuts }\n #_Kcuts1 = \"~ISMUON & (PT > 500* MeV) & (MIPCHI2DV(PRIMARY) > 4)\"\n #_KcutsPIDK = \" & (PIDK > 5)\"\n #_Kcuts2 = \" & (ISLONG) & (P > 5000* MeV) & (TRCHI2DOF < 5)\"\n #_Kcuts = _Kcuts1 + _KcutsPIDK + _Kcuts2\n #_Picuts1 = \"~ISMUON & (PT > 500* MeV) & (MIPCHI2DV(PRIMARY) > 4)\"\n #_PicutsPIDK = \" & (PIDK < 0)\"\n #_Picuts2 = \" & (ISLONG) & (P > 5000* MeV) & (TRCHI2DOF < 5)\"\n #_Picuts = _Picuts1 + _PicutsPIDK + _Picuts2\n #_dauCuts = { 'K+': _Kcuts, 'pi+': _Picuts }\n\n _combCuts = \"(APT > %(D0PtLoose)s* MeV)\" \\\n \"& (AP > %(D0P)s* MeV)\" % locals()['config']\n\n _motherCuts = \"(VFASPF(VCHI2PDOF) < %(D0VtxChi2Ndof)s)\" \\\n \"& (BPVVDCHI2 > %(D0FDChi2)s)\" % locals()['config']\n\n\n _Dminus = CombineParticles( DecayDescriptor = DecayDescriptor\n , DaughtersCuts = _dauCuts\n , CombinationCut = _combCuts\n , MotherCut = _motherCuts\n )\n\n return Selection( name+'Sel',\n Algorithm = _Dminus,\n RequiredSelections = inputSel\n )", "def LayoutAddDock(self, cont, dock, uiparts, spacer_only):\r\n \r\n sizer_item = wx.SizerItem()\r\n part = AuiDockUIPart()\r\n\r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n orientation = (dock.IsHorizontal() and [wx.HORIZONTAL] or [wx.VERTICAL])[0]\r\n\r\n # resizable bottom and right docks have a sash before them\r\n if not self._has_maximized and not dock.fixed and \\\r\n dock.dock_direction in [AUI_DOCK_BOTTOM, AUI_DOCK_RIGHT]:\r\n \r\n sizer_item = cont.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n\r\n part.type = AuiDockUIPart.typeDockSizer\r\n part.orientation = orientation\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n # create the sizer for the dock\r\n dock_sizer = wx.BoxSizer(orientation)\r\n\r\n # add each pane to the dock\r\n has_maximized_pane = False\r\n pane_count = len(dock.panes)\r\n\r\n if dock.fixed:\r\n \r\n # figure out the real pane positions we will\r\n # use, without modifying the each pane's pane_pos member\r\n pane_positions, pane_sizes = self.GetPanePositionsAndSizes(dock)\r\n\r\n offset = 0\r\n for pane_i in xrange(pane_count):\r\n \r\n pane = dock.panes[pane_i]\r\n pane_pos = pane_positions[pane_i]\r\n\r\n if pane.IsMaximized():\r\n has_maximized_pane = True\r\n\r\n amount = pane_pos - offset\r\n if amount > 0:\r\n \r\n if dock.IsVertical():\r\n sizer_item = dock_sizer.Add((1, amount), 0, wx.EXPAND)\r\n 
else:\r\n sizer_item = dock_sizer.Add((amount, 1), 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = (orientation==wx.HORIZONTAL and \\\r\n [wx.VERTICAL] or [wx.HORIZONTAL])[0]\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n offset = offset + amount\r\n \r\n uiparts = self.LayoutAddPane(dock_sizer, dock, pane, uiparts, spacer_only)\r\n\r\n offset = offset + pane_sizes[pane_i]\r\n \r\n # at the end add a very small stretchable background area\r\n sizer_item = dock_sizer.Add((0, 0), 1, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeBackground\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n else:\r\n \r\n for pane_i in xrange(pane_count):\r\n \r\n pane = dock.panes[pane_i]\r\n\r\n if pane.IsMaximized():\r\n has_maximized_pane = True\r\n\r\n # if this is not the first pane being added,\r\n # we need to add a pane sizer\r\n if not self._has_maximized and pane_i > 0:\r\n sizer_item = dock_sizer.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typePaneSizer\r\n part.dock = dock\r\n part.pane = dock.panes[pane_i-1]\r\n part.button = None\r\n part.orientation = (orientation==wx.HORIZONTAL and \\\r\n [wx.VERTICAL] or [wx.HORIZONTAL])[0]\r\n part.cont_sizer = dock_sizer\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n uiparts = self.LayoutAddPane(dock_sizer, dock, pane, uiparts, spacer_only)\r\n \r\n if dock.dock_direction == AUI_DOCK_CENTER or has_maximized_pane:\r\n sizer_item = cont.Add(dock_sizer, 1, wx.EXPAND)\r\n else:\r\n sizer_item = cont.Add(dock_sizer, 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeDock\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n\r\n if dock.IsHorizontal():\r\n cont.SetItemMinSize(dock_sizer, (0, dock.size))\r\n else:\r\n cont.SetItemMinSize(dock_sizer, (dock.size, 0))\r\n\r\n # top and left docks have a sash after them\r\n if not self._has_maximized and not dock.fixed and \\\r\n dock.dock_direction in [AUI_DOCK_TOP, AUI_DOCK_LEFT]:\r\n \r\n sizer_item = cont.Add((sash_size, sash_size), 0, wx.EXPAND)\r\n\r\n part = AuiDockUIPart()\r\n part.type = AuiDockUIPart.typeDockSizer\r\n part.dock = dock\r\n part.pane = None\r\n part.button = None\r\n part.orientation = orientation\r\n part.cont_sizer = cont\r\n part.sizer_item = sizer_item\r\n uiparts.append(part)\r\n \r\n return uiparts", "def port1_docking_date(self):\n return self._port1_docking_date", "def __init__(self, **kwargs):\n\n # mod_path = os.path.join(os.path.dirname(a.__file__), 'BindingPMF.py')\n # print \"\"\"###########\n # # AlGDock #\n # ###########\n # Molecular docking with adaptively scaled alchemical interaction grids\n #\n # in {0}\n # last modified {1}\n # \"\"\".format(mod_path, time.ctime(os.path.getmtime(mod_path)))\n\n from AlGDock.argument_parser import SimulationArguments\n self.args = SimulationArguments(**kwargs)\n\n from AlGDock.simulation_data import SimulationData\n self.data = {}\n self.data['BC'] = SimulationData(self.args.dir['BC'], 'BC', \\\n self.args.params['CD']['pose'])\n self.data['CD'] = 
SimulationData(self.args.dir['CD'], 'CD', \\\n self.args.params['CD']['pose'])\n\n if not 'max_time' in kwargs.keys():\n kwargs['max_time'] = None\n if not 'run_type' in kwargs.keys():\n kwargs['run_type'] = None\n\n from AlGDock.logger import Logger\n self.log = Logger(self.args, \\\n max_time=kwargs['max_time'], run_type=kwargs['run_type'])\n\n self.T_HIGH = self.args.params['BC']['T_HIGH']\n self.T_TARGET = self.args.params['BC']['T_TARGET']\n\n self._setup()\n\n print '\\n*** Simulation parameters and constants ***'\n for p in ['BC', 'CD']:\n print '\\nfor %s:' % p\n print dictionary_tools.dict_view(self.args.params[p])[:-1]\n\n self.run(kwargs['run_type'])", "def get_bestdockingscore(self):\r\n\r\n if not \"lc\" in self.cPoses.__dict__:\r\n print \"Load first sdf with poses\"\r\n return \r\n\r\n if \"dscores\" in self.__dict__:\r\n return self.dscores\r\n\r\n dscores = {}\r\n for dchem in self.cPoses.lc:\r\n # case where protein is included, case of XP docking\r\n if not \"r_i_docking_score\" in dchem.keys():\r\n continue\r\n\r\n chemblID = dchem[\"s_m_entry_name\"].split(\".\")[0]\r\n #print chemblID\r\n\r\n if not chemblID in dscores.keys():\r\n dscores[chemblID] = {}\r\n dscores[chemblID][\"count\"] = 1\r\n else:\r\n dscores[chemblID][\"count\"] = dscores[chemblID][\"count\"] + 1\r\n\r\n if not \"r_i_docking_score\" in dscores[chemblID].keys():\r\n dscores[chemblID][\"r_i_docking_score\"] = float(dchem[\"r_i_docking_score\"])\r\n dscores[chemblID][\"r_i_glide_emodel\"] = float(dchem[\"r_i_glide_emodel\"])\r\n else:\r\n if float(dchem[\"r_i_docking_score\"]) < dscores[chemblID][\"r_i_docking_score\"]:\r\n dscores[chemblID][\"r_i_docking_score\"] = float(chemblID[\"r_i_docking_score\"])\r\n dscores[chemblID][\"r_i_glide_emodel\"] = float(chemblID[\"r_i_glide_emodel\"])\r\n\r\n self.dscores = dscores\r\n\r\n # write\r\n pfilout = self.pr_out + \"score_poses.txt\"\r\n filout = open(pfilout, \"w\")\r\n filout.write(\"Chemicals\\tNb poses\\tGlide score\\temodel score\\n\")\r\n for chemblID in dscores.keys():\r\n filout.write(\"%s\\t%s\\t%s\\t%s\\n\"%(chemblID, dscores[chemblID][\"count\"], dscores[chemblID][\"r_i_docking_score\"], dscores[chemblID][\"r_i_glide_emodel\"]))\r\n filout.close()", "def init_layout(self):\n super(WxDockPane, self).init_layout()\n self.widget.SetDockWidget(self.dock_widget())", "def GetPanePositionsAndSizes(self, dock):\r\n \r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n gripper_size = self._art.GetMetric(AUI_DOCKART_GRIPPER_SIZE)\r\n\r\n positions = []\r\n sizes = []\r\n\r\n action_pane = -1\r\n pane_count = len(dock.panes)\r\n\r\n # find the pane marked as our action pane\r\n for pane_i in xrange(pane_count):\r\n pane = dock.panes[pane_i]\r\n if pane.HasFlag(AuiPaneInfo.actionPane):\r\n if action_pane != -1:\r\n raise Exception(\"Too many action panes!\")\r\n action_pane = pane_i\r\n \r\n # set up each panes default position, and\r\n # determine the size (width or height, depending\r\n # on the dock's orientation) of each pane\r\n for pane in dock.panes:\r\n positions.append(pane.dock_pos)\r\n size = 0\r\n \r\n if pane.HasBorder():\r\n size += pane_border_size*2\r\n \r\n if dock.IsHorizontal():\r\n if pane.HasGripper() and not pane.HasGripperTop():\r\n size += gripper_size\r\n\r\n if pane.HasCaptionLeft():\r\n size += caption_size\r\n \r\n size += pane.best_size.x\r\n \r\n else:\r\n if pane.HasGripper() and pane.HasGripperTop():\r\n size += gripper_size\r\n\r\n if 
pane.HasCaption() and not pane.HasCaptionLeft():\r\n size += caption_size\r\n \r\n size += pane.best_size.y\r\n \r\n sizes.append(size)\r\n\r\n # if there is no action pane, just return the default\r\n # positions (as specified in pane.pane_pos)\r\n if action_pane == -1:\r\n return positions, sizes\r\n\r\n offset = 0\r\n for pane_i in xrange(action_pane-1, -1, -1):\r\n amount = positions[pane_i+1] - (positions[pane_i] + sizes[pane_i])\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n positions[pane_i] -= -amount\r\n\r\n offset += sizes[pane_i]\r\n \r\n # if the dock mode is fixed, make sure none of the panes\r\n # overlap we will bump panes that overlap\r\n offset = 0\r\n for pane_i in xrange(action_pane, pane_count):\r\n amount = positions[pane_i] - offset\r\n if amount >= 0:\r\n offset += amount\r\n else:\r\n positions[pane_i] += -amount\r\n\r\n offset += sizes[pane_i]\r\n\r\n return positions, sizes", "def get(self, request):\n# self.context[\"form\"] = AddDockParamsForm()\n# self.context[\"data\"] = { \"sets\": [\n# {\"tape_id\":\"1\", \"tape_len\":1, \"a_aft\":1, \"a_fore\":1, \"f_rate\":1, \"f_qty\":1},\n# {\"tape_id\":\"2\", \"tape_len\":2, \"a_aft\":2, \"a_fore\":2, \"f_rate\":2, \"f_qty\":2},\n# {\"tape_id\":\"3\", \"tape_len\":3, \"a_aft\":3, \"a_fore\":3, \"f_rate\":3, \"f_qty\":3},\n# ] }\n# return render(request, \"dbkeeper/add_dock_params.html\", self.context)\n form = AddDockParamsForm()\n form.setFixedFields()\n try:\n dockParams = Setting.getDockParams()\n form.setFields(len(dockParams[\"sets\"]))\n# form.setData(dockParams)\n self.context[\"data\"] = form.setData(dockParams)\n except:\n self.context[\"data\"] = { \"numRows\": 0 }\n self.context[\"form\"] = form\n return render(request, \"dbkeeper/add_dock_params.html\", self.context)", "def _build_ppdf(self,pdf_dset,renormalize):\n\n if (not hasattr(self,'u')) or (not hasattr(self,'w')) or (not hasattr(self,'sfr')):\n raise AttributeError(\"axes are not set. 
Call set_axes() first\")\n\n dbinsq = self.dlogcs*self.dlogvout\n\n # Momentum flux PDF\n etaM = pdf_dset['etaM'] # in Msun/kpc^2/yr\n etap = self._etap(self.sfr) # in (Msun*km/s)/kpc^2/yr\n pdf_dset['etap'] = etap\n\n pfact = (self.vout**2+self.cs**2)/(self.vp*self.vout)\n ppdfc = etaM/etap*pdf_dset['Mpdf-cool']*pfact\n ppdfh = etaM/etap*pdf_dset['Mpdf-hot']*pfact\n ppdf = ppdfc + ppdfh\n\n if renormalize:\n renorm = ppdf.sum(dim=['logcs','logvout'])*dbinsq\n ppdfc = ppdfc/renorm\n ppdfh = ppdfh/renorm\n ppdf = ppdf/renorm\n pdf_dset['p_renorm'] = renorm\n\n pdf_dset['ppdf-cool'] = ppdfc\n pdf_dset['ppdf-hot'] = ppdfh\n pdf_dset['etap-cool'] = pdf_dset['etap']*ppdfc.sum(dim=['logcs','logvout'])*dbinsq\n pdf_dset['etap-hot'] = pdf_dset['etap']*ppdfh.sum(dim=['logcs','logvout'])*dbinsq\n pdf_dset['ppdf'] = ppdf", "def getRigBuildData(self):\n\n data = super(SimpleControlComponentGuide, self).getRigBuildData()\n\n data[\"ctrlSize\"] = self.ctrlSizeInputAttr.getValue()\n data[\"ctrlXfo\"] = self.mainCtrl.xfo\n\n return data", "def __init__(self, parent=None, args=[], macros=None):\n super(PyDMChartingDisplay, self).__init__(parent=parent, args=args, macros=macros)\n\n self.channel_map = dict()\n self.setWindowTitle(\"PyDM Charting Tool\")\n\n self.main_layout = QVBoxLayout()\n self.body_layout = QVBoxLayout()\n\n self.pv_layout = QHBoxLayout()\n self.pv_name_line_edt = QLineEdit()\n self.pv_name_line_edt.setAcceptDrops(True)\n self.pv_name_line_edt.installEventFilter(self)\n\n self.pv_protocol_cmb = QComboBox()\n self.pv_protocol_cmb.addItems([\"ca://\", \"archive://\"])\n\n self.pv_connect_push_btn = QPushButton(\"Connect\")\n self.pv_connect_push_btn.clicked.connect(self.add_curve)\n\n self.tab_panel = QTabWidget()\n self.tab_panel.setMaximumWidth(450)\n self.curve_settings_tab = QWidget()\n self.chart_settings_tab = QWidget()\n\n self.charting_layout = QHBoxLayout()\n self.chart = PyDMTimePlot(plot_by_timestamps=False, plot_display=self)\n self.chart.setPlotTitle(\"Time Plot\")\n\n self.splitter = QSplitter()\n\n self.curve_settings_layout = QVBoxLayout()\n self.curve_settings_layout.setAlignment(Qt.AlignTop)\n self.curve_settings_layout.setSizeConstraint(QLayout.SetMinAndMaxSize)\n self.curve_settings_layout.setSpacing(5)\n\n self.crosshair_settings_layout = QVBoxLayout()\n self.crosshair_settings_layout.setAlignment(Qt.AlignTop)\n self.crosshair_settings_layout.setSpacing(5)\n\n self.enable_crosshair_chk = QCheckBox(\"Enable Crosshair\")\n self.cross_hair_coord_lbl = QLabel()\n\n self.curve_settings_inner_frame = QFrame()\n self.curve_settings_inner_frame.setLayout(self.curve_settings_layout)\n\n self.curve_settings_scroll = QScrollArea()\n self.curve_settings_scroll.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)\n self.curve_settings_scroll.setWidget(self.curve_settings_inner_frame)\n\n self.curves_tab_layout = QHBoxLayout()\n self.curves_tab_layout.addWidget(self.curve_settings_scroll)\n\n self.enable_crosshair_chk.setChecked(False)\n self.enable_crosshair_chk.clicked.connect(self.handle_enable_crosshair_checkbox_clicked)\n self.enable_crosshair_chk.clicked.emit(False)\n\n self.chart_settings_layout = QVBoxLayout()\n self.chart_settings_layout.setAlignment(Qt.AlignTop)\n\n self.chart_layout = QVBoxLayout()\n self.chart_panel = QWidget()\n\n self.chart_control_layout = QHBoxLayout()\n self.chart_control_layout.setAlignment(Qt.AlignHCenter)\n self.chart_control_layout.setSpacing(10)\n\n self.view_all_btn = QPushButton(\"View All\")\n 
self.view_all_btn.clicked.connect(self.handle_view_all_button_clicked)\n self.view_all_btn.setEnabled(False)\n\n self.auto_scale_btn = QPushButton(\"Auto Scale\")\n self.auto_scale_btn.clicked.connect(self.handle_auto_scale_btn_clicked)\n self.auto_scale_btn.setEnabled(False)\n\n self.reset_chart_btn = QPushButton(\"Reset\")\n self.reset_chart_btn.clicked.connect(self.handle_reset_chart_btn_clicked)\n self.reset_chart_btn.setEnabled(False)\n\n self.resume_chart_text = \"Resume\"\n self.pause_chart_text = \"Pause\"\n self.pause_chart_btn = QPushButton(self.pause_chart_text)\n self.pause_chart_btn.clicked.connect(self.handle_pause_chart_btn_clicked)\n\n self.title_settings_layout = QVBoxLayout()\n self.title_settings_layout.setSpacing(10)\n\n self.title_settings_grpbx = QGroupBox()\n self.title_settings_grpbx.setFixedHeight(150)\n\n self.import_data_btn = QPushButton(\"Import Data...\")\n self.import_data_btn.clicked.connect(self.handle_import_data_btn_clicked)\n\n self.export_data_btn = QPushButton(\"Export Data...\")\n self.export_data_btn.clicked.connect(self.handle_export_data_btn_clicked)\n\n self.chart_title_lbl = QLabel(text=\"Chart Title\")\n self.chart_title_line_edt = QLineEdit()\n self.chart_title_line_edt.setText(self.chart.getPlotTitle())\n self.chart_title_line_edt.textChanged.connect(self.handle_title_text_changed)\n\n self.chart_change_axis_settings_btn = QPushButton(text=\"Change Axis Settings...\")\n self.chart_change_axis_settings_btn.clicked.connect(self.handle_change_axis_settings_clicked)\n\n self.update_datetime_timer = QTimer(self)\n self.update_datetime_timer.timeout.connect(self.handle_update_datetime_timer_timeout)\n\n self.chart_sync_mode_layout = QVBoxLayout()\n self.chart_sync_mode_layout.setSpacing(5)\n\n self.chart_sync_mode_grpbx = QGroupBox(\"Data Sampling Mode\")\n self.chart_sync_mode_grpbx.setFixedHeight(80)\n\n self.chart_sync_mode_sync_radio = QRadioButton(\"Synchronous\")\n self.chart_sync_mode_async_radio = QRadioButton(\"Asynchronous\")\n self.chart_sync_mode_async_radio.setChecked(True)\n\n self.graph_drawing_settings_layout = QVBoxLayout()\n\n self.chart_redraw_rate_lbl = QLabel(\"Redraw Rate (Hz)\")\n self.chart_redraw_rate_spin = QSpinBox()\n self.chart_redraw_rate_spin.setRange(MIN_REDRAW_RATE_HZ, MAX_REDRAW_RATE_HZ)\n self.chart_redraw_rate_spin.setValue(DEFAULT_REDRAW_RATE_HZ)\n self.chart_redraw_rate_spin.valueChanged.connect(self.handle_redraw_rate_changed)\n\n self.chart_data_sampling_rate_lbl = QLabel(\"Asynchronous Data Sampling Rate (Hz)\")\n self.chart_data_async_sampling_rate_spin = QSpinBox()\n self.chart_data_async_sampling_rate_spin.setRange(MIN_DATA_SAMPLING_RATE_HZ, MAX_DATA_SAMPLING_RATE_HZ)\n self.chart_data_async_sampling_rate_spin.setValue(DEFAULT_DATA_SAMPLING_RATE_HZ)\n self.chart_data_async_sampling_rate_spin.valueChanged.connect(self.handle_data_sampling_rate_changed)\n self.chart_data_sampling_rate_lbl.hide()\n self.chart_data_async_sampling_rate_spin.hide()\n\n self.chart_limit_time_span_layout = QHBoxLayout()\n self.chart_limit_time_span_layout.setSpacing(5)\n\n self.limit_time_plan_text = \"Limit Time Span\"\n self.chart_limit_time_span_chk = QCheckBox(self.limit_time_plan_text)\n self.chart_limit_time_span_chk.hide()\n self.chart_limit_time_span_lbl = QLabel(\"Hours : Minutes : Seconds\")\n self.chart_limit_time_span_hours_line_edt = QLineEdit()\n self.chart_limit_time_span_minutes_line_edt = QLineEdit()\n self.chart_limit_time_span_seconds_line_edt = QLineEdit()\n self.chart_limit_time_span_activate_btn = 
QPushButton(\"Apply\")\n self.chart_limit_time_span_activate_btn.setDisabled(True)\n\n self.chart_ring_buffer_size_lbl = QLabel(\"Ring Buffer Size\")\n self.chart_ring_buffer_size_edt = QLineEdit()\n self.chart_ring_buffer_size_edt.installEventFilter(self)\n self.chart_ring_buffer_size_edt.textChanged.connect(self.handle_buffer_size_changed)\n self.chart_ring_buffer_size_edt.setText(str(DEFAULT_BUFFER_SIZE))\n\n self.show_legend_chk = QCheckBox(\"Show Legend\")\n self.show_legend_chk.setChecked(self.chart.showLegend)\n self.show_legend_chk.clicked.connect(self.handle_show_legend_checkbox_clicked)\n\n self.graph_background_color_layout = QFormLayout()\n\n self.background_color_lbl = QLabel(\"Graph Background Color \")\n self.background_color_btn = QPushButton()\n self.background_color_btn.setStyleSheet(\"background-color: \" + self.chart.getBackgroundColor().name())\n self.background_color_btn.setContentsMargins(10, 0, 5, 5)\n self.background_color_btn.setMaximumWidth(20)\n self.background_color_btn.clicked.connect(self.handle_background_color_button_clicked)\n\n self.axis_settings_layout = QVBoxLayout()\n self.axis_settings_layout.setSpacing(5)\n\n self.show_x_grid_chk = QCheckBox(\"Show x Grid\")\n self.show_x_grid_chk.setChecked(self.chart.showXGrid)\n self.show_x_grid_chk.clicked.connect(self.handle_show_x_grid_checkbox_clicked)\n\n self.show_y_grid_chk = QCheckBox(\"Show y Grid\")\n self.show_y_grid_chk.setChecked(self.chart.showYGrid)\n self.show_y_grid_chk.clicked.connect(self.handle_show_y_grid_checkbox_clicked)\n\n self.axis_color_lbl = QLabel(\"Axis and Grid Color\")\n self.axis_color_lbl.setEnabled(False)\n\n self.axis_color_btn = QPushButton()\n self.axis_color_btn.setStyleSheet(\"background-color: \" + DEFAULT_CHART_AXIS_COLOR.name())\n self.axis_color_btn.setContentsMargins(10, 0, 5, 5)\n self.axis_color_btn.setMaximumWidth(20)\n self.axis_color_btn.clicked.connect(self.handle_axis_color_button_clicked)\n self.axis_color_btn.setEnabled(False)\n\n self.grid_opacity_lbl = QLabel(\"Grid Opacity\")\n self.grid_opacity_lbl.setEnabled(False)\n\n self.grid_opacity_slr = QSlider(Qt.Horizontal)\n self.grid_opacity_slr.setFocusPolicy(Qt.StrongFocus)\n self.grid_opacity_slr.setRange(0, 10)\n self.grid_opacity_slr.setValue(5)\n self.grid_opacity_slr.setTickInterval(1)\n self.grid_opacity_slr.setSingleStep(1)\n self.grid_opacity_slr.setTickPosition(QSlider.TicksBelow)\n self.grid_opacity_slr.valueChanged.connect(self.handle_grid_opacity_slider_mouse_release)\n self.grid_opacity_slr.setEnabled(False)\n\n self.reset_chart_settings_btn = QPushButton(\"Reset Chart Settings\")\n self.reset_chart_settings_btn.clicked.connect(self.handle_reset_chart_settings_btn_clicked)\n\n self.curve_checkbox_panel = QWidget()\n\n self.graph_drawing_settings_grpbx = QGroupBox()\n self.graph_drawing_settings_grpbx.setFixedHeight(270)\n\n self.axis_settings_grpbx = QGroupBox()\n self.axis_settings_grpbx.setFixedHeight(180)\n\n self.app = QApplication.instance()\n self.setup_ui()\n\n self.curve_settings_disp = None\n self.axis_settings_disp = None\n self.chart_data_export_disp = None\n self.chart_data_import_disp = None\n self.grid_alpha = 5\n self.time_span_limit_hours = None\n self.time_span_limit_minutes = None\n self.time_span_limit_seconds = None\n self.data_sampling_mode = ASYNC_DATA_SAMPLING", "def LoadPerspective(self, layout, update=True):\r\n\r\n input = layout\r\n\r\n # check layout string version\r\n # 'layout1' = wxAUI 0.9.0 - wxAUI 0.9.2\r\n # 'layout2' = wxAUI 0.9.2 (wxWidgets 2.8)\r\n index = 
input.find(\"|\")\r\n part = input[0:index].strip()\r\n input = input[index+1:]\r\n \r\n if part != \"layout2\":\r\n return False\r\n\r\n # mark all panes currently managed as docked and hidden\r\n for pane in self._panes:\r\n pane.Dock().Hide()\r\n\r\n # clear out the dock array; this will be reconstructed\r\n self._docks = []\r\n\r\n # replace escaped characters so we can\r\n # split up the string easily\r\n input = input.replace(\"\\\\|\", \"\\a\")\r\n input = input.replace(\"\\\\;\", \"\\b\")\r\n\r\n while 1:\r\n\r\n pane = AuiPaneInfo()\r\n index = input.find(\"|\")\r\n pane_part = input[0:index].strip()\r\n input = input[index+1:]\r\n\r\n # if the string is empty, we're done parsing\r\n if pane_part == \"\":\r\n break\r\n\r\n if pane_part[0:9] == \"dock_size\":\r\n index = pane_part.find(\"=\")\r\n val_name = pane_part[0:index]\r\n value = pane_part[index+1:]\r\n\r\n index = val_name.find(\"(\")\r\n piece = val_name[index+1:]\r\n index = piece.find(\")\")\r\n piece = piece[0:index]\r\n\r\n vals = piece.split(\",\")\r\n dir = int(vals[0])\r\n layer = int(vals[1])\r\n row = int(vals[2])\r\n size = int(value)\r\n \r\n dock = AuiDockInfo()\r\n dock.dock_direction = dir\r\n dock.dock_layer = layer\r\n dock.dock_row = row\r\n dock.size = size\r\n self._docks.append(dock)\r\n \r\n continue\r\n\r\n # Undo our escaping as LoadPaneInfo needs to take an unescaped\r\n # name so it can be called by external callers\r\n pane_part = pane_part.replace(\"\\a\", \"|\")\r\n pane_part = pane_part.replace(\"\\b\", \";\")\r\n\r\n pane = self.LoadPaneInfo(pane_part, pane)\r\n\r\n p = self.GetPane(pane.name)\r\n \r\n if not p.IsOk():\r\n if pane.IsNotebookControl():\r\n # notebook controls - auto add...\r\n self._panes.append(pane)\r\n indx = self._panes.index(pane)\r\n else:\r\n # the pane window couldn't be found\r\n # in the existing layout -- skip it\r\n continue\r\n\r\n else:\r\n indx = self._panes.index(p)\r\n pane.window = p.window\r\n pane.frame = p.frame\r\n pane.buttons = p.buttons\r\n self._panes[indx] = pane\r\n\r\n if isinstance(pane.window, auibar.AuiToolBar) and (pane.IsFloatable() or pane.IsDockable()):\r\n pane.window.SetGripperVisible(True)\r\n \r\n if update:\r\n self.Update()\r\n\r\n return True", "def create_dockable(self, dockable_name, widget):\n pass", "def GetDockWidget(self):\n return self._dock_widget", "def UpdateDockGuide(self, pos):\r\n\r\n self.target.UpdateDockGuide(pos)", "def __init__(self, other=None):\r\n\r\n if other:\r\n self.Assign(other)\r\n else:\r\n # window representing the docking target\r\n self.host = None\r\n # dock direction (top, bottom, left, right, center)\r\n self.dock_direction = AUI_DOCK_NONE", "def OnDocked(self, event):\n self._floating = False\n self._dock_area = event.GetPane().dock_direction\n wx.PostEvent(self, wxDockPaneDockedEvent())", "def create(self, verbose=False):\r\n # delete the window if its handle exists\r\n if cmds.window(self.window, exists=True):\r\n cmds.deleteUI(self.window)\r\n # initialize the window as a pane for docking\r\n self.window = cmds.loadUI(uiFile=self.uiFile, verbose=verbose)\r\n #layoutWin = cmds.paneLayout(configuration='single')\r\n # create a dockControl and parent the control to layoutWin\r\n cmds.dockControl(allowedArea='all', area='right', floating=False, \r\n height=cmds.window(self.window, query=True, height=True), \r\n content=self.window, label='Docked Cone Pointer Window')\r\n cmds.showWindow(self.window)", "def list():\n return [Dock.OMNI, Dock.LEFT, Dock.RIGHT]", "def createDockArea(self):\n 
self.centralDock = CentralDockArea(self.globalSession)\n self.setCentralWidget(self.centralDock)", "def __init__(self, df_flow, x1='x', x2='y', x3_value=None,resolution=100,x1_center=0.0,x2_center=0.0, D=None, invert_x1=False,\n crop_x1 = None, crop_x2=None):\n\n # Assign the axis names\n self.x1_name = x1\n self.x2_name = x2\n self.x3_name = [x3 for x3 in ['x','y','z'] if x3 not in [x1,x2]][0]\n\n # Find the nearest value in 3rd dimension\n search_values = np.array(sorted(df_flow[self.x3_name].unique()))\n nearest_idx = (np.abs(search_values-x3_value)).argmin()\n nearest_value = search_values[nearest_idx]\n print('Nearest value to in %s of %.2f is %.2f' % (self.x3_name, x3_value,nearest_value))\n \n # Get a sub-frame of only this 3rd dimension value\n df_sub = df_flow[df_flow[self.x3_name]==nearest_value]\n\n # Make sure cropping is valid\n if crop_x1:\n if crop_x1[0] < min(df_sub[x1]):\n raise Exception(\"Invalid x_1 minimum on cropping\")\n if crop_x1[1] > max(df_sub[x1]):\n raise Exception(\"Invalid x_1 maximum on cropping\")\n\n if crop_x2:\n if crop_x2[0] < min(df_sub[x2]):\n raise Exception(\"Invalid x_2 minimum on cropping\")\n if crop_x2[1] > max(df_sub[x2]):\n raise Exception(\"Invalid x_2 maximum on cropping\")\n\n # If cropping x1 do it now\n # if crop_x1:\n # df_sub = df_sub[(df_sub[x1] >= crop_x1[0]) & (df_sub[x1] <= crop_x1[1])]\n # if crop_x2:\n # df_sub = df_sub[(df_sub[x2] >= crop_x2[0]) & (df_sub[x2] <= crop_x2[1])]\n\n # Store the relevent values\n self.x1_in = df_sub[x1]\n self.x2_in = df_sub[x2]\n self.u_in = df_sub['u']\n self.v_in = df_sub['v']\n self.w_in = df_sub['w']\n\n # Save the desired resolution\n self.res = resolution\n\n # Grid the data, if cropping available use that\n if crop_x1:\n # self.x1_lin = np.linspace(min(self.x1_in), max(self.x1_in), resolution)\n self.x1_lin = np.linspace(crop_x1[0], crop_x1[1], resolution)\n else:\n self.x1_lin = np.linspace(min(self.x1_in), max(self.x1_in), resolution)\n if crop_x2:\n # self.x2_lin = np.linspace(min(self.x2_in), max(self.x2_in), resolution)\n self.x2_lin = np.linspace(crop_x2[0], crop_x2[1], resolution)\n else:\n self.x2_lin = np.linspace(min(self.x2_in), max(self.x2_in), resolution)\n \n # Mesh and interpolate u, v and w\n # print(self.x1_lin)\n # print(sorted(self.x1_in))\n self.x1_mesh, self.x2_mesh = np.meshgrid(self.x1_lin, self.x2_lin)\n self.u_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.u_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n self.v_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.v_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n self.w_mesh = griddata(np.column_stack([self.x1_in, self.x2_in]), self.w_in,(self.x1_mesh.flatten(), self.x2_mesh.flatten()), method='cubic')\n \n # Save flat vectors\n self.x1_flat = self.x1_mesh.flatten()\n self.x2_flat = self.x2_mesh.flatten()\n\n # Save u-cubed\n self.u_cubed = self.u_mesh ** 3\n\n\n # Save re-centing points for visualization\n self.x1_center = x1_center\n self.x2_center = x2_center\n\n\n # If inverting, invert x1, and x1_center\n if invert_x1:\n self.x1_mesh = self.x1_mesh * -1\n self.x1_lin = self.x1_lin * -1\n self.x1_flat = self.x1_flat * -1 \n self.x1_center = self.x1_center * -1 \n self.v_mesh =self.v_mesh * -1\n\n\n # Set the diamater which will be used in visualization\n # Annalysis in D or meters?\n if D == None:\n self.plot_in_D = False\n self.D = 1.\n else:\n self.plot_in_D = True\n self.D = D", "def GetDockArea(self):\n return self._dock_area", "def 
dock_dx_dy(block1, dock1n, block2, dock2n):\n _dock1 = block1.docks[dock1n]\n _dock2 = block2.docks[dock2n]\n _d1type, _d1dir, _d1x, _d1y = _dock1[0:4]\n _d2type, _d2dir, _d2x, _d2y = _dock2[0:4]\n if block1 == block2:\n return (100, 100)\n if _d1dir == _d2dir:\n return (100, 100)\n if (_d2type is not 'number') or (dock2n is not 0):\n if block1.connections is not None and \\\n dock1n < len(block1.connections) and \\\n block1.connections[dock1n] is not None:\n return (100, 100)\n if block2.connections is not None and \\\n dock2n < len(block2.connections) and \\\n block2.connections[dock2n] is not None:\n return (100, 100)\n if _d1type != _d2type:\n if block1.name in STRING_OR_NUMBER_ARGS:\n if _d2type == 'number' or _d2type == 'string':\n pass\n elif block1.name in CONTENT_ARGS:\n if _d2type in CONTENT_BLOCKS:\n pass\n else:\n return (100, 100)\n (_b1x, _b1y) = block1.spr.get_xy()\n (_b2x, _b2y) = block2.spr.get_xy()\n return ((_b1x + _d1x) - (_b2x + _d2x), (_b1y + _d1y) - (_b2y + _d2y))", "def port2_docked_time(self):\n return self._port2_docked_time", "def createCfg_prep_dcard(self, jobOptions):\n category_output = self.channel\n if jobOptions['label']:\n category_output += \"_%s\" % jobOptions['label']\n lines = []\n lines.append(\"process.fwliteInput.fileNames = cms.vstring('%s')\" % jobOptions['inputFile'])\n lines.append(\"process.fwliteOutput.fileName = cms.string('%s')\" % jobOptions['datacardFile'])\n lines.append(\"process.prepareDatacards.processesToCopy = cms.vstring(%s)\" % self.prep_dcard_processesToCopy)\n lines.append(\"process.prepareDatacards.signals = cms.vstring(%s)\" % self.prep_dcard_signals)\n lines.append(\"process.prepareDatacards.makeSubDir = cms.bool(True)\")\n lines.append(\"process.prepareDatacards.categories = cms.VPSet(\")\n for charge in [\"OS\", \"SS\"]:\n for ptEtaBin in [\n \"BB_LL\", \"BB_ML\", \"BB_MM\", \"BB_HL\", \"BB_HM\", \"BB_HH\",\n \"EE_LL\", \"EE_ML\", \"EE_MM\", \"EE_HL\", \"EE_HM\", \"EE_HH\",\n \"BE_LL\", \"BE_ML\", \"EB_ML\",\"BE_MM\", \"BE_HL\", \"EB_HL\",\n \"BE_HM\", \"EB_HM\", \"BE_HH\", \"total\",\n ]:\n lines.append(\" cms.PSet(\")\n lines.append(\" input = cms.string('%s/%s'),\" % (charge, ptEtaBin))\n lines.append(\" output = cms.string('ttH_%s_%s_%s')\" % (self.channel, charge, ptEtaBin))\n lines.append(\" ),\")\n lines.append(\")\")\n lines.append(\"process.prepareDatacards.histogramToFit = cms.string('%s')\" % jobOptions['histogramToFit'])\n lines.append(\"process.prepareDatacards.sysShifts = cms.vstring(%s)\" % systematics.muon_E)\n create_cfg(self.cfgFile_prep_dcard, jobOptions['cfgFile_modified'], lines)", "def WindingDesign(main):\n oEditor = main['ANSYS']['oEditor']\n\n # Slots number\n Slots = main['ANSYS']['FixedVariables']['Slots']\n\n # SlotType\n SlotType = main['ANSYS']['FixedVariables']['SlotType']\n\n # Geimetric parameters\n g = main['ANSYS']['DesignProperties']['Stator']['g']\n\n Hs0 = main['ANSYS']['DesignProperties']['Slot']['Hs0']\n Hs1 = main['ANSYS']['DesignProperties']['Slot']['Hs1']\n Hs2 = main['ANSYS']['DesignProperties']['Slot']['Hs2']\n Bs1 = main['ANSYS']['DesignProperties']['Slot']['Bs1']\n Bs2 = main['ANSYS']['DesignProperties']['Slot']['Bs2']\n\n DiaGap = main['ANSYS']['DesignProperties']['Rotor']['DiaGap']\n\n # Coils Arrange ABC\n PhasesABC = main['ANSYS']['Winding']['ABC']\n\n # Color used for phases\n Color = main['ANSYS']['Winding']['Color']\n\n oEditor.CreateUserDefinedPart(\n [\n \"NAME:UserDefinedPrimitiveParameters\",\n \"DllName:=\"\t\t, \"RMxprt/LapCoil.dll\",\n \"Version:=\"\t\t, 
\"16.0\",\n \"NoOfParameters:=\"\t, 22,\n \"Library:=\"\t\t, \"syslib\",\n [\n \"NAME:ParamVector\",\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"DiaGap\",\n \"Value:=\"\t\t, \"DiaGap+g*2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"DiaYoke\",\n \"Value:=\"\t\t, \"DiaYoke\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Length\",\n \"Value:=\"\t\t, \"0mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Skew\",\n \"Value:=\"\t\t, \"0deg\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Slots\",\n \"Value:=\"\t\t, str(int(Slots))\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"SlotType\",\n \"Value:=\"\t\t, str(int(SlotType))\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Hs0\",\n \"Value:=\"\t\t, \"Hs0\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Hs1\",\n \"Value:=\"\t\t, \"Hs1\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Hs2\",\n \"Value:=\"\t\t, \"Hs2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Bs0\",\n \"Value:=\"\t\t, \"Bs0\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Bs1\",\n \"Value:=\"\t\t, \"Bs1\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Bs2\",\n \"Value:=\"\t\t, \"Bs2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Rs\",\n \"Value:=\"\t\t, \"Rs\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"FilletType\",\n \"Value:=\"\t\t, \"0\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"Layers\",\n \"Value:=\"\t\t, \"2\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"CoilPitch\",\n \"Value:=\"\t\t, \"1\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"EndExt\",\n \"Value:=\"\t\t, \"5mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"SpanExt\",\n \"Value:=\"\t\t, \"25mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"BendAngle\",\n \"Value:=\"\t\t, \"0deg\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"SegAngle\",\n \"Value:=\"\t\t, \"10deg\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"LenRegion\",\n \"Value:=\"\t\t, \"200mm\"\n ],\n [\n \"NAME:Pair\",\n \"Name:=\"\t\t, \"InfoCoil\",\n \"Value:=\"\t\t, \"0\"\n ]\n ]\n ],\n [\n \"NAME:Attributes\",\n \"Name:=\"\t\t, \"LapCoil1\",\n \"Flags:=\"\t\t, \"\",\n \"Color:=\"\t\t, \"(143 175 143)\",\n \"Transparency:=\"\t, 0,\n \"PartCoordinateSystem:=\", \"Global\",\n \"UDMId:=\"\t\t, \"\",\n \"MaterialValue:=\"\t, \"\\\"copper\\\"\",\n \"SurfaceMaterialValue:=\", \"\\\"\\\"\",\n \"SolveInside:=\"\t\t, True,\n \"ShellElement:=\"\t, False,\n \"ShellElementThickness:=\", \"0mm\",\n \"IsMaterialEditable:=\"\t, True,\n \"UseMaterialAppearance:=\", False,\n \"IsLightweight:=\"\t, False\n ]\n )\n\n # Body Separation\n oEditor.SeparateBody(\n [\n \"NAME:Selections\",\n \"Selections:=\"\t\t, \"LapCoil1\",\n \"NewPartsModelFlag:=\"\t, \"Model\"\n ],\n [\n \"CreateGroupsForNewObjects:=\", False\n ]\n )\n\n # Average Slot Width\n AverWidth = (Bs2 + Bs1)/2\n\n # Average Radius\n AverRadius = DiaGap/2 + g + Hs0 + Hs1 + Hs2*0.75\n\n # Angle to shift and find the kth tooth\n ShiftSlot = 1/Slots*np.pi\n\n # Angle to fond the corrent layer\n ShiftLayer = np.arctan(AverWidth/4/AverRadius)\n\n # List to save the coils sides names\n WindingNames = [[], [], []]\n\n # Phases name to employed\n PhaseNames = ['A', 'B', 'C']\n\n for phase, row in enumerate(PhasesABC):\n\n PhaseName = [[], []]\n\n for coil, slot in enumerate(row):\n\n SlotAngle = np.abs(slot)/Slots*2*np.pi - ShiftSlot\n\n if coil % 2 == 1:\n SlotAngle = SlotAngle - ShiftLayer\n\n else:\n SlotAngle = SlotAngle + ShiftLayer\n\n x = np.cos(SlotAngle)*AverRadius\n y = np.sin(SlotAngle)*AverRadius\n\n Name0 = oEditor.GetBodyNamesByPosition(\n [\n \"NAME:Parameters\",\n \"XPosition:=\", str(x)+\"mm\",\n 
\"YPosition:=\", str(y)+\"mm\",\n \"ZPosition:=\", \"0mm\"\n ]\n )\n\n C = Color[phase]\n\n if np.sign(slot) == 1:\n\n CoilSideName = PhaseNames[phase]+\"In\"+str(np.abs(coil))\n\n PhaseName[0] += [CoilSideName]\n\n oEditor.ChangeProperty(\n [\n \"NAME:AllTabs\",\n [\n \"NAME:Geometry3DAttributeTab\",\n [\n \"NAME:PropServers\",\n Name0[0]\n ],\n [\n \"NAME:ChangedProps\",\n [\n \"NAME:Name\",\n \"Value:=\"\t\t,\n CoilSideName\n ],\n [\n \"NAME:Color\",\n \"R:=\"\t\t\t, C[0],\n \"G:=\"\t\t\t, C[1],\n \"B:=\"\t\t\t, C[2]\n ],\n\n ]\n ]\n ]\n )\n else:\n\n CoilSideName = PhaseNames[phase]+\"Out\"+str(np.abs(coil))\n\n PhaseName[1] += [CoilSideName]\n\n oEditor.ChangeProperty(\n [\n \"NAME:AllTabs\",\n [\n \"NAME:Geometry3DAttributeTab\",\n [\n \"NAME:PropServers\",\n Name0[0]\n ],\n [\n \"NAME:ChangedProps\",\n [\n \"NAME:Name\",\n \"Value:=\"\t\t,\n CoilSideName\n ],\n [\n \"NAME:Color\",\n \"R:=\"\t\t\t, C[0],\n \"G:=\"\t\t\t, C[1],\n \"B:=\"\t\t\t, C[2],\n ],\n\n ]\n ]\n ]\n )\n\n WindingNames[phase] += PhaseName\n\n main['ANSYS']['Winding']['CoilNames'] = WindingNames\n\n return main", "def dop_comp(field) :\n dop = fits.open(field+'/'+field+'_rv.fits')\n r13 = apload.ApLoad(apred='r13')\n old = r13.apField(field)\n\n i1,i2 = match.match(dop[1].data['APOGEE_ID'],old[1].data['APOGEE_ID'])\n print(len(dop[1].data),len(old[1].data),len(i1))\n\n fig,ax=plots.multi(1,1)\n plots.plotc(ax,dop[1].data['RV_TEFF'][i1],dop[1].data['VHELIO_AVG'][i1]-old[1].data['VHELIO_AVG'][i2],dop[1].data['VSCATTER'][i1])\n\n j=np.argsort(np.abs(dop[1].data['VHELIO_AVG'][i1]-old[1].data['VHELIO_AVG'][i2],dop[1].data['VSCATTER'][i1]))\n\n plots._data = dop[1].data\n plots._id_cols=['APOGEE_ID']\n plots.event(fig)\n key=' '\n sf,sax=plots.multi(1,2,sharex=True,hspace=0.001)\n while key != 'e' :\n x,y,key,index = plots.mark(fig,index=True)\n obj = dop[1].data['APOGEE_ID'][i1[index]]\n #jv = np.where(dop[2].data['APOGEE_ID'] == dop[1].data['APOGEE_ID'][i1])[0]\n out=pickle.load(open(field+'/'+obj+'_out.pkl','rb'))\n print(obj,old[1].data['APOGEE_ID'][i2[index]])\n print(out[0])\n sax[0].cla()\n spec=old[2].data['SPEC'][i2[index]]\n plots.plotl(sax[0],old[3].data['WAVE'][0,:],spec/convolve(spec,np.ones(500)/500,mode='same'),xr=[15000,17000],yr=[0.5,1.5])\n for mod,obs in zip(out[2],out[3]) :\n sax[1].cla()\n for chip in range(3) :\n plots.plotl(sax[1],obs.wave[:,chip],obs.flux[:,chip],color='k',yr=[0.5,1.5])\n gd = np.where(obs.mask[:,chip] == False)[0]\n plots.plotl(sax[1],obs.wave[gd,chip],obs.flux[gd,chip],color='g')\n plots.plotl(sax[1],mod.wave[:,chip],mod.flux[:,chip],color='r')\n plt.draw()\n input('hit a key: ')", "def dock(self,\n protein_file,\n ligand_file,\n centroid=None,\n box_dims=None,\n dry_run=False):\n protein_docked, ligand_docked = self.pose_generator.generate_poses(\n protein_file, ligand_file, centroid, box_dims, dry_run)\n if not dry_run:\n score = self.pose_scorer.score(protein_docked, ligand_docked)\n else:\n score = np.zeros((1,))\n return (score, (protein_docked, ligand_docked))", "def dock(self):\n if not self.dockingClient.gh or not self.dockingClient.get_state() in (GoalStatus.SUCCEEDED, GoalStatus.PENDING, GoalStatus.ACTIVE):\n self.dockingClient.send_goal(AutoDockingGoal()) #TODO test if parameter is required\n rospy.loginfo(self.name + \": docking\") \n if self.dockingClient.get_state() == GoalStatus.SUCCEEDED: \n self.dockingClient.stop_tracking_goal()\n rospy.loginfo(self.name + \": docking succeeded\")\n self.docked = True \n return True\n return False", "def config_gb_md(self):\n\n 
self._config_md()\n self.title = \"GB MD Simulation\"\n self.cntrl[\"cut\"] = 999.0\n self.cntrl[\"igb\"] = 1\n self.cntrl[\"ntp\"] = 0\n self.cntrl[\"barostat\"] = 0", "def GetTotalPixSizeAndProportion(self, dock):\r\n\r\n totalPixsize = 0\r\n totalProportion = 0\r\n\r\n # determine the total proportion of all resizable panes,\r\n # and the total size of the dock minus the size of all\r\n # the fixed panes\r\n for tmpPane in dock.panes:\r\n \r\n if tmpPane.IsFixed():\r\n continue\r\n\r\n totalProportion += tmpPane.dock_proportion\r\n\r\n if dock.IsHorizontal():\r\n totalPixsize += tmpPane.rect.width\r\n else:\r\n totalPixsize += tmpPane.rect.height\r\n\r\n## if tmpPane.min_size.IsFullySpecified():\r\n## \r\n## if dock.IsHorizontal():\r\n## totalPixsize -= tmpPane.min_size.x\r\n## else:\r\n## totalPixsize -= tmpPane.min_size.y\r\n \r\n return totalPixsize, totalProportion", "def docked_time(self):\n return self._docked_time", "def setup(self):\r\n # productive\r\n profprint()\r\n #-----------------------------------------------------------------------------\r\n # Needle Finder Logic\r\n logic = self.logic\r\n\r\n #Report Frame########################################\r\n self.__reportFrame = ctk.ctkCollapsibleButton()\r\n self.__reportFrame.text = \"Segmentation Report\"\r\n self.__reportFrame.collapsed = 1\r\n reportFrame = qt.QFormLayout(self.__reportFrame)\r\n\r\n # segmentation report\r\n self.analysisGroupBox = qt.QGroupBox()\r\n self.analysisGroupBox.setFixedHeight(330)\r\n self.analysisGroupBox.setTitle('Segmentation Report')\r\n reportFrame.addRow(self.analysisGroupBox)\r\n self.analysisGroupBoxLayout = qt.QFormLayout(self.analysisGroupBox)\r\n\r\n #-----------------------------------------------------------------------------\r\n\r\n #Report Frame Control Point########################################\r\n self.__reportFrameCTL = ctk.ctkCollapsibleButton()\r\n self.__reportFrameCTL.text = \"Manual Segmentation Report\"\r\n self.__reportFrameCTL.collapsed = 1\r\n reportFrameCTL = qt.QFormLayout(self.__reportFrameCTL)\r\n\r\n # manual segmentation report\r\n self.analysisGroupBoxCTL = qt.QGroupBox()\r\n self.analysisGroupBoxCTL.setFixedHeight(330)\r\n self.analysisGroupBoxCTL.setTitle('Manual Segmentation Report')\r\n reportFrameCTL.addRow(self.analysisGroupBoxCTL)\r\n self.analysisGroupBoxLayoutCTL = qt.QFormLayout(self.analysisGroupBoxCTL)\r\n\r\n #-----------------------------------------------------------------------------\r\n\r\n #Segmentation Frame##########################################\r\n self.__segmentationFrame = ctk.ctkCollapsibleButton()\r\n self.__segmentationFrame.text = \"Segmentation\"\r\n self.__segmentationFrame.collapsed = 0\r\n segmentationFrame = qt.QFormLayout(self.__segmentationFrame)\r\n\r\n # 1 Define template\r\n self.templateSliceButton = qt.QPushButton('1. Select Current Axial Slice as Seg. Limit (current: None)')\r\n segmentationFrame.addRow(self.templateSliceButton)\r\n self.templateSliceButton.connect('clicked()', logic.placeAxialLimitMarker)\r\n self.templateSliceButton.setEnabled(1)\r\n\r\n # 2 give needle tips\r\n self.fiducialButton = qt.QPushButton('2. 
Start Giving Needle Tips [CTRL + ENTER]')\r\n self.fiducialButton.checkable = True\r\n segmentationFrame.addRow(self.fiducialButton)\r\n self.fiducialButton.connect('toggled(bool)', self.onStartStopGivingNeedleTipsToggled)\r\n self.fiducialButton.setEnabled(0)\r\n\r\n # New insertion - create new set of needles with different colors\r\n self.newInsertionButton = None\r\n # self.newInsertionButton = qt.QPushButton('New Needle Set')\r\n # segmentationFrame.addRow(self.newInsertionButton)\r\n # self.newInsertionButton.connect('clicked()', logic.newInsertionNeedleSet)\r\n # self.newInsertionButton.setEnabled(0)\r\n\r\n # Delete Needle Button\r\n self.deleteNeedleButton = qt.QPushButton('Delete Last Segmented Needle [Ctrl + Z]')\r\n segmentationFrame.addRow(self.deleteNeedleButton)\r\n # self.deleteNeedleButton.connect('clicked()', logic.deleteAllAutoNeedlesFromScene)\r\n self.deleteNeedleButton.connect('clicked()', logic.deleteLastNeedle)\r\n self.deleteNeedleButton.setEnabled(0)\r\n\r\n # Reset Needle Detection Button\r\n self.resetDetectionButton = qt.QPushButton('Reset Needle Detection (Start Over)')\r\n segmentationFrame.addRow(self.resetDetectionButton)\r\n self.resetDetectionButton.connect('clicked()', logic.resetNeedleDetection)\r\n self.resetDetectionButton.setEnabled(0)\r\n\r\n # auto segmentation report\r\n segmentationFrame.addRow(self.__reportFrame)\r\n\r\n #Validation Frame##########################################\r\n self.__validationFrame = ctk.ctkCollapsibleButton()\r\n self.__validationFrame.text = \"Validation\"\r\n self.__validationFrame.collapsed = 0 # <<<\r\n validationFrame = qt.QFormLayout(self.__validationFrame)\r\n\r\n self.startGivingControlPointsButton = qt.QPushButton('Start Giving Control Points')\r\n self.startGivingControlPointsButton.checkable = True\r\n self.startGivingControlPointsButton.setStyleSheet(\"QPushButton {background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #ccffcc, stop: 1 #f3fff3)}\"\r\n \"QPushButton:checked{background-color: red;}\")\r\n\r\n self.startGivingControlPointsButton.connect('toggled(bool)', self.onStartStopGivingValidationControlPointsToggled)\r\n\r\n self.startAssistModeButton = qt.QPushButton('Assisted Manual Segmentation')\r\n self.startAssistModeButton.checkable = True\r\n self.startAssistModeButton.connect('toggled(bool)', self.onStartAssistModeToggled)\r\n\r\n self.validationNeedleButton = qt.QPushButton('Next Validation Needle: (0)->(1)')\r\n self.validationNeedleButton.toolTip = \"By clicking on this button, you will increment the number of the needle\"\r\n self.validationNeedleButton.toolTip += \"that you want to manually segment. Thus, the points you will add will be used to draw a new needle.<br/>\"\r\n self.validationNeedleButton.toolTip += \"<b>Warning:<b> You can/'t add any more points to the current needle after clicking here\"\r\n\r\n self.validationNeedleButton.connect('clicked()', logic.validationNeedle)\r\n\r\n self.drawValidationNeedlesButton = qt.QPushButton('Render Manual Needle 0')\r\n self.drawValidationNeedlesButton.toolTip = \"Redraw every manually segmented needles. This is usefull for example if you moved a control point, or after you added a new needle\"\r\n\r\n self.drawValidationNeedlesButton.connect('clicked()', logic.drawValidationNeedles)\r\n\r\n self.startValidationButton = qt.QPushButton('Start Evaluation')\r\n self.startValidationButton.toolTip = \"Launch tracking algo. 
from the tip of the manually segmented needles\"\r\n\r\n self.startValidationButton.connect('clicked()', logic.startValidation)\r\n #self.startValidationButton.setStyleSheet(\"background-color: yellow\")\r\n self.startValidationButton.setStyleSheet(\"background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #f7f700, stop: 1 #dbdb00)\");\r\n\r\n # Reset Needle Validation Button\r\n self.resetValidationButton = qt.QPushButton('Reset Manual Segmentation')\r\n self.templateRegistrationButton = qt.QPushButton('[Beta] Template Registration')\r\n\r\n # Hide Markers Button\r\n self.hideAnnotationTextButton = qt.QPushButton('Hide Marker Texts')\r\n self.hideAnnotationTextButton.checkable = True\r\n\r\n # Undo Button\r\n self.undoButton = qt.QPushButton('Undo Fiducial Mvt')\r\n self.undoButton.checkable = False\r\n\r\n\r\n self.resetValidationButton.connect('clicked()', logic.resetNeedleValidation)\r\n self.templateRegistrationButton.connect('clicked()', logic.autoregistration)\r\n self.hideAnnotationTextButton.connect('clicked()', logic.hideAnnotations)\r\n self.undoButton.connect('clicked()', logic.undoFid)\r\n\r\n self.editNeedleTxtBox = qt.QSpinBox()\r\n self.editNeedleTxtBox.connect(\"valueChanged(int)\", logic.changeValue)\r\n editLabel = qt.QLabel('Choose Needle:')\r\n\r\n # Choose needle\r\n self.configFrameCTL = qt.QFrame()\r\n self.configFrameCTL.setLayout(qt.QHBoxLayout())\r\n\r\n self.configFrameCTL.layout().addWidget(editLabel)\r\n self.configFrameCTL.layout().addWidget(self.editNeedleTxtBox)\r\n self.configFrameCTL.layout().addWidget(self.validationNeedleButton)\r\n\r\n # validationFrame.addRow(editLabel, self.editNeedleTxtBox)\r\n # validationFrame.addRow(self.validationNeedleButton)\r\n validationFrame.layout().addRow(self.configFrameCTL)\r\n validationFrame.addRow(self.startGivingControlPointsButton)\r\n validationFrame.addRow(self.startAssistModeButton)\r\n validationFrame.addRow(self.drawValidationNeedlesButton)\r\n validationFrame.addRow(self.startValidationButton)\r\n validationFrame.addRow(self.resetValidationButton)\r\n validationFrame.addRow(self.hideAnnotationTextButton)\r\n validationFrame.addRow(self.undoButton)\r\n #validationFrame.addRow(self.templateRegistrationButton)\r\n validationFrame.addRow(self.__reportFrameCTL)\r\n\r\n # self.scrollPointButton = qt.QPushButton('Scroll Ctrl Pt for Needle ' + str(self.editNeedleTxtBox.value))\r\n # validationFrame.addRow(self.scrollPointButton)\r\n # self.scrollPointButton.connect('clicked()', logic.scrollPoint)\r\n\r\n # Needle detection parameters#################################\r\n self.__parameterFrame = ctk.ctkCollapsibleButton()\r\n self.__parameterFrame.text = \"Needle Detection Parameters (Developers)\"\r\n self.__parameterFrame.collapsed = 0\r\n parameterFrame = qt.QFormLayout(self.__parameterFrame)\r\n\r\n # Load/Save/Reset\r\n self.configFrame = qt.QFrame()\r\n self.configFrame.setLayout(qt.QHBoxLayout())\r\n parameterFrame.layout().addRow(self.configFrame)\r\n self.loadButton = qt.QPushButton()\r\n self.loadButton.text = \"Load Parameters\"\r\n self.loadButton.checkable = False\r\n self.loadButton.toolTip = \"Click to load parameters from a configuration file.\"\r\n self.loadButton.connect('clicked()', self.onLoad)\r\n self.saveButton = qt.QPushButton()\r\n self.saveButton.checkable = False\r\n self.saveButton.text = \"Save Parameters\"\r\n self.saveButton.toolTip = \"Click to save the parameters in a configuration file.\"\r\n self.saveButton.connect('clicked()', self.onSave)\r\n self.resetParametersButton 
= qt.QPushButton()\r\n self.resetParametersButton.checkable = False\r\n self.resetParametersButton.text = \"Reset Default Parameters\"\r\n self.resetParametersButton.toolTip = \"Click to reset the default parameters from default.cfg\"\r\n self.resetParametersButton.connect('clicked()', self.onResetParameters)\r\n self.configFrame.layout().addWidget(self.loadButton)\r\n self.configFrame.layout().addWidget(self.saveButton)\r\n self.configFrame.layout().addWidget(self.resetParametersButton)\r\n\r\n # Auto correct tip position?\r\n self.autoCorrectTip = qt.QCheckBox('Auto correct tip position?')\r\n parameterFrame.addRow(self.autoCorrectTip)\r\n self.autoCorrectTip.setChecked(0)\r\n\r\n # Look for needles in CT?\r\n self.invertedContrast = qt.QCheckBox('Search for bright needles (CT)?')\r\n parameterFrame.addRow(self.invertedContrast)\r\n # Compute gradient?\r\n self.gradient = qt.QCheckBox('Compute gradient?')\r\n self.gradient.setChecked(1)\r\n parameterFrame.addRow(self.gradient)\r\n\r\n # Filter ControlPoints?\r\n self.filterControlPoints = qt.QCheckBox('Filter Control Points?')\r\n self.filterControlPoints.setChecked(0)\r\n # parameterFrame.addRow(self.filterControlPoints)\r\n\r\n # Draw Fiducial Points?\r\n self.drawFiducialPoints = qt.QCheckBox('Draw Control Points?')\r\n self.drawFiducialPoints.setChecked(0)\r\n parameterFrame.addRow(self.drawFiducialPoints)\r\n\r\n # Auto find Tips: Tracking in +z and -z direction\r\n self.autoStopTip = qt.QCheckBox('Tracking in both directions')\r\n self.autoStopTip.setChecked(0)\r\n parameterFrame.addRow(self.autoStopTip)\r\n\r\n # Extend Needle to the wanted value\r\n self.extendNeedle = qt.QCheckBox('Extend Needle')\r\n self.extendNeedle.setChecked(0)\r\n parameterFrame.addRow(self.extendNeedle)\r\n\r\n # Real Needle Value (used to extend the needle)\r\n realNeedleLengthLabel = qt.QLabel('Real Needle Length (mm):')\r\n self.realNeedleLength = qt.QSpinBox()\r\n self.realNeedleLength.setMinimum(0.1)\r\n self.realNeedleLength.setMaximum(1500)\r\n self.realNeedleLength.setValue(240)\r\n parameterFrame.addRow(realNeedleLengthLabel, self.realNeedleLength)\r\n\r\n # Max Needle Length?\r\n self.maxLength = qt.QCheckBox('Max Needle Length?')\r\n self.maxLength.setChecked(1)\r\n parameterFrame.addRow(self.maxLength)\r\n\r\n # Add Gaussian Estimation?\r\n self.gaussianAttenuationButton = qt.QCheckBox('Add Gaussian Prob. 
Attenuation?')\r\n self.gaussianAttenuationButton.setChecked(1)\r\n parameterFrame.addRow(self.gaussianAttenuationButton)\r\n\r\n # nb points per line spin box\r\n # ## previously 4 - try with 20\r\n self.sigmaValue = qt.QSpinBox()\r\n self.sigmaValue.setMinimum(0.1)\r\n self.sigmaValue.setMaximum(500)\r\n self.sigmaValue.setValue(20)\r\n sigmaValueLabel = qt.QLabel(\"Sigma Value (exp(-x^2/(2*(sigma/10)^2))): \")\r\n parameterFrame.addRow(sigmaValueLabel, self.sigmaValue)\r\n\r\n # nb points per line spin box\r\n self.gradientPonderation = qt.QSpinBox()\r\n self.gradientPonderation.setMinimum(0.01)\r\n self.gradientPonderation.setMaximum(500)\r\n self.gradientPonderation.setValue(5)\r\n gradientPonderationLabel = qt.QLabel(\"Gradient Ponderation: \")\r\n parameterFrame.addRow(gradientPonderationLabel, self.gradientPonderation)\r\n\r\n # center accuentuation\r\n # ## previously 1, try with 2 ( avoids exiting catheter track)\r\n self.exponent = qt.QSpinBox()\r\n self.exponent.setMinimum(0.01)\r\n self.exponent.setMaximum(500)\r\n self.exponent.setValue(2)\r\n exponentLabel = qt.QLabel(\"Center Ponderation: \")\r\n parameterFrame.addRow(exponentLabel, self.exponent)\r\n\r\n # nb points per line spin box\r\n self.nbPointsPerLine = qt.QSpinBox()\r\n self.nbPointsPerLine.setMinimum(2)\r\n self.nbPointsPerLine.setMaximum(500)\r\n self.nbPointsPerLine.setValue(20)\r\n nbPointsPerLineLabel = qt.QLabel(\"Number of points per line: \")\r\n # parameterFrame.addRow( nbPointsPerLineLabel, self.nbPointsPerLine)\r\n\r\n # nb radius iteration spin box\r\n self.nbRadiusIterations = qt.QSpinBox()\r\n self.nbRadiusIterations.setMinimum(2)\r\n self.nbRadiusIterations.setMaximum(1000)\r\n self.nbRadiusIterations.setValue(13)\r\n nbRadiusIterationsLabel = qt.QLabel(\"Number of distance iterations: \")\r\n # parameterFrame.addRow( nbRadiusIterationsLabel, self.nbRadiusIterations)\r\n\r\n # distance max spin box\r\n self.radiusMax = qt.QSpinBox()\r\n self.radiusMax.setMinimum(0)\r\n self.radiusMax.setMaximum(1000)\r\n self.radiusMax.setValue(5)\r\n distanceMaxLabel = qt.QLabel(\"Radius of cone base (mm): \")\r\n parameterFrame.addRow(distanceMaxLabel, self.radiusMax)\r\n\r\n # nb rotating iterations spin box\r\n self.nbRotatingIterations = qt.QSpinBox()\r\n self.nbRotatingIterations.setMinimum(2)\r\n self.nbRotatingIterations.setMaximum(1000)\r\n self.nbRotatingIterations.setValue(35)\r\n nbRotatingIterationsLabel = qt.QLabel(\"Number of rotating steps: \")\r\n parameterFrame.addRow(nbRotatingIterationsLabel, self.nbRotatingIterations)\r\n\r\n # nb heights per needle spin box\r\n self.numberOfPointsPerNeedle = qt.QSpinBox()\r\n self.numberOfPointsPerNeedle.setMinimum(1)\r\n self.numberOfPointsPerNeedle.setMaximum(50)\r\n self.numberOfPointsPerNeedle.setValue(6)\r\n numberOfPointsPerNeedleLabel = qt.QLabel(\"Number of Control Points: \")\r\n parameterFrame.addRow(numberOfPointsPerNeedleLabel, self.numberOfPointsPerNeedle)\r\n\r\n # nb heights per needle spin box\r\n self.stepsize = qt.QSpinBox()\r\n self.stepsize.setMinimum(1)\r\n self.stepsize.setMaximum(500)\r\n self.stepsize.setValue(5)\r\n stepsizeLabel = qt.QLabel(\"Stepsize: \")\r\n # parameterFrame.addRow( stepsizeLabel, self.stepsize)\r\n\r\n # lenghtNeedle\r\n self.lenghtNeedleParameter = qt.QSpinBox()\r\n self.lenghtNeedleParameter.setMinimum(1)\r\n self.lenghtNeedleParameter.setMaximum(10000)\r\n self.lenghtNeedleParameter.setValue(100)\r\n stepsizeLabel = qt.QLabel(\"Lenght of the needles (mm): \")\r\n parameterFrame.addRow(stepsizeLabel, 
self.lenghtNeedleParameter)\r\n\r\n # radius\r\n self.radiusNeedleParameter = qt.QSpinBox()\r\n self.radiusNeedleParameter.setMinimum(1)\r\n self.radiusNeedleParameter.setMaximum(200)\r\n self.radiusNeedleParameter.setValue(2)\r\n radiusLabel = qt.QLabel(\"Radius of the needles (mm): \")\r\n parameterFrame.addRow(radiusLabel, self.radiusNeedleParameter)\r\n\r\n # algo\r\n self.algoVersParameter = qt.QSpinBox()\r\n self.algoVersParameter.setMinimum(0)\r\n self.algoVersParameter.setMaximum(9)\r\n self.algoVersParameter.setValue(0)\r\n algoLabel = qt.QLabel(\"Needle detection version: \")\r\n parameterFrame.addRow(algoLabel, self.algoVersParameter)\r\n\r\n # Research/dev. area#################################\r\n self.__devFrame = ctk.ctkCollapsibleButton()\r\n self.__devFrame.text = \"R&&D (Developers)\"\r\n self.__devFrame.collapsed = 0\r\n devFrame = qt.QFormLayout(self.__devFrame)\r\n\r\n # #Segment Needle Button\r\n # self.needleButton = qt.QPushButton('Segment Needles')\r\n # segmentationFrame.addRow(self.needleButton)\r\n # self.needleButton.connect('clicked()', self.needleSegmentation)\r\n # self.needleButton.setEnabled(0)\r\n\r\n # Segment Needle Button\r\n # self.needleButton2 = qt.QPushButton('Segment/Update Needles - Python')\r\n # segmentationFrame.addRow(self.needleButton2)\r\n # self.needleButton2.connect('clicked()', self.needleDetection)\r\n\r\n self.skipSegLimitButton = qt.QPushButton('Skip Giving Seg. Limit.')\r\n self.skipSegLimitButton.checkable = False\r\n self.skipSegLimitButton.connect('clicked(bool)', self.onSkipSegLimit)\r\n\r\n # Obturator needle tips\r\n self.fiducialObturatorButton = qt.QPushButton('Start Giving Obturator Needle Tips')\r\n self.fiducialObturatorButton.checkable = True\r\n self.fiducialObturatorButton.connect('toggled(bool)', self.onStartStopGivingObturatorNeedleTipsToggled)\r\n\r\n self.renderObturatorNeedlesButton = qt.QPushButton('Render Obturator Needles')\r\n self.renderObturatorNeedlesButton.checkable = False\r\n self.renderObturatorNeedlesButton.connect('clicked()', self.logic.drawObturatorNeedles)\r\n\r\n self.displayFiducialButton = qt.QPushButton('Display Labels On Needles')\r\n self.displayFiducialButton.connect('clicked()', logic.displayFiducial)\r\n\r\n self.displayContourButton = qt.QPushButton('Draw Radiation Isosurfaces')\r\n self.displayContourButton.checkable = False\r\n self.displayContourButton.connect('clicked()', logic.drawIsoSurfaces)\r\n\r\n self.hideContourButton = qt.QPushButton('Hide Radiation Isosurfaces')\r\n self.hideContourButton.checkable = True\r\n self.hideContourButton.connect('clicked()', logic.hideIsoSurfaces)\r\n self.hideContourButton.setEnabled(0)\r\n\r\n self.filterButton = qt.QPushButton('Preprocessing')\r\n self.filterButton.checkable = False\r\n self.filterButton.connect('clicked()', logic.filterWithSITK)\r\n self.filterButton.setEnabled(1)\r\n\r\n self.parSearchButton = qt.QPushButton('Parameter Search')\r\n self.parSearchButton.checkable = False\r\n self.parSearchButton.connect('clicked()', logic.parSearch)\r\n self.parSearchButton.setEnabled(1)\r\n\r\n self.setAsValNeedlesButton = qt.QPushButton('Use Needles for Validation')\r\n self.setAsValNeedlesButton.checkable = False\r\n self.setAsValNeedlesButton.connect('clicked()', logic.setAllNeedleTubesAsValidationNeedles)\r\n self.setAsValNeedlesButton.setEnabled(1)\r\n self.setAsValNeedlesButton.setStyleSheet(\"background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #f7f700, stop: 1 #dbdb00)\");\r\n\r\n # ## create segmentation editor 
environment:\r\n editorWidgetParent = slicer.qMRMLWidget()\r\n editorWidgetParent.setLayout(qt.QVBoxLayout())\r\n editorWidgetParent.setMRMLScene(slicer.mrmlScene)\r\n editorWidgetParent.hide()\r\n self.editorWidget = None\r\n # The order of statements is important here for resetNeedleDetection to work!!\r\n self.editorWidget = EditorWidget(editorWidgetParent, False)\r\n self.editUtil = None\r\n self.editUtil = self.editorWidget.editUtil # EditorLib.EditUtil.EditUtil()\r\n self.currentLabel = None\r\n self.setWandEffectOptions() # has to be done before setup():\r\n self.editUtil.setCurrentEffect(\"DefaultTool\")\r\n self.editorWidget.setup()\r\n # our mouse mode button\r\n self.editorWidget.toolsBox.actions[\"NeedleFinder\"] = qt.QAction(0) # dummy self.fiducialButton\r\n self.undoRedo = None\r\n self.undoRedo = self.editorWidget.toolsBox.undoRedo\r\n self.currentLabel = self.editUtil.getLabel()\r\n self.editorWidget.editLabelMapsFrame.setText(\"Edit Segmentation\")\r\n self.editorWidget.editLabelMapsFrame.connect('contentsCollapsed(bool)', self.onEditorCollapsed)\r\n editorWidgetParent.show()\r\n self.editUtil.setCurrentEffect(\"NeedleFinder\")\r\n\r\n self.scenePath = qt.QLineEdit()\r\n self.cleanSceneButton = qt.QPushButton('Clean Scene')\r\n self.cleanSceneButton.connect('clicked()', logic.cleanScene)\r\n\r\n # devFrame.addRow(self.displayFiducialButton)\r\n devFrame.addWidget(editorWidgetParent)\r\n devFrame.addRow(self.scenePath)\r\n devFrame.addRow(self.cleanSceneButton)\r\n devFrame.addRow(self.skipSegLimitButton)\r\n devFrame.addRow(self.fiducialObturatorButton)\r\n devFrame.addRow(self.renderObturatorNeedlesButton)\r\n devFrame.addRow(self.displayContourButton)\r\n devFrame.addRow(self.hideContourButton)\r\n devFrame.addRow(self.filterButton)\r\n devFrame.addRow(self.parSearchButton)\r\n devFrame.addRow(self.setAsValNeedlesButton)\r\n devFrame.addRow(self.templateRegistrationButton)\r\n\r\n #put frames on the tab########################################\r\n self.layout.addRow(self.__segmentationFrame)\r\n #self.layout.addRow(self.__reportFrame)\r\n # self.layout.addRow(self.__reportFrameCTL)\r\n self.layout.addRow(self.__validationFrame)\r\n self.layout.addRow(self.__parameterFrame)\r\n self.layout.addRow(self.__devFrame)\r\n\r\n # reset module\r\n resetButton = qt.QPushButton('Reset Module')\r\n resetButton.connect('clicked()', self.onReload)\r\n self.widget = slicer.qMRMLWidget()\r\n self.widget.setLayout(self.layout)\r\n self.layout2.addWidget(self.widget)\r\n\r\n # init table report\r\n self.initTableView() # init the report table\r\n self.initTableViewControlPoints() # init the report table\r\n\r\n # Lauren's feature request: set mainly unused coronal view to sagittal to display ground truth bitmap image (if available)\r\n # Usage after fresh slicer start: 1. Load scene and 2. reference jpg. 3. 
Then open NeedleFinder from Modules selector\r\n vnJPG = slicer.util.getNode(\"Case *\") # the naming convention for the ground truth JPG files: \"Case XXX.jpg\"\r\n if vnJPG:\r\n print \"showing ground 2d image truth in green view\"\r\n # show JPG image if available\r\n sw = slicer.app.layoutManager().sliceWidget(\"Green\")\r\n cn = sw.mrmlSliceCompositeNode()\r\n cn.SetBackgroundVolumeID(vnJPG.GetID())\r\n slicer.app.layoutManager().sliceWidget(\"Green\").sliceLogic().GetBackgroundLayer().Modified()\r\n sGreen = slicer.mrmlScene.GetNodeByID(\"vtkMRMLSliceNodeGreen\")\r\n if sGreen == None :\r\n sGreen = slicer.mrmlScene.GetNodeByID(\"vtkMRMLSliceNode2\")\r\n # set to axial view\r\n sGreen.SetSliceVisible(0)\r\n sGreen.SetOrientationToAxial()\r\n sw.fitSliceToBackground()\r\n sGreen.Modified()\r\n\r\n self.onResetParameters()\r\n self.setupShortcuts()", "def _placeDock(self, dock, pos=None, otherDock=None):\n if otherDock is not None and pos is not None:\n self.area.addDock(dock,pos,otherDock)\n elif pos is not None:\n self.area.addDock(dock,pos,otherDock)\n else:\n self.area.addDock(dock)\n return dock", "def onDockClosed(self): # used when Dock dialog is closed\n self.profile_dock = None", "def pcd(dw, qpts=50):\n w = w0+dw\n pcm.set_qpts(qpts)\n sml = pcm.sml_w(w)\n avgchi = pcm.avgchi\n pcm.set_qpts(0)\n sml2 = pcm.sml_w(w)\n print sml, log(sml) - pcm.offset, avgchi\n print sml2, log(sml2) - pcm.offset, pcm.avgchi", "def plot_prec_value1(self):\n# self.query_dict={'code':code.value,'exchange':exchange.value,\\\n# 'structure':struct.value,'element':element.value,'properties':prop.value}\n# print ('POSTING', self.query_dict)\n# self.query_api(endpoint='evk')\n\n #layout_doc.children[4].children[0] = self.plot_pade_figure()\n\n\n self.query_dict={'code':code.value,'exchange':exchange.value,\\\n 'structure':struct.value,'element':element.value,'properties':prop.value}\n print ('POSTING', self.query_dict)\n self.query_api(endpoint='evk')\n\n layout_doc.children[4].children[0] = self.plot_pade_figure()", "def GetDockingImage(direction, useAero, center):\r\n\r\n suffix = (center and [\"\"] or [\"_single\"])[0]\r\n prefix = \"\"\r\n if useAero == 2:\r\n # Whidbey docking guides\r\n prefix = \"whidbey_\"\r\n elif useAero == 1:\r\n # Aero docking style\r\n prefix = \"aero_\"\r\n \r\n if direction == wx.TOP:\r\n bmp_unfocus = eval(\"%sup%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sup_focus%s\"%(prefix, suffix)).GetBitmap()\r\n elif direction == wx.BOTTOM:\r\n bmp_unfocus = eval(\"%sdown%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sdown_focus%s\"%(prefix, suffix)).GetBitmap()\r\n elif direction == wx.LEFT:\r\n bmp_unfocus = eval(\"%sleft%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sleft_focus%s\"%(prefix, suffix)).GetBitmap()\r\n elif direction == wx.RIGHT:\r\n bmp_unfocus = eval(\"%sright%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%sright_focus%s\"%(prefix, suffix)).GetBitmap()\r\n else:\r\n bmp_unfocus = eval(\"%stab%s\"%(prefix, suffix)).GetBitmap()\r\n bmp_focus = eval(\"%stab_focus%s\"%(prefix, suffix)).GetBitmap()\r\n\r\n return bmp_unfocus, bmp_focus", "def setDetails(self):\n pwd = utils.get_cwd()\n production_idx = pwd.find('production')+13\n self.prodDir = pwd[0:production_idx]\n self.nd280Version = os.path.basename(os.getenv('ND280ROOT'))\n self.production = self.prodDir.split('production')[-1].strip('0')\n self.respin = utils.split_path(pwd[production_idx+1:])[0]\n self.nuMCType = utils.split_path(pwd[production_idx+1:])[2]\n 
self.fillFromCard('runInfo.card')\n\n self.usingNUCP = False\n if 'beam/' in pwd:\n self.beam = 'beam'\n if 'nue/' in pwd:\n self.beam = 'nue'\n self.nuType = 'nue'\n if 'run1/' in pwd:\n self.beam = 'run1'\n self.ecalPeriods = '1-2'\n if 'run2/' in pwd:\n self.beam = 'run2'\n self.tpcPeriods = 'runs2-3'\n self.ecalPeriods = '1-2'\n if 'run3/' in pwd:\n self.beam = 'run3'\n self.tpcPeriods = 'runs2-3'\n self.ecalPeriods = '3-4'\n if 'run4/' in pwd:\n self.beam = 'run4'\n self.tpcPeriods = 'runs2-3-4'\n self.ecalPeriods = '3-4'\n if 'run5/' in pwd:\n self.beam = 'run5'\n self.tpcPeriods = 'runs2-3-4'\n self.ecalPeriods = '3-4'\n if 'ccpiplus/' in pwd:\n self.beam = 'ccpiplus'\n self.nMesons = 0\n self.nLeptons = 1\n self.nMuMinus = 1\n self.nPiZero = 0\n self.nPiPlus = 1\n self.usingNUCP = True\n if 'ccpizero/' in pwd:\n self.beam = 'ccpizero'\n self.nMesons = 0\n self.nLeptons = 1\n self.nMuMinus = 1\n self.nPiZero = 1\n self.nPiPlus = 0\n self.usingNUCP = True\n if 'ncpiplus/' in pwd:\n self.beam = 'ncpiplus'\n self.nMesons = 0\n self.nLeptons = 0\n self.nMuMinus = 0\n self.nPiZero = 0\n self.nPiPlus = 1\n self.usingNUCP = True\n if 'ncpizero/' in pwd:\n self.beam = 'ncpizero'\n self.nMesons = 0\n self.nLeptons = 0\n self.nMuMinus = 0\n self.nPiZero = 1\n self.nPiPlus = 0\n self.usingNUCP = True\n if 'tpcgas/' in pwd:\n self.beam = 'tpcgas'\n if 'verify/' in pwd:\n self.verify = True\n if self.nuMCType == 'anti-genie':\n self.runprefix -= 10000000\n if 'genie' in pwd:\n self.mc = 'Genie'\n self.runprefix += 1000000\n self.respin = pwd[pwd.find(self.prodDir)+len(self.prodDir)+1:][0]\n if self.respin not in string.uppercase:\n print 'Respin', self.respin, 'doesn\\'t appear to be an UPPER CASE LETTER'\n if '2010-11' in pwd:\n self.baseline = '2010-11'\n\n if 'magnet/' in pwd:\n self.runN = int(pwd[pwd.find('/run')+4])\n self.runprefix += (self.runN-1)*100000\n\n if 'water' in pwd:\n self.fill = 'water'\n self.p0dwater = 1\n self.runprefix += 10000\n if 'basket/' in pwd:\n self.fluxVolume = 'basket'\n self.fluxMasterVolume = 'Basket'\n self.fluxName = 'basket'\n self.runN = 2\n self.runprefix += 101000\n if 'nue/' in pwd:\n self.fluxName = 'Nue'\n self.runprefix += 1000\n elif 'ncpizero/' in pwd:\n self.fluxName = 'NC1pi0'\n self.runprefix += 2000\n elif 'ccpizero/' in pwd:\n self.fluxName = 'CC1pi0'\n self.runprefix += 3000\n elif 'ncpiplus/' in pwd:\n self.fluxName = 'NC1pi+'\n self.runprefix += 4000\n elif 'ccpiplus/' in pwd:\n self.fluxName = 'CC1pi+'\n self.runprefix += 5000\n elif 'ncpizerofgd/' in pwd:\n self.fluxName = 'NCpi0FGD'\n self.fluxMasterVolume = 'FGD1'\n self.runprefix += 6000\n elif 'ccpicoh/' in pwd:\n self.fluxName = 'CCpicoh'\n self.fluxMasterVolume = 'FGD1'\n self.runprefix += 7000\n elif 'tpcgas/' in pwd:\n self.fluxName = 'TPCGas'\n # set this to mask ND280 geometry\n # the self.standalone option can be set to a single ND280 detector\n # and overrides the baseline setting. However, turns out that\n # setting master_volume to Half produces events only on argon so\n # we are using that instead.\n # self.standalone = 'TPC'\n self.fluxMasterVolume = 'Half'\n self.forceVolume = 'true'\n self.runprefix += 6000\n\n self.setBasePath()\n self.setNumcDir()\n self.setPassThruDir()\n self.setFluxDir()\n self.setFluxInfo()", "def fillDetInfo():\n print('here i am')\n # 1. 
maps of analysis channel to cpd, and pulser monitor channels\n detCH, pMons = {}, {}\n for ds in [0,1,2,3,4,5,6]:\n f = np.load(\"%s/data/ds%d_detChans.npz\" % (os.environ['LATDIR'], ds))\n detCH[ds] = f['arr_0'].item()\n pMons[ds] = f['arr_1'].item()\n\n # 2. maps of HV and TRAP threshold settings are stored in the DB.\n # make them global, and move them to the runSettings file.\n # FORMAT: {ds : {'det' : [(run1,val1),(run2,val2)...]} }\n detHV, detTH = {}, {}\n\n # load all possible values, as in settingsMgr\n detDB = db.TinyDB(\"%s/calDB-v2.json\" % dsi.latSWDir)\n detPars = db.Query()\n cal = dsi.CalInfo()\n for ds in [0,1,2,3,4,5,6]:\n # for ds in [0]:\n print(\"scanning ds\",ds)\n detTH[ds] = {}\n detHV[ds] = {}\n for key in cal.GetKeys(ds):\n mod = -1\n if \"m1\" in key: mod = 1\n if \"m2\" in key: mod = 2\n for cIdx in range(cal.GetIdxs(key)):\n\n # load the DB records\n dbKeyTH = \"trapThr_%s_c%d\" % (key, cIdx)\n dbValTH = dsi.getDBRecord(dbKeyTH,calDB=detDB,pars=detPars)\n\n dbKeyHV = \"hvBias_%s_c%d\" % (key, cIdx)\n dbValHV = dsi.getDBRecord(dbKeyHV,calDB=detDB,pars=detPars)\n\n # debug: print the record\n # for val in sorted(dbValTH):\n # if len(dbValTH[val])>0:\n # print(val, dbValTH[val])\n # return\n\n # fill the first value\n if len(detTH[ds])==0:\n detTH[ds] = dbValTH\n detHV[ds] = dbValHV\n continue\n\n # check for new threshold values.\n for cpd in detTH[ds]:\n nOld, nNew = len(detTH[ds][cpd]), len(dbValTH[cpd])\n\n # detector just came online\n if nOld==0 and nNew>0:\n detTH[ds][cpd] = dbValTH[cpd]\n continue\n # detector still offline\n if nOld==0 and nNew==0:\n continue\n # detector just went offline\n if nOld>0 and nNew==0:\n continue\n\n # check last run/trap pair against each new one\n prevRun, prevTH = detTH[ds][cpd][-1][0], detTH[ds][cpd][-1][1]\n for val in dbValTH[cpd]:\n thisRun, thisTH = val[0], val[1]\n if thisTH != prevTH:\n detTH[ds][cpd].append([thisRun,thisTH])\n prevTH = thisTH\n\n # check for new HV values.\n for cpd in detHV[ds]:\n\n nOld, nNew = len(detHV[ds][cpd]), len(dbValHV[cpd])\n\n # detector just came online\n if nOld==0 and nNew>0:\n detHV[ds][cpd] = dbValHV[cpd]\n continue\n # detector still offline\n if nOld==0 and nNew==0:\n continue\n # detector just went offline\n if nOld>0 and nNew==0:\n continue\n\n # check last run/trap pair against each new one\n prevRun, prevHV = detHV[ds][cpd][-1][0], detHV[ds][cpd][-1][1]\n for val in dbValHV[cpd]:\n thisRun, thisHV = val[0], val[1]\n if thisHV != prevHV:\n print(\"found HV diff. 
cpd %d prev %dV (run %d) new %dV (run %d)\" % (cpd, prevHV, prevRun, thisHV, thisRun))\n detHV[ds][cpd].append([thisRun,thisHV])\n prevHV = thisHV\n\n # return\n\n # # load the old file and compare\n # # GOAL: improve on this file.\n # # f = np.load(\"%s/data/runSettings.npz\" % dsi.latSWDir)\n # # detHVOld = f['arr_0'].item()\n # # detTHOld = f['arr_1'].item()\n # # detCHOld = f['arr_2'].item()\n # # pMonsOld = f['arr_3'].item()\n #\n # ds = 3\n # print(\"old results, ds\",ds)\n # for cpd in sorted(detTHOld[ds]):\n # if cpd!=\"122\":continue\n # if len(detTHOld[ds][cpd]) > 0:\n # print(cpd, detTHOld[ds][cpd])\n #\n # # for ds in [0,1,2,3,4,5,6]:\n # print(\"thresh results, ds:\",ds)\n # for cpd in sorted(detTH[ds]):\n # # if cpd!=122:continue\n # if len(detTH[ds][cpd]) > 0:\n # print(cpd, detTH[ds][cpd])\n\n\n np.savez(\"%s/data/runSettings-v2.npz\" % dsi.latSWDir,detHV,detTH,detCH,pMons)", "def port2_docking_date(self):\n return self._port2_docking_date", "def main(folder, quiet=0):\n\n if quiet:\n output_stream = StringIO()\n else:\n output_stream = sys.stdout\n\n\n\n color1 = \"I4\" #filter system for first color of CMD\n color2 = \"M1\" #filter system for second color of CMD\n zeromagc1 = zero.zero_mag[color1]\n zeromagc2 = zero.zero_mag[color2]\n min_mag = 8. #minimal observation limit\n max_mag = 0. #maximal observation limit\n\n#getting file list\n files = sorted(os.listdir('%s/%s' % (os.getcwdu(), folder))) \n out = []\n\n for fil in files:\n#only using files created by the automated simulation\n if fil.startswith('sim_') and not 'settings' in fil.encode(\"ascii\"):\n print(\"%s/%s\" % (folder,fil.encode(\"ascii\")), file=output_stream)\n \n\n # Read in\n hdulist = fits.open('%s/%s' %(folder,fil))\n data = hdulist[1].data\n\n #calculating magnitudes from fluxes and converting to CMD-data\n x = -2.5*(np.log10(data['c%s' % color1]/zeromagc1) - np.log10(data['c%s' % color2]/zeromagc2))\n y = -2.5*(np.log10(data['c%s' % color2]/zeromagc2))\n\n \n sel = np.logical_and( (y > -10./3. * (x-1.) 
+ 10.), np.logical_and(max_mag < y, y < min_mag))\n sel = np.logical_and(sel, y < -x + 12.)\n n = sum(sel)\n t = Table(hdulist[1].data)\n if 'sel' in t.columns:\n t.remove_column('sel')\n t.add_column(Column(name='sel', data=sel.astype('int')))\n \n hdulist[1].data = np.array(t)\n tmp, av, apera, age = fil.split('_')\n fits.update('%s/%s' %(folder,fil), np.array(t), ext = 1, clobber=True)\n out.append([av, apera, age, n])\n\n #writing obtained data to \"folder/__expected_number\"\n head = ['#', 'AV', 'Aperature_size', 'Age', 'Expected_number']\n f = open('%s/__expected_number' % folder, 'w')\n f.write(','.join(head)+'\\n' )\n np.savetxt(f, np.asarray(out).astype(int))\n f.close()\n \n print (\"Analysed %s files and saved output to %s\" % (len(out),'%s/__expected_number' % folder), file=output_stream)", "def __init__(self, parent):\n\n assert isinstance(parent, FindingChartDialog)\n\n self.parent = parent\n self.db = self.parent.db\n\n builder = self.parent.builder\n builder.add_from_file(glade.CHART_PREFERENCES_DIALOG)\n\n self.dialog = builder.get_object('chart-preferences-dialog')\n self.dialog.set_transient_for(self.parent.dialog)\n self.dialog.set_title(\"Finding Chart: Preferences\")\n self.dialog.set_resizable(False)\n\n # Note: gtk.RESPONSE_SAVE doesn't exist; we use gtk.RESPONSE_OK\n self.close_button = self.dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)\n self.apply_button = self.dialog.add_button(gtk.STOCK_APPLY, gtk.RESPONSE_APPLY)\n self.save_button = self.dialog.add_button(gtk.STOCK_SAVE, gtk.RESPONSE_OK)\n self.dialog.set_default_response(gtk.RESPONSE_CLOSE)\n self.dialog.set_focus(self.close_button)\n\n text = \"Update chart with these parameters\"\n self.apply_button.set_tooltip_text(text)\n\n text = \"Store these parameters in the LEMONdB\"\n self.save_button.set_tooltip_text(text)\n\n # Spin buttons to select the value of Vmin / Vmax\n self.vmin_button = builder.get_object('vmin-spinbutton')\n self.vmax_button = builder.get_object('vmax-spinbutton')\n\n # If the values of both Vmin and Vmax are stored in the LEMONdB, assume\n # a logarithmic scale. Otherwise, use the normalization algorithm and\n # Vmin / Vmax values defined by the APLpyNormalize object of the\n # finding chart (parent.aplpy_plot.image.norm). Note that, by default,\n # FITSFigure.show_grayscale() uses a linear stretch.\n\n try:\n self.stretch = 'log'\n vmin = self.db.vmin\n vmax = self.db.vmax\n\n msg1 = \"Normalization parameters (vmin and vmax) read from LEMONdB\"\n msg2 = \"Assuming logarithmic normalization (stretch = 'log')\"\n for message in msg1, msg2:\n logging.debug(message)\n\n except AttributeError:\n normalize = self.parent.aplpy_plot.image.norm\n self.stretch = normalize.stretch\n vmin = normalize.vmin\n vmax = normalize.vmax\n\n msg1 = \"Normalization parameters not stored in the LEMONdB\"\n msg2 = \"Algorithm and values read from APLpyNormalize object\"\n for message in msg1, msg2:\n logging.debug(message)\n\n # Because of the linear normalization formula, which APLpyNormalize\n # uses by default, it may set a value of 'vmin' smaller than that\n # of the minimum pixel level of the finding chart (read from the\n # 'data_min' attribute of the parent FindingChartDialog object):\n #\n # vmin = -0.1 * (vmax - vmin) + vmin\n #\n # Therefore, we need to make sure to use the lowest of the two values:\n # 'vmin' and 'data_min', since (although strange at first) the former\n # may be smaller than the latter. 
Analogously, the value of 'vmax'\n # returned by APLpyNormalize can be greater than the maximum pixel\n # level, so we must take that into account.\n\n data_min = numpy.ceil (min(self.parent.data_min, vmin))\n data_max = numpy.floor(max(self.parent.data_max, vmax))\n assert hasattr(self, 'stretch')\n\n kwargs = dict(lower = data_min, upper = data_max, step_incr = 1)\n vmin_adjust = gtk.Adjustment(value = vmin, **kwargs)\n vmax_adjust = gtk.Adjustment(value = vmax, **kwargs)\n self.vmin_button.set_adjustment(vmin_adjust)\n self.vmax_button.set_adjustment(vmax_adjust)\n\n def ndigits(n):\n \"\"\" Return the number of digits of an integer \"\"\"\n return len(str(abs(n)))\n\n # The desired width of the button, in characters\n self.vmin_button.set_width_chars(ndigits(data_min))\n self.vmax_button.set_width_chars(ndigits(data_max))\n\n # Show the absolute minimum and maximum allowed values\n data_min_entry = builder.get_object('data-min-entry')\n data_min_entry.set_width_chars(ndigits(data_min))\n data_min_entry.set_text(str(data_min))\n data_min_entry.set_sensitive(False)\n\n data_max_entry = builder.get_object('data-max-entry')\n data_max_entry.set_width_chars(ndigits(data_max))\n data_max_entry.set_text(str(data_max))\n data_max_entry.set_sensitive(False)\n\n # Both spin buttons must be in the range [data_min, data_max], but\n # there is a second restriction: Vmin must be at all times <= Vmax.\n # Use the 'value-changed' signal, emitted when any of the settings\n # (i.e. value, digits) that change the display of the spinbutton are\n # changed, to enforce this. Every time that Vmin is changed we make\n # sure that it is <= Vmax; otherwise we set it to Vmax. The same is\n # done with Vmax, ensuring that it is always >= Vmin.\n\n def vmin_changed_callback(*args):\n upper = self.vmax_button.get_value()\n if self.vmin_button.get_value() > upper:\n self.vmin_button.set_value(upper)\n\n def vmax_changed_callback(*args):\n lower = self.vmin_button.get_value()\n if self.vmax_button.get_value() < lower:\n self.vmax_button.set_value(lower)\n\n self.vmin_button.connect('value-changed', vmin_changed_callback)\n self.vmax_button.connect('value-changed', vmax_changed_callback)\n self.dialog.connect('response', self.handle_response)", "def _dsurface_domega(self):\n\n dsdo = 0.\n\n return dsdo", "def dockprep(self, force_rerun=False):\n log.debug('{}: running dock preparation...'.format(self.id))\n\n prep_mol2 = op.join(self.dock_dir, '{}_prep.mol2'.format(self.id))\n prep_py = op.join(self.dock_dir, \"prep.py\")\n\n if ssbio.utils.force_rerun(flag=force_rerun, outfile=prep_mol2):\n with open(prep_py, \"w\") as f:\n f.write('import chimera\\n')\n f.write('from DockPrep import prep\\n')\n f.write('models = chimera.openModels.list(modelTypes=[chimera.Molecule])\\n')\n f.write('prep(models)\\n')\n f.write('from WriteMol2 import writeMol2\\n')\n f.write('writeMol2(models, \"{}\")\\n'.format(prep_mol2))\n\n cmd = 'chimera --nogui {} {}'.format(self.structure_path, prep_py)\n os.system(cmd)\n os.remove(prep_py)\n os.remove('{}c'.format(prep_py))\n\n if ssbio.utils.is_non_zero_file(prep_mol2):\n self.dockprep_path = prep_mol2\n log.debug('{}: successful dockprep execution'.format(self.dockprep_path))\n else:\n log.critical('{}: dockprep failed to run on PDB file'.format(self.structure_path))", "def ValidateNotebookDocking(self, valid):\r\n \r\n return 0", "def __init__(self, parent=None, pltw=None, cpos=None, stguess=None):\n super(pkFitDlg, self).__init__(parent)\n\n self.parent = parent\n self.title = 'Peak 
Fitting tool'\n self.pltw = pltw\n self.cpos = cpos\n self.maxparm = 5\n self.first = True\n self.npeaks = 0\n self.blkno = self.pltw.curvelist[cpos].yvinfo.blkpos\n self.xpos = self.pltw.curvelist[cpos].xvinfo.vidx\n self.ypos = self.pltw.curvelist[cpos].yvinfo.vidx\n self.data = np.vstack((self.pltw.blklst[self.blkno][self.xpos],\n self.pltw.blklst[self.blkno][self.ypos],\n self.pltw.blklst[self.blkno][self.ypos],\n np.zeros(len(self.pltw.blklst[self.blkno][self.xpos]))))\n (self.nvect, self.npt) = self.data.shape\n self.diffshift = abs(self.data[1].min() - getSpan(self.data[1]) * 0.15)\n\n if stguess is None:\n pkdlg = getPkDlg(parent, self)\n pkdlg.setModal(True)\n ret = pkdlg.exec()\n if ret:\n stguess = pkdlg.stguess\n\n if stguess is None:\n self.stguess = None\n self.close()\n return\n else:\n self.stguess = stguess.replace('\\n', ',')\n self.guessToParms(self.stguess)\n\n # Create the layout\n self.createLayout()\n # Connect buttons to callback functions\n self.exeBtn.clicked.connect(self.compute)\n self.okBtn.clicked.connect(self.validate)\n self.cancelBtn.clicked.connect(self.reject)\n self.setWindowTitle(self.title)\n\n self.updateParmsEdit()\n self.compute()\n QTimer.singleShot(5000, self.istest)", "def form_dictionary_by_pd(global_obj) -> dict:\n if isinstance(global_obj, Pd):\n global_obj.form_object()\n\n ddict = {}\n chi2, diffrn_radiation = None, None\n exclude, pd_background = None, None\n pd_instr_reflex_asymmetry, pd_instr_resolution = None, None,\n pd_meas, pd_peak = None, None\n pd_proc, phase = None, None\n range_, refine_ls = None, None\n refln, refln_susceptibility = None, None\n setup = None\n\n l_obj = take_items_by_class(global_obj, (Setup, ))\n if len(l_obj) > 0:\n setup = l_obj[0]\n\n l_obj = take_items_by_class(global_obj, (DiffrnRadiation, ))\n if len(l_obj) > 0:\n diffrn_radiation = l_obj[0]\n\n l_obj = take_items_by_class(global_obj, (ReflnL, ))\n if len(l_obj) > 0:\n refln = l_obj\n\n l_obj = take_items_by_class(global_obj, (ReflnSusceptibilityL, ))\n if len(l_obj) > 0:\n refln_susceptibility = l_obj\n\n\n ddict[\"name\"] = global_obj.get_name()\n if setup is not None:\n ddict[\"magnetic_field\"] = numpy.atleast_1d(setup.field)\n ddict[\"offset_ttheta\"] = numpy.atleast_1d(setup.offset_ttheta)\n ddict[\"wavelength\"] = numpy.atleast_1d(setup.wavelength)\n\n if diffrn_radiation is not None:\n ddict[\"beam_polarization\"] = numpy.atleast_1d(diffrn_radiation.polarization)\n ddict[\"flipper_efficiency\"] = numpy.atleast_1d(diffrn_radiation.efficiency)\n\n if refln is not None:\n for refln_phase in refln:\n phase_name = refln_phase.loop_name\n if (refln_phase.is_attribute(\"index_h\") and refln_phase.is_attribute(\"index_k\") and refln_phase.is_attribute(\"index_l\")):\n index_hkl = numpy.array([refln_phase.index_h, refln_phase.index_k, refln_phase.index_l], dtype=int)\n ddict[f\"index_hkl_{phase_name:}\"] = index_hkl\n if refln_phase.is_attribute(\"f_calc\"):\n f_calc = numpy.array(refln_phase.f_calc, dtype=complex)\n ddict[f\"f_nucl_{phase_name:}\"] = f_calc\n if (refln_phase.is_attribute(\"a_calc\") and refln_phase.is_attribute(\"b_calc\")):\n a_calc = numpy.array(refln_phase.a_calc, dtype=complex)\n b_calc = numpy.array(refln_phase.b_calc, dtype=complex)\n ddict[f\"f_nucl_{phase_name:}\"] = a_calc + 1j*b_calc\n\n if refln_susceptibility is not None:\n for refln_phase in refln_susceptibility:\n phase_name = refln_phase.loop_name\n if (refln_phase.is_attribute(\"index_h\") and refln_phase.is_attribute(\"index_k\") and 
refln_phase.is_attribute(\"index_l\")):\n index_hkl = numpy.array([refln_phase.index_h, refln_phase.index_k, refln_phase.index_l], dtype=int)\n ddict[f\"index_hkl_{phase_name:}\"] = index_hkl\n if refln_phase.is_attribute(\"chi_11_calc\"):\n chi_11 = numpy.array(refln_phase.chi_11_calc, dtype=complex)\n chi_12 = numpy.array(refln_phase.chi_12_calc, dtype=complex)\n chi_13 = numpy.array(refln_phase.chi_13_calc, dtype=complex)\n chi_21 = numpy.array(refln_phase.chi_21_calc, dtype=complex)\n chi_22 = numpy.array(refln_phase.chi_22_calc, dtype=complex)\n chi_23 = numpy.array(refln_phase.chi_23_calc, dtype=complex)\n chi_31 = numpy.array(refln_phase.chi_31_calc, dtype=complex)\n chi_32 = numpy.array(refln_phase.chi_32_calc, dtype=complex)\n chi_33 = numpy.array(refln_phase.chi_33_calc, dtype=complex)\n\n ddict[f\"sft_ccs_{phase_name:}\"] = numpy.stack([\n chi_11, chi_12, chi_13, chi_21, chi_22, chi_23, chi_31, chi_32, chi_33], axis=0)\n\n return ddict", "def __init__(self, parent=None):\n QtGui.QWidget.__init__(self, parent)\n \n self.setWindowTitle('The Visual Climate Data Analysis Tools - (VCDAT)')\n layout = QtGui.QVBoxLayout()\n self.setLayout(layout)\n\n # Init Menu Widget\n self.menuWidget = QMenuWidget(self)\n\n # Init File Widget\n vsplitter = QtGui.QSplitter(QtCore.Qt.Vertical) \n fileWidget = QLabeledWidgetContainer(QCDATFileWidget(),\n 'FILE VARIABLES')\n vsplitter.addWidget(fileWidget)\n\n # Init Defined Variables Widget\n definedVar = QLabeledWidgetContainer(QDefinedVariable(),\n 'DEFINED VARIABLES')\n vsplitter.addWidget(definedVar)\n hsplitter = QtGui.QSplitter(QtCore.Qt.Horizontal)\n hsplitter.addWidget(vsplitter)\n\n # Init Var Plotting Widget\n varView = QLabeledWidgetContainer(QVariableView(),\n 'PLOTTING')\n hsplitter.addWidget(varView)\n hsplitter.setStretchFactor(1, 1)\n layout.addWidget(hsplitter)\n\n # Init guiController\n guiController = GuiController(fileWidget.getWidget(),\n definedVar.getWidget(),\n varView.getWidget())\n guiController.initTeachingCommands()\n self.guiController = guiController # So guicontroller doesn't get garbage collected\n\n # Connect signals between self & GuiController\n self.connect(self, QtCore.SIGNAL('setRecordCommands'),\n guiController.setRecordCommands)\n self.connect(self, QtCore.SIGNAL('viewTeachingCommands'),\n guiController.viewTeachingCommands)\n self.connect(self, QtCore.SIGNAL('closeTeachingCommands'),\n guiController.closeTeachingCommands) \n\n # Connect Signals between QVariableView & QDefinedVariable\n varView.connect(definedVar.getWidget(), QtCore.SIGNAL('selectDefinedVariableEvent'),\n varView.getWidget().selectDefinedVariableEvent)\n varView.connect(definedVar.getWidget(), QtCore.SIGNAL('setupDefinedVariableAxes'),\n varView.getWidget().setupDefinedVariableAxes)\n definedVar.connect(varView.getWidget(), QtCore.SIGNAL('plotPressed'),\n definedVar.getWidget().defineQuickplot)\n definedVar.connect(varView.getWidget(), QtCore.SIGNAL('defineVariable'),\n definedVar.getWidget().defineVariable)\n\n # Connect Signals between QFileWidget & QVariableView\n varView.connect(fileWidget.getWidget(), QtCore.SIGNAL('variableChanged'),\n varView.getWidget().setupDefinedVariableAxes)\n varView.connect(fileWidget.getWidget(), QtCore.SIGNAL('defineVariableEvent'),\n varView.getWidget().defineVariableEvent)", "def print_design(x, D):\n\n N = round(x[0])\n ds = x[1]\n ws = x[2]\n wc = x[3]\n lc = x[4]\n g = x[5]\n\n # compute mass\n M = 2.0*(2.0*wc+ws+ds)*lc*wc*D.rowmc + \\\n (2*lc+2*wc+np.pi*ds)*ds*ws*D.kpf*D.rowwc\n # compute loss at rated 
current\n Prt = (2*lc+2*wc+np.pi*ds)*(N*D.irt) ** 2/(ds*ws*D.kpf*D.sigmawc)\n # compute inductance\n L = D.mu0*lc*wc*N ** 2/(2*g)\n # compute the flux density\n Brt = D.mu0*N*D.irt/(2*g)\n # current density\n Jrt = N*D.irt/(ws*ds*D.kpf)\n print('Design Data')\n print(f'Turns = {N}')\n print(f'Slot depth (m) = {ds}')\n print(f'Slot width (m) = {ws}')\n print(f'Core width (m) = {wc}')\n print(f'Core length (m) = {lc}')\n print(f'Air gap (m) = {g}')\n print(' ')\n print('Design Metrics')\n print(f'Mass (kg) = {M}')\n print(f'Loss at rated current (W) = {Prt}')\n print(' ')\n print('Constrained Quantities')\n print(f'Inductance (H) = {L}')\n print(f'Flux Density at Rated Current (T) = {Brt}')\n print(f'Current Density Rated Current (A/m**2) = {Jrt}')", "def GetToolBarDockOffsets(docks):\r\n\r\n top_left = wx.Size(0, 0)\r\n bottom_right = wx.Size(0, 0)\r\n\r\n for dock in docks:\r\n if dock.toolbar:\r\n dock_direction = dock.dock_direction\r\n if dock_direction == AUI_DOCK_LEFT:\r\n top_left.x += dock.rect.width\r\n bottom_right.x += dock.rect.width\r\n\r\n elif dock_direction == AUI_DOCK_TOP:\r\n top_left.y += dock.rect.height\r\n bottom_right.y += dock.rect.height\r\n\r\n elif dock_direction == AUI_DOCK_RIGHT:\r\n bottom_right.x += dock.rect.width\r\n \r\n elif dock_direction == AUI_DOCK_BOTTOM:\r\n bottom_right.y += dock.rect.height\r\n\r\n return top_left, bottom_right", "def SmartShrink(self, docks, direction):\r\n\r\n sashSize = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n clientSize = self._frame.GetClientSize()\r\n ourDocks = FindDocks(docks, direction, -1, -1)\r\n oppositeDocks = FindOppositeDocks(docks, direction)\r\n oppositeSize = self.GetOppositeDockTotalSize(docks, direction)\r\n ourSize = 0\r\n\r\n for dock in ourDocks:\r\n ourSize += dock.size\r\n\r\n if not dock.toolbar:\r\n ourSize += sashSize\r\n \r\n shrinkSize = ourSize + oppositeSize\r\n\r\n if direction == AUI_DOCK_TOP or direction == AUI_DOCK_BOTTOM:\r\n shrinkSize -= clientSize.y\r\n else:\r\n shrinkSize -= clientSize.x\r\n\r\n if shrinkSize <= 0:\r\n return docks\r\n\r\n # Combine arrays\r\n for dock in oppositeDocks:\r\n ourDocks.append(dock)\r\n \r\n oppositeDocks = []\r\n\r\n for dock in ourDocks:\r\n if dock.toolbar or not dock.resizable:\r\n continue\r\n\r\n dockRange = dock.size - dock.min_size\r\n\r\n if dock.min_size == 0:\r\n dockRange -= sashSize\r\n if direction == AUI_DOCK_TOP or direction == AUI_DOCK_BOTTOM:\r\n dockRange -= caption_size\r\n \r\n if dockRange >= shrinkSize:\r\n \r\n dock.size -= shrinkSize\r\n return docks\r\n \r\n else:\r\n \r\n dock.size -= dockRange\r\n shrinkSize -= dockRange\r\n \r\n return docks", "def DockFixed(self, b=True):\r\n\r\n return self.SetFlag(self.optionDockFixed, b)", "def do_dock6_flexible(self, ligand_path, force_rerun=False):\n log.debug('{}: running DOCK6...'.format(self.id))\n\n ligand_name = os.path.basename(ligand_path).split('.')[0]\n in_name = op.join(self.dock_dir, \"{}_{}_flexdock.in\".format(self.id, ligand_name))\n out_name = op.join(self.dock_dir, \"{}_{}_flexdock.out\".format(self.id, ligand_name))\n\n conformers_out = op.join(self.dock_dir, '{}_{}_flexdock_conformers.mol2'.format(self.id, ligand_name))\n scored_out = op.join(self.dock_dir, '{}_{}_flexdock_scored.mol2'.format(self.id, ligand_name))\n ranked_out = op.join(self.dock_dir, '{}_{}_flexdock_ranked.mol2'.format(self.id, ligand_name))\n\n if ssbio.utils.force_rerun(flag=force_rerun, outfile=ranked_out):\n with open(in_name, \"w\") 
as f:\n dock_text = \"\"\"ligand_atom_file {}\nlimit_max_ligands no\nskip_molecule no\nread_mol_solvation no\ncalculate_rmsd no\nuse_database_filter no\norient_ligand yes\nautomated_matching yes\nreceptor_site_file {}\nmax_orientations 500\ncritical_points no\nchemical_matching no\nuse_ligand_spheres no\nuse_internal_energy yes\ninternal_energy_rep_exp 12\nflexible_ligand yes\nuser_specified_anchor no\nlimit_max_anchors no\nmin_anchor_size 5\npruning_use_clustering yes\npruning_max_orients 100\npruning_clustering_cutoff 100\npruning_conformer_score_cutoff 100\nuse_clash_overlap no\nwrite_growth_tree no\nbump_filter yes\nbump_grid_prefix {}\nscore_molecules yes\ncontact_score_primary no\ncontact_score_secondary no\ngrid_score_primary yes\ngrid_score_secondary no\ngrid_score_rep_rad_scale 1\ngrid_score_vdw_scale 1\ngrid_score_es_scale 1\ngrid_score_grid_prefix {}\nmultigrid_score_secondary no\ndock3.5_score_secondary no\ncontinuous_score_secondary no\ndescriptor_score_secondary no\ngbsa_zou_score_secondary no\ngbsa_hawkins_score_secondary no\nSASA_descriptor_score_secondary no\namber_score_secondary no\nminimize_ligand yes\nminimize_anchor yes\nminimize_flexible_growth yes\nuse_advanced_simplex_parameters no\nsimplex_max_cycles 1\nsimplex_score_converge 0.1\nsimplex_cycle_converge 1.0\nsimplex_trans_step 1.0\nsimplex_rot_step 0.1\nsimplex_tors_step 10.0\nsimplex_anchor_max_iterations 500\nsimplex_grow_max_iterations 500\nsimplex_grow_tors_premin_iterations 0\nsimplex_random_seed 0\nsimplex_restraint_min yes\nsimplex_coefficient_restraint 10.0\natom_model all\nvdw_defn_file {}\nflex_defn_file {}\nflex_drive_file {}\nligand_outfile_prefix {}_{}_flexdock\nwrite_orientations no\nnum_scored_conformers 20\nwrite_conformations yes\ncluster_conformations yes\nrank_ligands yes\n \"\"\".format(ligand_path, op.basename(self.sphsel_path), op.splitext(op.basename(self.grid_path))[0],\n op.splitext(op.basename(self.grid_path))[0], self.amb_file, self.flex1_file, self.flex2_file,\n self.id, ligand_name)\n\n f.write(dock_text)\n\n os.chdir(self.dock_dir)\n cmd = \"dock6 -i {} -o {} -v\".format(in_name, out_name)\n os.system(cmd)\n\n if ssbio.utils.is_non_zero_file(ranked_out):\n self.dock_flexible_outfile = out_name\n self.dock_flexible_conformers_result = conformers_out\n self.dock_flexible_scored_result = scored_out\n log.debug('{}: successful docking!'.format(self.dock_flexible_outfile))\n else:\n log.error('{}+{}: empty DOCK6 ranked file, execution error (or ligand failed to dock)'.format(self.id,\n op.basename(ligand_path)))", "def port2_docking_date(self, port2_docking_date):\n\n self._port2_docking_date = port2_docking_date", "def _get_dc_offset(self):\n # apply this knowledge to reshape the spectroscopic values\n # remember to reshape such that the dimensions are arranged in reverse order (slow to fast)\n spec_vals_nd, success = reshape_to_n_dims(self._sho_spec_vals[self._sho_all_but_forc_inds,\n self._current_sho_spec_slice],\n h5_spec=self._sho_spec_inds[self._sho_all_but_forc_inds,\n self._current_sho_spec_slice])\n # This should result in a N+1 dimensional matrix where the first index contains the actual data\n # the other dimensions are present to easily slice the data\n spec_labels_sorted = np.hstack(('Dim', self.h5_main.spec_dim_labels))\n if self._verbose:\n print('Spectroscopic dimensions sorted by rate of change:')\n print(spec_labels_sorted)\n # slice the N dimensional dataset such that we only get the DC offset for default values of other dims\n fit_dim_pos = 
np.argwhere(spec_labels_sorted == self._fit_dim_name)[0][0]\n # fit_dim_slice = list()\n # for dim_ind in range(spec_labels_sorted.size):\n # if dim_ind == fit_dim_pos:\n # fit_dim_slice.append(slice(None))\n # else:\n # fit_dim_slice.append(slice(0, 1))\n\n fit_dim_slice = [fit_dim_pos]\n for idim, dim in enumerate(spec_labels_sorted[1:]):\n if dim == self._fit_dim_name:\n fit_dim_slice.append(slice(None))\n fit_dim_slice[0] = idim\n elif dim in ['FORC', 'FORC_repeat', 'FORC_Cycle']:\n continue\n else:\n fit_dim_slice.append(slice(0, 1))\n\n if self._verbose:\n print('slice to extract Vdc:')\n print(fit_dim_slice)\n\n self.fit_dim_vec = np.squeeze(spec_vals_nd[tuple(fit_dim_slice)])\n\n return", "def setup_evolution(self, evo_data):\n self.popup = tk.Tk()\n ws = self.popup.winfo_screenwidth()\n hs = self.popup.winfo_screenheight()\n w = 900\n h = 700\n x = (ws / 2) - (w / 2)\n y = (hs / 3) - (h / 3)\n self.popup.geometry('%dx%d+%d+%d' % (w, h, x, y))\n self.popup.wm_title(self.lang.VM_title)\n label_id = ttk.Label(self.popup, text=self.lang.VM_id + str(evo_data[\"id\"]), font=FONT_TITOL)\n label_id.pack(pady=0)\n label_location = ttk.Label(self.popup, text=self.lang.VM_location + str(evo_data[\"location\"]), font=FONT_TITOL)\n label_location.pack(pady=0)\n width = 0.3\n f = Figure(figsize=(7, 3), dpi=100)\n\n ax = f.add_subplot(121)\n ax.set_ylabel(self.lang.VM_cm)\n ax.set_title(self.lang.VM_perimeter_title)\n #ax.xticks(rotation=90)\n ax.bar(evo_data[\"date\"], evo_data[\"perimeter\"], width)\n\n ax1 = f.add_subplot(122)\n ax1.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax1.set_title(self.lang.VM_perimeter_area)\n ax1.bar(evo_data[\"date\"], evo_data[\"area_total\"], width)\n\n f.autofmt_xdate()\n f.tight_layout(pad=1, w_pad=0, h_pad=0)\n\n canvas = FigureCanvasTkAgg(f, master=self.popup)\n canvas.draw()\n canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1)\n\n f1 = Figure(figsize=(7, 3), dpi=100)\n\n ax2 = f1.add_subplot(131)\n ax2.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax2.set_title(self.lang.VM_granulation)\n ax2.bar(evo_data[\"date\"], evo_data[\"granulation\"], width, color=\"orange\")\n\n ax3 = f1.add_subplot(132)\n ax3.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax3.set_title(self.lang.VM_slough)\n ax3.bar(evo_data[\"date\"], evo_data[\"slough\"], width, color=\"orange\")\n\n ax4 = f1.add_subplot(133)\n ax4.set_ylabel(self.lang.VM_cm + \" * \" + self.lang.VM_cm)\n ax4.set_title(self.lang.VM_necrosis)\n ax4.bar(evo_data[\"date\"], evo_data[\"necrosis\"], width, color=\"orange\")\n\n f1.autofmt_xdate()\n f1.tight_layout(pad=1, w_pad=0, h_pad=0)\n\n canvas1 = FigureCanvasTkAgg(f1, master=self.popup)\n canvas1.draw()\n canvas1.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1)", "def docking_vina(self, ligand_file, docking_pdbqt_file, docking_log_file):\n\n run_line = '%s' % self.docking_program\n run_line += ' --config %s' % self.dock_config_file\n run_line += ' --ligand %s' % ligand_file\n run_line += ' --out %s' % docking_pdbqt_file\n if self.output_save:\n run_line += ' --log %s' % (docking_log_file)\n e = None\n try:\n result = subprocess.check_output(run_line.split(),\n stderr=subprocess.STDOUT,\n timeout=self.timeout_dock,\n universal_newlines=True)\n except Exception as e:\n return [99.999], e\n\n result_lines = result.split('\\n')\n\n check_result = False\n affinity_list = list()\n for result_line in result_lines:\n if result_line.startswith('-----+'):\n check_result = True\n continue\n if not check_result:\n 
continue\n if result_line.startswith('Writing output'):\n break\n if result_line.startswith('Refine time'):\n break\n lis = result_line.strip().split()\n if not lis[0].isdigit():\n break\n# mode = int(lis[0])\n affinity = float(lis[1])\n affinity_list += [affinity]\n if len(affinity_list) == 0:\n e = 'WARNING: Could not find any conformations.'\n return [99.999], e\n return affinity_list, e", "def port2_docked_time(self, port2_docked_time):\n\n self._port2_docked_time = port2_docked_time", "def printDesignVariables(self):\n print(\"-\" * 85)\n print(\"{:>30}{:>20}{:>20}\".format(\"CSM Design Parameter\", \"Name\", \"Value\"))\n print(\"-\" * 85)\n for dvName in self.DVs:\n DV = self.DVs[dvName]\n print(f\"{DV.csmDesPmtr:>30}{DV.name:>20}{DV.value:>20}\")", "def plot_prec_value2(self):\n self.query_dict={'code':code2.value,'exchange':exchange2.value,\\\n 'structure':struct2.value,'element':element2.value,'properties':prop2.value}\n print ('POSTING', self.query_dict)\n self.query_api(endpoint='evk')\n\n layout_doc.children[4].children[1] = self.plot_pade_figure()", "def __init__(self):\r\n\r\n object.__init__(self)\r\n \r\n self.dock_direction = 0\r\n self.dock_layer = 0\r\n self.dock_row = 0\r\n self.size = 0\r\n self.min_size = 0\r\n self.resizable = True\r\n self.fixed = False\r\n self.toolbar = False\r\n self.rect = wx.Rect()\r\n self.panes = []", "def build_period_rdi_chart(nuts_totals_df, start_date=None,\n end_date=None, charts_label=None,\n elem_fig_id=None,\n vits_fig_id=None,\n macros_fig_id=None):\n # calc num days\n if start_date is not None and end_date is not None:\n delta = end_date - start_date\n num_days = float(delta.days)\n print(f'num days: {num_days}')\n else:\n num_days = 1.\n\n usr_life_stg = ''\n usr_type = ''\n usr_age = ''\n usr_active_lvl = \"\"\n if current_user.is_authenticated:\n usr_life_stg = current_user.lifestage_grp\n usr_type = current_user.person_type\n usr_age = current_user.age\n usr_active_lvl = current_user.active_level\n\n # df of nuts by category with field values as %\n elems_df = pd.DataFrame(columns=list(rdi_elems_dict.keys()))\n vits_df = pd.DataFrame(columns=list(rdi_vits_dict.keys()))\n macros_df = pd.DataFrame(columns=list(rdi_macros_dict.keys()))\n\n # fill row 0 of each nut_type df with percentages\n for idx, row in nuts_totals_df.iterrows():\n # todo: need to process and take out brackets, extra words\n cnf_nut = row['Name'].lower()\n cnf_nut = preprocess_cnf_nuts(cnf_nut)\n cnf_amt = float(row['Value'])\n # todo: take out micro symbol from units but not used as units\n # taken from dicts_arrs in def find_type\n cnf_units = row['Units']\n if '\\xb5g' in cnf_units:\n cnf_units = cnf_units.replace(\"\\xb5g\", \"ug\")\n nut_type, rdi_nut, multiplier = find_type(cnf_nut, cnf_elems_dicts)\n if nut_type == \"\":\n nut_type, rdi_nut, multiplier = find_type(cnf_nut, cnf_vits_dicts)\n if nut_type == \"\":\n nut_type, rdi_nut, multiplier = find_type(cnf_nut, cnf_macros_dicts)\n\n # get start and exclusive end idx of rdi_df\n start_idx, end_idx = get_lifestage_idxs(usr_type)\n if nut_type == 'element':\n elems_df = fill_nut_df(nut_type, start_idx, end_idx, usr_life_stg,\n cnf_nut, cnf_amt, multiplier,\n elems_df,\n usr_type, usr_age, usr_active_lvl, num_days)\n\n elif nut_type == 'vitamin':\n vits_df = fill_nut_df(nut_type, start_idx, end_idx, usr_life_stg,\n cnf_nut, cnf_amt, multiplier,\n vits_df,\n usr_type, usr_age, usr_active_lvl, num_days)\n\n elif nut_type == 'macronutrient':\n macros_df = fill_nut_df(nut_type, start_idx, end_idx, 
usr_life_stg,\n cnf_nut, cnf_amt, multiplier,\n macros_df,\n usr_type, usr_age, usr_active_lvl, num_days)\n\n # make bar charts and html.Div containing them, return\n # style chart\n elem_colors = color_bars(elems_df)\n vits_colors = color_bars(vits_df)\n macros_colors = color_bars(macros_df)\n\n fig_elems = go.Figure(data=[go.Bar(\n x=list(elems_df.columns),\n y=list(elems_df.iloc[0]),\n marker_color=elem_colors\n )])\n fig_elems.update_layout(title_text=f'elements for{charts_label}')\n fig_vits = go.Figure(data=[go.Bar(x=list(vits_df.columns),\n y=list(vits_df.iloc[0]),\n marker_color=vits_colors)])\n fig_vits.update_layout(title_text=f'vitamins for{charts_label}')\n fig_macros = go.Figure(data=[go.Bar(x=list(macros_df.columns),\n y=list(macros_df.iloc[0]),\n marker_color=macros_colors)])\n fig_macros.update_layout(title_text=f\"macronutrients for{charts_label}\")\n\n figs_div = html.Div([\n dcc.Graph(\n figure=fig_elems,\n id=elem_fig_id\n ),\n dcc.Graph(\n figure=fig_vits,\n id=vits_fig_id\n ),\n dcc.Graph(\n figure=fig_macros,\n id=macros_fig_id\n )\n ])\n return figs_div", "def is_dock_msg(msg):\n return msg & 0xF0 == Dock.BASE", "def data(self):\n dico = {}\n for d_ in [\"flux\",\"var\",\"lbda\",\"mjd\",\"bandname\",\"zp\",\"zpsys\"]:\n dico[d_] = self.get(d_)\n return dico", "def GetOppositeDockTotalSize(self, docks, direction):\r\n \r\n sash_size = self._art.GetMetric(AUI_DOCKART_SASH_SIZE)\r\n caption_size = self._art.GetMetric(AUI_DOCKART_CAPTION_SIZE)\r\n pane_border_size = self._art.GetMetric(AUI_DOCKART_PANE_BORDER_SIZE)\r\n minSizeMax = 0\r\n result = sash_size\r\n vertical = False\r\n\r\n if direction in [AUI_DOCK_TOP, AUI_DOCK_BOTTOM]:\r\n vertical = True\r\n\r\n # Get minimum size of the most inner area\r\n for tmpDock in docks:\r\n \r\n if tmpDock.dock_layer != 0:\r\n continue\r\n\r\n if tmpDock.dock_direction != AUI_DOCK_CENTER and tmpDock.IsVertical() != vertical:\r\n continue\r\n\r\n for tmpPane in tmpDock.panes:\r\n \r\n minSize = pane_border_size*2 - sash_size\r\n\r\n if vertical:\r\n minSize += tmpPane.min_size.y + caption_size\r\n else:\r\n minSize += tmpPane.min_size.x\r\n\r\n if minSize > minSizeMax:\r\n minSizeMax = minSize\r\n \r\n result += minSizeMax\r\n\r\n # Get opposite docks\r\n oppositeDocks = FindOppositeDocks(docks, direction)\r\n\r\n # Sum size of the opposite docks and their sashes\r\n for dock in oppositeDocks:\r\n result += dock.size\r\n # if it's not a toolbar add the sash_size too\r\n if not dock.toolbar:\r\n result += sash_size\r\n \r\n return result", "def __init__(\n self, ctx, coll_j_range=Range('J', 0, Symbol('Jmax') + 1),\n coll_m_range=Range('M'),\n coll_j_dumms=tuple(\n Symbol('J{}'.format(i)) for i in range(1, 30)\n ),\n coll_m_dumms=tuple(\n Symbol('M{}'.format(i)) for i in range(1, 30)\n ),\n tilde_range=Range(r'\\tilde{Q}', 0, Symbol('Ntilde')),\n form_tilde=form_tilde,\n m_range=Range('m'), form_m=form_m, **kwargs\n ):\n super().__init__(ctx, **kwargs)\n\n # Convenient names for quantum number access functions inside drudge\n # scripts.\n self.set_name(\n n_=NOf, NOf=NOf, l_=LOf, LOf=LOf, j_=JOf, JOf=JOf,\n tilde_=TildeOf, TildeOf=TildeOf, m_=MOf, MOf=MOf,\n pi_=PiOf, PiOf=PiOf\n )\n\n self.coll_j_range = coll_j_range\n self.coll_m_range = coll_m_range\n self.coll_j_dumms = coll_j_dumms\n self.coll_m_dumms = coll_m_dumms\n self.set_dumms(coll_j_range, coll_j_dumms)\n self.set_dumms(coll_m_range, coll_m_dumms)\n\n self.tilde_range = tilde_range\n self.form_tilde = form_tilde\n self.tilde_dumms = tuple(form_tilde(i) for i in 
self.qp_dumms)\n self.set_dumms(tilde_range, self.tilde_dumms)\n\n self.m_range = m_range\n self.form_m = form_m\n self.m_dumms = tuple(form_m(i) for i in self.qp_dumms)\n self.set_dumms(m_range, self.m_dumms)\n\n self.add_resolver_for_dumms()\n\n # Add utility about CG coefficients and related things.\n self.set_name(\n CG=CG, Wigner3j=Wigner3j, Wigner6j=Wigner6j, Wigner9j=Wigner9j\n )\n\n self._am_sum_simplifiers = BCastVar(self.ctx, {\n # TODO: Add more simplifications here.\n 2: [_sum_2_3j_to_delta],\n 5: [_sum_4_3j_to_6j]\n })\n self.set_tensor_method('simplify_am', self.simplify_am)\n\n # All expressions for J/j, for merging of simple terms with factors in\n # J/j-hat style.\n self._j_exprs = frozenset(itertools.chain(self.coll_j_dumms, (\n JOf(i) for i in self.tilde_dumms\n )))\n\n # For angular momentum coupling.\n self.set_tensor_method('do_amc', self.do_amc)\n\n # Special simplification routines.\n self.set_tensor_method('simplify_pono', self.simplify_pono)\n self.set_tensor_method('deep_simplify', self.deep_simplify)\n self.set_tensor_method('merge_j', self.merge_j)", "def PMTandPiezoPlot(datadir,run,event,gain): \n en = event\n mu = gain\n e = sbc.DataHandling.GetSBCEvent.GetEvent(datadir+'/'+run,en)\n print(e[\"fastDAQ\"].keys())\n cgate = e[\"fastDAQ\"][\"CAMgate\"]\n dcam = np.diff(cgate)\n \n p0=e[\"fastDAQ\"][\"Piezo1\"]\n p1 = e[\"fastDAQ\"][\"Piezo2\"]\n fdt = e[\"fastDAQ\"][\"time\"]\n runreconpath = \"/pnfs/coupp/persistent/grid_output/SBC-17/output/%s/\"%run\n pmtdiffs = []\n diffs = []\n \n camOnTimes = [fdt[i] for i in range(len(dcam)) if dcam[i] < -0.5]\n camOffTimes = [fdt[i] for i in range(len(dcam)) if dcam[i] > 0.5]\n print(len(camOnTimes))\n print(len(camOffTimes))\n \n acousticfilename = runreconpath+\"AcousticAnalysis_%s.bin\"%run\n a = sbc.DataHandling.ReadBinary.ReadBlock(acousticfilename)\n bubt0 = a[\"bubble_t0\"]\n \n pmttracetime = e[\"PMTtraces\"][\"t0_sec\"][:,0]+e[\"PMTtraces\"][\"t0_frac\"][:,0]\n d=sbc.AnalysisModules.PMTfastDAQalignment.PMTandFastDAQalignment(e)\n pmtalign = d[\"PMT_trigt0_sec\"]+d[\"PMT_trigt0_frac\"]\n tracetimes = pmttracetime - pmtalign\n at0 = bubt0[en,0]\n at0_1 = bubt0[en,1]\n \n allxyzfname = \"/pnfs/coupp/persistent/grid_output/SBC-17/output/SimpleXYZ_all.bin\"\n xyzf = sbc.DataHandling.ReadBinary.ReadBlock(allxyzfname)\n indices = [i for i,x in enumerate(xyzf[\"runid\"]) if str(x[0])+\"_\"+str(x[1]) == run]\n xyz_reconstructed = True\n if len(indices) > 0:\n runposreco = {\"ev\":[xyzf[\"ev\"][indices]],\"x\":[xyzf[\"bubX\"][indices]],\n \"y\":[xyzf[\"bubY\"][indices]],\"z\":[xyzf[\"bubZ\"][indices]]}\n z = runposreco[\"z\"][0][int(int(en))]\n else:\n print(\"no handscan?\")\n z = 1.5\n xyz_reconstructed = False\n lag_expected = (-23.387649*z - 261.020495)*1e-6 # fit from other analysis\n t0_expected_p0 = at0 + lag_expected\n t0_expected_p1 = at0_1 + lag_expected\n \n i=0\n candidates = []\n candidate_times=[]\n for t in (tracetimes-at0):\n \n if t<0.2 and t>-0.2:\n lastCamOff = 0\n for k in range(len(camOffTimes)):\n if t+at0 > camOffTimes[k]:\n lastCamOff = camOffTimes[k]\n elif t+at0 < camOffTimes[k]:\n break\n if t+at0-lastCamOff > 25e-6:\n \n pmtdiffs.append(t)\n trace = np.fabs(e[\"PMTtraces\"][\"traces\"][i][0])\n if max(trace) == 128:\n trace = pi.stitchTraces(trace,np.fabs(e[\"PMTtraces\"][\"traces\"][i][1]))\n dt = e[\"PMTtraces\"][\"dt\"][i][0]\n #baseline = np.mean(trace[0:50])\n #trace = trace - baseline\n [phe,n,totInt,pktimes] = pi.SBC_pulse_integrator_bressler(trace,dt)\n \n if phe != None:\n phe /= mu\n 
candidates.append(phe)\n candidate_times.append(t)\n i+=1\n candidate_phe = 0\n the_index = 0\n i=0\n near_trace_indices = []\n for t in candidate_times:\n if t > -500e-6 and t <0:\n near_trace_indices.append(list(tracetimes-at0).index(t))\n if candidates[i]>candidate_phe:\n candidate_phe = candidates[i]\n the_index = i\n i+=1\n \n if len(candidates) != 0:\n if max(candidates)>0:\n diffs.append(candidate_times[candidates.index(max(candidates))])\n fig,ax1 = plt.subplots()\n ax2 = ax1.twinx()\n ax1.plot(fdt,p0,'b',alpha=0.6, label = 'piezo 0')\n ax1.plot(fdt,p1,'k',alpha=0.2, label= 'piezo 1')\n for i in range(len(candidates)):\n if i == the_index:\n ax2.plot([candidate_times[i]+at0,candidate_times[i]+at0],[0,candidates[i]],'r',lw=4)\n else:\n ax2.plot([candidate_times[i]+at0,candidate_times[i]+at0],[0,candidates[i]],'y',lw=4)\n #ax2.plot([min(candidate_times),max(candidate_times)],[0,0],linewidth=2)\n ax1.plot([at0,at0],[-0.5,0.5],'b',linewidth=2, label = 'acoustic t0, p0')\n ax1.plot([at0_1,at0_1],[-0.5,0.5],'k',linewidth=2, label = 'acoustic t0, p1')\n \"\"\"\n if xyz_reconstructed:\n ax1.plot([t0_expected_p0,t0_expected_p0],[-0.5,0.5],'b:',linewidth=2, label = 'expected PMT t0, p0')\n ax1.plot([t0_expected_p1,t0_expected_p1],[-0.5,0.5],'k:',linewidth=2, label = 'expected PMT t0, p1')\n else:\n ax1.plot([t0_expected_p0,t0_expected_p0],[-0.5,0.5],'b:',linewidth=2, label = 'expected PMT t0, p0, center of chamber')\n ax1.plot([t0_expected_p1,t0_expected_p1],[-0.5,0.5],'k:',linewidth=2, label = 'expected PMT t0, p1, center of chamber')\n \"\"\"\n ax1.plot(fdt,cgate,'c')\n ax1.plot(fdt[:-1],dcam,'m')\n ax2.set_ylabel('pmt signal (phe)',fontsize=20)\n ax1.set_xlabel('time (s)',fontsize=20)\n ax1.set_ylabel('Acoustic signa(V)',fontsize=20)\n ax1.set_ylim([min(p1),max(p1)])\n ax2.set_xlim([-0.1,0.1])\n #ax2.set_ylim([0,5])\n ax1.legend()\n plt.show\n \n for j in near_trace_indices:\n trace = e[\"PMTtraces\"][\"traces\"][j][0]\n dt = e[\"PMTtraces\"][\"dt\"]\n dt_tr = dt[j][0]\n tPMT = np.arange(len(trace))*dt_tr\n plt.figure()\n plt.plot(tPMT,trace)\n plt.xlabel(\"t (s)\")\n plt.ylabel(\"PMT signal\")\n plt.show\n \n plt.figure()\n plt.plot(e[\"fastDAQ\"][\"time\"],e[\"fastDAQ\"][\"VetoCoinc\"])\n plt.ylabel(\"Veto Coincidence signal\",fontsize=18)\n plt.xlabel(\"time (s)\")\n plt.show", "def DoDropFloatingPane(self, docks, panes, target, pt):\r\n \r\n screenPt = self._frame.ClientToScreen(pt)\r\n paneInfo = self.PaneHitTest(panes, pt)\r\n\r\n if paneInfo.IsMaximized():\r\n return False, target\r\n\r\n if paneInfo.window is None:\r\n return False, target\r\n\r\n # search the dock guides.\r\n # reverse order to handle the center first.\r\n for i in xrange(len(self._guides)-1, -1, -1):\r\n guide = self._guides[i]\r\n\r\n # do hit testing on the guide\r\n dir = guide.host.HitTest(screenPt.x, screenPt.y)\r\n\r\n if dir == -1: # point was outside of the dock guide\r\n continue\r\n\r\n if dir == wx.ALL: # target is a single dock guide\r\n return self.DoDropLayer(docks, target, guide.dock_direction)\r\n \r\n elif dir == wx.CENTER:\r\n\r\n if not target.IsNotebookDockable():\r\n continue\r\n if not paneInfo.IsNotebookDockable() and not paneInfo.IsNotebookControl():\r\n continue\r\n\r\n if not paneInfo.HasNotebook():\r\n \r\n # Add a new notebook pane with the original as a tab...\r\n self.CreateNotebookBase(panes, paneInfo)\r\n \r\n # Add new item to notebook\r\n target.NotebookPage(paneInfo.notebook_id)\r\n \r\n else:\r\n \r\n drop_pane = False\r\n drop_row = False\r\n\r\n insert_dir = 
paneInfo.dock_direction\r\n insert_layer = paneInfo.dock_layer\r\n insert_row = paneInfo.dock_row\r\n insert_pos = paneInfo.dock_pos\r\n\r\n if insert_dir == AUI_DOCK_CENTER:\r\n \r\n insert_layer = 0\r\n if dir == wx.LEFT:\r\n insert_dir = AUI_DOCK_LEFT\r\n elif dir == wx.UP:\r\n insert_dir = AUI_DOCK_TOP\r\n elif dir == wx.RIGHT:\r\n insert_dir = AUI_DOCK_RIGHT\r\n elif dir == wx.DOWN:\r\n insert_dir = AUI_DOCK_BOTTOM\r\n \r\n if insert_dir == AUI_DOCK_LEFT:\r\n \r\n drop_pane = (dir == wx.UP or dir == wx.DOWN)\r\n drop_row = (dir == wx.LEFT or dir == wx.RIGHT)\r\n if dir == wx.RIGHT:\r\n insert_row += 1\r\n elif dir == wx.DOWN:\r\n insert_pos += 1\r\n \r\n elif insert_dir == AUI_DOCK_RIGHT:\r\n \r\n drop_pane = (dir == wx.UP or dir == wx.DOWN)\r\n drop_row = (dir == wx.LEFT or dir == wx.RIGHT)\r\n if dir == wx.LEFT:\r\n insert_row += 1\r\n elif dir == wx.DOWN:\r\n insert_pos += 1\r\n \r\n elif insert_dir == AUI_DOCK_TOP:\r\n \r\n drop_pane = (dir == wx.LEFT or dir == wx.RIGHT)\r\n drop_row = (dir == wx.UP or dir == wx.DOWN)\r\n if dir == wx.DOWN:\r\n insert_row += 1\r\n elif dir == wx.RIGHT:\r\n insert_pos += 1\r\n \r\n elif insert_dir == AUI_DOCK_BOTTOM:\r\n \r\n drop_pane = (dir == wx.LEFT or dir == wx.RIGHT)\r\n drop_row = (dir == wx.UP or dir == wx.DOWN)\r\n if dir == wx.UP:\r\n insert_row += 1\r\n elif dir == wx.RIGHT:\r\n insert_pos += 1\r\n \r\n if paneInfo.dock_direction == AUI_DOCK_CENTER:\r\n insert_row = GetMaxRow(panes, insert_dir, insert_layer) + 1\r\n\r\n if drop_pane:\r\n return self.DoDropPane(panes, target, insert_dir, insert_layer, insert_row, insert_pos)\r\n\r\n if drop_row:\r\n return self.DoDropRow(panes, target, insert_dir, insert_layer, insert_row)\r\n \r\n return True, target\r\n \r\n return False, target", "def init_widget(self):\n super(WxDockPane, self).init_widget()\n d = self.declaration\n self.set_title(d.title)\n self.set_title_bar_visible(d.title_bar_visible)\n self.set_title_bar_orientation(d.title_bar_orientation)\n self.set_closable(d.closable)\n self.set_movable(d.movable)\n self.set_floatable(d.floatable)\n self.set_floating(d.floating)\n self.set_dock_area(d.dock_area)\n self.set_allowed_dock_areas(d.allowed_dock_areas)\n widget = self.widget\n widget.Bind(EVT_DOCK_PANE_FLOATED, self.on_floated)\n widget.Bind(EVT_DOCK_PANE_DOCKED, self.on_docked)\n widget.Bind(EVT_DOCK_PANE_CLOSED, self.on_closed)", "def phast_cmmd(self):\n temp = '{prog} -R {rho} -C {ecov} -E {elen} -N {chrom} -i MAF {maf} {model} > {wig}\\n'.format(**self.dict)\n return temp.format(fnum=self.fnum)", "def __init__(self, parent, direction=0):\r\n\r\n self._direction = direction\r\n\r\n style = wx.FRAME_TOOL_WINDOW | wx.STAY_ON_TOP | \\\r\n wx.FRAME_NO_TASKBAR | wx.NO_BORDER\r\n\r\n # Use of FRAME_SHAPED on wxMac causes the frame to be visible\r\n # breaking the docking hints.\r\n if wx.Platform != '__WXMAC__':\r\n style |= wx.FRAME_SHAPED\r\n\r\n AuiDockingGuide.__init__(self, parent, style=style, name=\"auiSingleDockTarget\")\r\n \r\n self.Hide()\r\n\r\n useAero = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_AERO_DOCKING_GUIDES\r\n useWhidbey = GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_WHIDBEY_DOCKING_GUIDES\r\n \r\n self._useAero = useAero or useWhidbey\r\n self._valid = True\r\n \r\n if useAero:\r\n sizeX, sizeY = aeroguideSizeX, aeroguideSizeY\r\n elif useWhidbey:\r\n sizeX, sizeY = whidbeySizeX, whidbeySizeY\r\n else:\r\n sizeX, sizeY = guideSizeX, guideSizeY\r\n\r\n if direction not in [wx.TOP, wx.BOTTOM]:\r\n sizeX, sizeY = sizeY, sizeX\r\n\r\n if 
self._useAero:\r\n self.CreateShapesWithStyle(useWhidbey)\r\n \r\n if wx.Platform == \"__WXGTK__\":\r\n self.Bind(wx.EVT_WINDOW_CREATE, self.SetGuideShape)\r\n else:\r\n self.SetGuideShape()\r\n \r\n self.SetSize(self.region.GetBox().GetSize())\r\n else:\r\n self.SetSize((sizeX, sizeY))\r\n \r\n self.rect = wx.Rect(0, 0, sizeX, sizeY)\r\n\r\n if self._useAero:\r\n useAero = (useWhidbey and [2] or [1])[0]\r\n else:\r\n useAero = 0\r\n \r\n self.target = AuiDockingGuideWindow(self, self.rect, direction, False, useAero)", "def set_dock_area(self, dock_area):\n self.widget.SetDockArea(_DOCK_AREA_MAP[dock_area])", "def _d_converter(self):\n units = {'um':1e-6, 'mm':1e-3, 'inch':2.54e-2, 'in':2.54e-2,\\\n 'micron':1e-6, 'mil':2.54e-5, 'm':1.0}\n for i in self.stack:\n i.thickness = i.thickness*units[i.units]\n return", "def process_meter_message(self, d):\n dpid = int(d.get(\"dpid\", 0))\n dp = self.dpset.get(dpid)\n if not dp:\n return \"Datapath does not exist!\"\n\n ofproto = dp.ofproto\n parser = dp.ofproto_parser\n\n command = {\n 'add': ofproto.OFPMC_ADD,\n 'mod': ofproto.OFPMC_MODIFY,\n 'del': ofproto.OFPMC_DELETE,\n }\n cmd = command.get(d[\"operation\"], ofproto.OFPMC_ADD)\n\n meter_id = d[\"meter_id\"]\n\n flags = 0\n bands = []\n if \"flags\" in d: # Ryu's format\n print(d['flags'])\n for f in d['flags']:\n flags += 0x01 if f == 'KBPS' else 0\n flags += 0x02 if f == 'PKTPS' else 0\n flags += 0x04 if f == 'BURST' else 0\n flags += 0x08 if f == 'STATS' else 0\n\n for band in d[\"bands\"]:\n if band['type'] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band['rate'],\n burst_size=band['burst_size'])]\n elif band['type'] == 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band['rate'],\n burst_size=band['burst_size'], prec_level=band['prec_level'])]\n\n else: # FlowManager's format\n flags += 0x01 if d['OFPMF_KBPS'] else 0\n flags += 0x02 if d['OFPMF_PKTPS'] else 0\n flags += 0x04 if d['OFPMF_BURST'] else 0\n flags += 0x08 if d['OFPMF_STATS'] else 0\n\n # Flags must have KBPS or PKTPS\n flags = flags if (flags & 0x03) else (flags | 0x01)\n\n for band in d[\"bands\"]:\n #mtype = type_convert.get(band[0])\n if band[0] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band[1],\n burst_size=band[2])]\n elif band[0] == 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band[1],\n burst_size=band[2], prec_level=band[3])]\n\n # TODO: catch some errors\n meter_mod = parser.OFPMeterMod(dp, cmd, flags, meter_id, bands)\n try:\n dp.send_msg(meter_mod)\n except KeyError as e:\n return e.__repr__()\n except Exception as e:\n return e.__repr__()\n\n return \"Message sent successfully.\"", "def get_measure_par(input_object):\r\n input_object.measurement_strategy = ui.measurement_strategy.currentIndex()\r\n input_object.len_total = ui.total_length.text()\r\n input_object.frequency = ui.frequency.text()\r\n input_object.num_of_mea = ui.num_of_mea.text()\r\n input_object.len_step = ui.length_step.text()\r\n input_object.time_step = ui.time_step.text()\r\n input_object.temperature = ui.temperature.text()\r\n input_object.humidity = ui.humidity.text()\r\n input_object.na_average_factor = ui.na_average_facotr.value()\r\n input_object.multi_measure = ui.multi_measure.value()\r\n if ui.NA_state.text().strip() != '':\r\n input_object.na_state = ui.NA_state.text().strip()\r\n else:\r\n input_object.na_state = None", "def calc_spindle_psd_i(self, psd_bandwidth, zpad=False, zpad_len=3):\n \n print('Calculating power spectra (this may take a few minutes)...')\n 
self.metadata['spindle_analysis']['psd_dtype'] = 'raw_individual'\n self.metadata['spindle_analysis']['psd_method'] = 'multitaper'\n self.metadata['spindle_analysis']['psd_bandwidth'] = psd_bandwidth\n self.metadata['spindle_analysis']['zeropad'] = zpad\n self.metadata['spindle_analysis']['zeropad_len_sec'] = zpad_len\n sf = self.metadata['analysis_info']['s_freq']\n \n spindles_zpad = {}\n spindle_psd = {}\n spindle_multitaper_calcs = {}\n for chan in self.spindles:\n spindles_zpad[chan] = {}\n spindle_psd[chan] = {}\n # waveform resolution is dependent on length of signal, regardless of zero-padding\n spindle_multitaper_calcs[chan] = pd.DataFrame(columns=['spin_samples', 'spin_seconds', 'zpad_samples', 'zpad_seconds', 'waveform_resoultion_Hz', \n 'psd_resolution_Hz', 'N_taper_len', 'W_bandwidth', 'K_tapers'])\n spindle_multitaper_calcs[chan].index.name = 'spindle_num'\n \n if len(self.spindles[chan]) > 0:\n for x in self.spindles[chan]:\n # subtract mean to zero-center spindle for zero-padding\n data = self.spindles[chan][x].Raw.values - np.mean(self.spindles[chan][x].Raw.values)\n zpad_samples=0\n zpad_seconds=0\n tx=0\n \n # option to zero-pad the spindle\n if zpad:\n total_len = zpad_len*sf\n zpad_samples = total_len - len(data)\n zpad_seconds = zpad_samples/sf\n if zpad_samples > 0:\n padding = np.repeat(0, zpad_samples)\n data_pad = np.append(data, padding)\n else:\n spin_len = len(data)/sf\n print(f'Spindle {chan}:{x} length {spin_len} seconds longer than pad length {zpad_len}')\n data_pad = data\n \n # or leave as-is\n else:\n data_pad = data\n \n # record PS params [K = 2NW-1]\n spin_samples = len(data)\n spin_seconds = len(data)/sf\n waveform_res = 1/spin_seconds\n psd_res = 1/(len(data_pad)/sf)\n N_taper_len = len(data_pad)/sf\n W_bandwidth = psd_bandwidth\n K_tapers = int((2*N_taper_len*W_bandwidth)-1)\n spindle_multitaper_calcs[chan].loc[x] = [spin_samples, spin_seconds, zpad_samples, zpad_seconds, waveform_res, psd_res, N_taper_len, W_bandwidth, K_tapers]\n\n # calculate power spectrum\n try:\n pwr, freqs = psd_array_multitaper(data_pad, sf, adaptive=True, bandwidth=psd_bandwidth, fmax=25, \n normalization='full', verbose=0)\n except ValueError:\n print(f'Specified bandwidth too small for data length. Skipping spindle {chan}:{x}.')\n continue\n \n # convert to series & add to dict\n psd = pd.Series(pwr, index=freqs)\n spindle_psd[chan][x] = psd\n spindles_zpad[chan][x] = data_pad\n \n self.spindles_zpad = spindles_zpad\n self.spindle_multitaper_calcs = spindle_multitaper_calcs\n self.spindle_psd_i = spindle_psd\n print('Done. \\nSpectra stored in obj.spindle_psd_i. Calculations stored in obj.spindle_multitaper_calcs. Zero-padded spindle data in obj.spindles_zpad.\\n')" ]
[ "0.5547273", "0.5520557", "0.5519151", "0.536733", "0.53612316", "0.53288966", "0.5309132", "0.52417874", "0.523785", "0.51947534", "0.51807857", "0.510445", "0.5047843", "0.49827212", "0.498094", "0.497829", "0.49675336", "0.49336413", "0.49266827", "0.49179497", "0.49072325", "0.49009404", "0.4858042", "0.48569328", "0.48405725", "0.48389372", "0.48317766", "0.48291573", "0.48161116", "0.4801181", "0.47824833", "0.47456732", "0.47233865", "0.47121626", "0.47065738", "0.46760145", "0.46482962", "0.46268156", "0.4603208", "0.45871946", "0.45859796", "0.45850587", "0.45814925", "0.45635623", "0.45599937", "0.4557533", "0.45544857", "0.45271665", "0.45211855", "0.45208097", "0.4516639", "0.45105413", "0.44929323", "0.44908404", "0.4489068", "0.44844162", "0.44810057", "0.44778457", "0.4476467", "0.44763088", "0.44738752", "0.4472332", "0.44722185", "0.44717377", "0.44690686", "0.44684765", "0.44644776", "0.44561058", "0.44525135", "0.44465527", "0.44385174", "0.4429142", "0.4424228", "0.44221905", "0.4418104", "0.44156143", "0.44085476", "0.44075924", "0.44017485", "0.4400586", "0.43964237", "0.43902105", "0.4380264", "0.43792155", "0.4373189", "0.43679172", "0.4363978", "0.43623683", "0.4355671", "0.43548664", "0.43520063", "0.43518078", "0.4348927", "0.4348234", "0.43473238", "0.43458962", "0.43447918", "0.43415308", "0.43391803", "0.4336701", "0.4330594" ]
0.0
-1
Actions for Pd2dMeas objects.
def action_pd2d_meas(obj: Pd2dMeas, thread: QtCore.QThread):
    w_actions = []

    qtb_1 = QtWidgets.QToolButton()
    qtb_1.setText("Plot gamma-nu")

    def func_plot_gn(obj):
        fig, ax = obj.plot_gamma_nu()
        fig.show()
        return (fig, ax)

    qtb_1.clicked.connect(lambda: func_plot_gn(obj))
    w_actions.append(qtb_1)

    qtb_1 = QtWidgets.QToolButton()
    qtb_1.setText("Plot 2theta-phi")

    def func_plot_tp(obj):
        fig, ax = obj.plot_ttheta_phi()
        fig.show()
        return (fig, ax)

    qtb_1.clicked.connect(lambda: func_plot_tp(obj))
    w_actions.append(qtb_1)
    return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self,measurements,actions):\n raise NotImplementedError", "def getMeasures():", "def _add_or_update_measurement(self,eq,meas_type,mplane_param2value,period):\r\n meas = self._pvsr.create_pvsr_object(\"Measurement\")\r\n meas.ParentId = eq.Id\r\n meas.Type = meas_type\r\n if \"index_mplane_name\" in self._meas:\r\n if self._meas[\"index_mplane_name\"] not in mplane_param2value:\r\n raise ValueError(\"Missing {0} value\".format(self._meas[\"index_mplane_name\"]))\r\n meas.Index = mplane_param2value[self._meas[\"index_mplane_name\"]]\r\n else:\r\n meas.DescriptionToShow = self._meas[\"name\"] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n \r\n measA = self._pvsr.listMeasurements(meas)\r\n if len(measA) == 0:\r\n if \"index_mplane_name\" not in self._meas:\r\n meas.Index = self._meas[\"name\"]\r\n measA = self._pvsr.listMeasurements(meas)\r\n \r\n add2 = None\r\n \r\n if len(measA) == 0:\r\n #add\r\n if self._verb==mplane.model.VERB_QUERY:\r\n if \"index_mplane_name\" in self._meas:\r\n raise ValueError(\"The measurement does not exists: Index={0}\".format(meas.Index))\r\n else:\r\n raise ValueError(\"The measurement does not exists: Name={0}\".format(meas.DescriptionToShow))\r\n \r\n if \"index_mplane_name\" in self._meas:\r\n if eq.CollectorType == 'c':\r\n meas.DescriptionToShow = mplane_param2value[self._meas[\"index_mplane_name\"]] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n else:\r\n meas.DescriptionToShow = self._meas[\"name\"] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n \r\n if \"uda_constants\" in self._meas:\r\n for uda,value in self._meas[\"uda_constants\"].items():\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = value\r\n meas.Parameter.append(param)\r\n\r\n for mplane_param,uda in self._mplane2uda.items():\r\n if mplane_param in mplane_param2value and mplane_param2value[mplane_param] != \"\":\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = mplane_param2value[mplane_param]\r\n meas.Parameter.append(param)\r\n elif self._uda_name2uda[uda].Required == \"Yes\":\r\n raise ValueError(\"Missing required parameter: {0}\".format(mplane_param))\r\n \r\n logging.info(\"Creating measurement, eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n \r\n meas.Switched = \"No\"\r\n meas.RetainRawData = 365\r\n meas.IntervalInSec = period\r\n \r\n add2 = 1\r\n meas = self._pvsr.addMeasurement(meas)\r\n else:\r\n #update\r\n meas = measA[0]\r\n logging.info(\"Measurement already exists: eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n \r\n need_mod = False\r\n meas_param_name2value = {}\r\n if hasattr(meas,\"Parameter\"):\r\n for i in range(len(meas.Parameter)):\r\n meas_param_name2value[meas.Parameter[i].Name]=meas.Parameter[i].Value\r\n\r\n if \"check_udas\" in self._meas and self._meas[\"check_udas\"] == False:\r\n pass\r\n else:\r\n for mplane_param,uda in self._mplane2uda.items():\r\n if mplane_param in mplane_param2value and mplane_param2value[mplane_param] != \"\":\r\n if uda not in meas_param_name2value or meas_param_name2value[uda] != mplane_param2value[mplane_param]:\r\n if uda not in meas_param_name2value:\r\n logging.warn(\"Parameter mismatch: {0}: NULL != {1}\".format(uda,mplane_param2value[mplane_param]))\r\n else:\r\n logging.warn(\"Parameter mismatch: {0}: {1} != 
{2}\".format(uda,meas_param_name2value[uda],mplane_param2value[mplane_param]))\r\n index2remove=None\r\n for i in range(len(meas.Parameter)):\r\n if meas.Parameter[i].Name == uda:\r\n index2remove = i\r\n break\r\n del meas.Parameter[index2remove]\r\n need_mod = True\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = mplane_param2value[mplane_param]\r\n meas.Parameter.append(param)\r\n else:\r\n if uda in meas_param_name2value:\r\n index2remove=None\r\n for i in range(len(meas.Parameter)):\r\n if meas.Parameter[i].Name == uda:\r\n index2remove = i\r\n break\r\n if index2remove is not None:\r\n logging.warn(\"Parameter mismatch: {0}: {1} != NULL\".format(uda,meas_param_name2value[uda]))\r\n need_mod = True\r\n del meas.Parameter[index2remove]\r\n \r\n if meas.IntervalInSec != period:\r\n need_mod = True\r\n meas.IntervalInSec = period\r\n logging.warn(\"Parameter mismatch: IntervalInSec: {0} != {1}\".format(meas.IntervalInSec,period))\r\n \r\n if need_mod:\r\n if self._verb==mplane.model.VERB_QUERY:\r\n raise ValueError(\"The measurement parameters do not match: Name={0}\".format(meas.DescriptionToShow))\r\n \r\n logging.warn(\"Modifying measurement: eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n meas = self._pvsr.modMeasurement(meas)\r\n add2 = 2\r\n else:\r\n add2 = 0\r\n \r\n return (meas,add2)", "def _fill_meas_result(self,meas,from_time,to_time,meas_data):\r\n input=self._pvsr.create_pvsr_object(\"GetMeasuredValuesInput\")\r\n input.ObjType = \"Measurement\"\r\n input.ObjId = meas.Id\r\n input.From = datetime.datetime.fromtimestamp(from_time)\r\n input.To = datetime.datetime.fromtimestamp(to_time)\r\n logging.info(\"Get values, eq: {0}, type: {1}, index: {2}, name: {3}, {4} -> {5}\".format(self._meas[\"equipment\"],meas.Type,meas.Index,meas.DescriptionToShow,input.From,input.To))\r\n meas_res=self._pvsr.getMeasuredValues(input)\r\n \r\n index2mplane_name={}\r\n multiply = None\r\n if \"first\" in self._meas[\"types\"][meas.Type]:\r\n index2mplane_name[0]=self._meas[\"types\"][meas.Type][\"first\"]\r\n if \"second\" in self._meas[\"types\"][meas.Type]:\r\n index2mplane_name[1]=self._meas[\"types\"][meas.Type][\"second\"]\r\n if \"multiply\" in self._meas[\"types\"][meas.Type]:\r\n multiply=int(self._meas[\"types\"][meas.Type][\"multiply\"])\r\n\r\n if hasattr(meas_res,\"D\"):\r\n for d in meas_res.D:\r\n if d.T not in meas_data:\r\n meas_data[d.T]={}\r\n for index,mplane_name in index2mplane_name.items():\r\n if index < len(d.V):\r\n if multiply is not None:\r\n d.V[index]*=multiply\r\n meas_data[d.T][mplane_name]=d.V[index]\r\n else:\r\n meas_data[d.T][mplane_name]=None", "def measure(self):\n pass", "def measure(self,command_exe, command_args, measure_out):\n pass", "def read_metric_values(self):\n inv_objs = self._inventory_mgr.current_inventory()\n monitored_metrics = self._metric_mgr.get_monitored_metrics()\n perf_manager = self._si.RetrieveServiceContent().perfManager\n for mor in inv_objs.keys():\n for inv_obj in inv_objs[mor]:\n inv_obj_metrics = inv_obj.metric_id_map\n desired_keys = list(set(inv_obj_metrics.keys()) & set(monitored_metrics[mor].keys()))\n if not len(desired_keys) == 0:\n metric_id_objs = [inv_obj_metrics[key] for key in desired_keys]\n query_spec = vim.PerformanceManager.QuerySpec(\n entity=inv_obj.mor, metricId=metric_id_objs,\n intervalId=inv_obj.INSTANT_INTERVAL,\n maxSample=1, format='normal'\n )\n try:\n results = 
perf_manager.QueryPerf(querySpec=[query_spec])\n except Exception as e:\n self._logger.error(\"Exception while making performance query : {0}\".format(e))\n if results:\n dps = self._parse_query(inv_obj, results, monitored_metrics[mor])\n payload = self._build_payload(dps)\n self._dispatch_metrics(payload)\n else:\n self._logger.warning(\"Empty result from query : {0}\".format(query_spec))", "def _measurement_update(self):\n pass", "def compute_metrics(self):\n pass", "def set_metrics(self):", "def get_measurements(self, pipeline, object_name, category):\n result = self.get_object_measurements(pipeline, object_name, category,\n {self.object_name.value: [] })\n return result", "def metrics(self):\n raise NotImplementedError(\"metrics\")", "def get_measured_data_ouputs(self, t): # TODO: SPELLING\n obsOut = numpy.zeros(shape=(1, self.get_num_measured_outputs()))\n i = 0\n for o in self.outputs:\n if o.is_measured_output():\n obsOut[0,i] = o.read_from_data_series(t)\n i += 1\n return obsOut", "def __init__(self, meas, verb,pvsr, default_site,delete_created_measurements,pvsr_default_conf_check_cycle,pvsr_meas_types):\r\n \r\n logging.info(\"adding capability: {0}\".format(meas[\"name\"]))\r\n \r\n self._verb=verb\r\n if verb==mplane.model.VERB_QUERY:\r\n cap = mplane.model.Capability(label=meas[\"name\"]+\"-query\", when = \"past ... now / 15s\", verb=mplane.model.VERB_QUERY)\r\n elif verb==mplane.model.VERB_MEASURE:\r\n cap = mplane.model.Capability(label=meas[\"name\"]+\"-measure\", when = \"now ... future / 15s\", verb=mplane.model.VERB_MEASURE)\r\n else:\r\n raise ValueError(\"Verb is not supported: {0}\".format(verb))\r\n cap.add_result_column(\"time\")\r\n \r\n self._mplane2uda={}\r\n self._uda_name2uda = {}\r\n \r\n self._pvsr_default_conf_check_cycle=pvsr_default_conf_check_cycle\r\n \r\n try:\r\n for k in sorted(meas[\"types\"].keys()):\r\n if \"first\" in meas[\"types\"][k]:\r\n logging.debug(\" result colum: {0}\".format(meas[\"types\"][k][\"first\"]))\r\n cap.add_result_column(meas[\"types\"][k][\"first\"])\r\n if \"second\" in meas[\"types\"][k]:\r\n logging.debug(\" result colum: {0}\".format(meas[\"types\"][k][\"second\"]))\r\n cap.add_result_column(meas[\"types\"][k][\"second\"])\r\n \r\n if \"PropertyType\" in pvsr_meas_types[k]:\r\n for i in range(len(pvsr_meas_types[k][\"PropertyType\"])):\r\n self._uda_name2uda[pvsr_meas_types[k][\"PropertyType\"][i][\"Name\"]]=pvsr_meas_types[k][\"PropertyType\"][i]\r\n \r\n if \"index_mplane_name\" in meas:\r\n logging.debug(\" parameter: {0}\".format(meas[\"index_mplane_name\"]))\r\n cap.add_parameter(meas[\"index_mplane_name\"])\r\n \r\n if \"mplane_constants\" in meas:\r\n for k,v in sorted(meas[\"mplane_constants\"].items()):\r\n logging.debug(\" parameter: {0} with value {1}\".format(k,v))\r\n cap.add_parameter(k,v)\r\n \r\n if \"uda_name2mplane_name\" in meas:\r\n for k,v in sorted(meas[\"uda_name2mplane_name\"].items()):\r\n if k in self._uda_name2uda:\r\n logging.debug(\" parameter: {0}\".format(v))\r\n cap.add_parameter(v)\r\n self._mplane2uda[v]=k\r\n else:\r\n logging.error(\" unknown UDA: {0}\".format(v))\r\n except Exception as e:\r\n logging.critical(\"Error during capability creation: {0}\".format(e))\r\n raise e\r\n\r\n super(PvsrService, self).__init__(cap)\r\n \r\n self._pvsr = pvsr\r\n self._meas = meas\r\n self._default_site = default_site\r\n self._delete_created_measurements = delete_created_measurements\r\n self._pvsr_meas_types = pvsr_meas_types", "def measure():\n print(\"alias, timestamp, current, total, power, 
voltage, err_code\")\n message_str = MeasurementRequest(None).to_json()\n socket_object = UdpSocket()\n s = UDPSendThread(message_str, socket_object)\n r = UDPRecvThread(socket_object, measurement_output_parser)\n s.start()\n r.start()\n\n wait((s, r))", "def overviewCommand(self):\n plt.figure(11)\n plt.clf()\n ax = plt.subplot(211)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET'),\n color='r', label='FUOFFSET',\n linewidth=1, alpha=1) \n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='r', linewidth=3, alpha=0.5,\n label=self.DLtrack+'-PSP')\n plt.legend()\n plt.subplot(212, sharex=ax)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET')-\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='k', label='$\\Delta$',\n linewidth=1, alpha=1) \n \n signal = self.raw['OPDC'].data.field('FUOFFSET')\n plt.figure(12)\n plt.clf()\n ax2 = plt.subplot(111)\n Fs = 1e6/np.diff(self.raw['OPDC'].data.field('TIME')).mean()\n print Fs\n ax2.psd(signal[:50000], NFFT=5000, Fs=Fs, label='FUOFFSET',scale_by_freq=0)\n plt.legend()", "def measobs(self, measfct, stampsize=200, skipdone=True):\n\t\t\n\t\tincatfilepaths = glob.glob(os.path.join(self.workobsdir, \"*-inputcat.pkl\"))\n\t\toutcatfilepaths = [incat.replace(\"inputcat\", \"meascat\") for incat in incatfilepaths]\n\t\t\n\t\tlogger.info(\"Measuring %i cats...\" % len(incatfilepaths))\n\t\n\t\tmeasfctkwargs = {\"stampsize\":stampsize}\n\t\n\t\tmegalut.meas.run.general(incatfilepaths, outcatfilepaths, measfct, measfctkwargs, ncpu=self.ncpu, skipdone=skipdone)", "def read_measurement(self):\n return self.execute(SdpI2cCmdReadMeasurement())", "def make_metrics(self):\n num_batches = self.data_loader.number_of_batches()\n dose_score_vec = np.zeros(num_batches)\n\n # Only make calculations if data_loader is not empty\n if not self.data_loader.file_paths_list:\n print('No patient information was given to calculate metrics')\n else:\n # Change batch size to 1\n self.data_loader.batch_size = 1 # Loads data related to ground truth patient information\n if self.dose_loader is not None:\n self.dose_loader.batch_size = 1 # Loads data related to ground truth patient information\n\n for idx in tqdm.tqdm(range(num_batches)):\n # Get roi masks for patient\n self.get_constant_patient_features(idx)\n # Get dose tensors for reference dose and evaluate criteria\n reference_dose = self.get_patient_dose_tensor(self.data_loader)\n if reference_dose is not None:\n self.reference_dose_metric_df = self.calculate_metrics(self.reference_dose_metric_df, reference_dose)\n # If a dose loader was provided, calculate the score\n if self.dose_loader is not None:\n new_dose = self.get_patient_dose_tensor(self.dose_loader)\n # Make metric data frames\n self.new_dose_metric_df = self.calculate_metrics(self.new_dose_metric_df, new_dose)\n # Evaluate mean absolute error of 3D dose\n dose_score_vec[idx] = np.sum(np.abs(reference_dose - new_dose)) / np.sum(self.possible_dose_mask)\n # Save metrics at the patient level (this is a template for how DVH stream participants could save\n # their files\n # self.dose_metric_df.loc[self.patient_list[0]].to_csv('{}.csv'.format(self.patient_list[0]))\n\n if self.dose_loader is not None:\n dvh_score = np.nanmean(np.abs(self.reference_dose_metric_df - self.new_dose_metric_df).values)\n dose_score = dose_score_vec.mean()\n return 
dvh_score, dose_score\n else:\n print('No new dose provided. Metrics were only calculated for the provided dose.')", "def measurements(self) -> NONEARRAY:\n pass", "def get_measurements(self):\n metrics = {}\n for key in self.fields.keys():\n metrics[key] = []\n # What's in output:\n # proc_pid date virt res shrd cpu mem power gpus_power\n while not self.queue.empty():\n data = self.queue.get().strip().split()\n for field in self.fields:\n tp = self.fields[field]['type']\n idx = self.fields[field]['index']\n count = self.fields[field]['count']\n if count == -1:\n metrics[field].append(ResourceMonitor.str_to_type(data[idx], tp))\n elif count == 0:\n metrics[field].append([ResourceMonitor.str_to_type(data[idx], tp)])\n else:\n metrics[field].append([\n ResourceMonitor.str_to_type(data[index], tp) for index in xrange(idx, idx+count)\n ])\n return metrics", "def get_all_measurements(self, start_time, end_time):\n return", "def test_measurment(self):\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"km\"), 6.214)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"m\"), 10.936)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"cm\"), 0.328)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"mm\"), 0.394)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"mi\"), 16.093)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"yd\"), 9.144)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"ft\"), 304.8)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"in\"), 254)", "def action_pd(obj: Pd, thread: QtCore.QThread):\n w_actions = []\n f_meas = obj.is_attribute(\"pd_meas\")\n f_chi2 = obj.is_attribute(\"chi2\")\n f_phase = obj.is_attribute(\"phase\")\n\n l_pd_peak = []\n if f_phase:\n phase = obj.phase\n for item in phase.items:\n try:\n pd_peak = getattr(obj, f\"pd_peak_{item.label.lower():}\")\n l_pd_peak.append(pd_peak)\n except AttributeError:\n pass\n\n f_setup = obj.is_attribute(\"setup\")\n f_pd_instr_resolution = obj.is_attribute(\"pd_instr_resolution\")\n f_pd_background = obj.is_attribute(\"pd_background\")\n f_range = obj.is_attribute(\"range\")\n\n if not(f_chi2 & f_meas & f_setup & f_pd_instr_resolution & f_phase &\n f_pd_background & f_range):\n if not f_chi2:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add chi2\")\n qtb_1.clicked.connect(lambda: add_items(obj, [Chi2()], thread))\n w_actions.append(qtb_1)\n\n if not f_meas:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add pd_meas\")\n qtb_1.clicked.connect(lambda: add_items(obj, [PdMeasL()], thread))\n w_actions.append(qtb_1)\n\n if not f_setup:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add setup\")\n qtb_1.clicked.connect(lambda: add_items(obj, [Setup()], thread))\n w_actions.append(qtb_1)\n\n if not f_pd_instr_resolution:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add pd_instr_resolution\")\n qtb_1.clicked.connect(lambda: add_items(obj, [PdInstrResolution()],\n thread))\n w_actions.append(qtb_1)\n\n if not f_phase:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add phase\")\n vv = PhaseL()\n vv.items = [Phase(label=\"phase\", igsize=0., scale=1.)]\n qtb_1.clicked.connect(lambda: add_items(obj, [vv], thread))\n w_actions.append(qtb_1)\n\n if not f_pd_background:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add pd_background\")\n qtb_1.clicked.connect(lambda: add_items(obj, [PdBackgroundL()],\n thread))\n w_actions.append(qtb_1)\n\n if not f_range:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add range\")\n qtb_1.clicked.connect(lambda: 
add_items(obj, [Range(\n ttheta_min=2, ttheta_max=100.)], thread))\n w_actions.append(qtb_1)\n return w_actions", "def getAllMeasurement(self): \n return self.measurement", "def get_all_DLP_measurements(self):\n pass", "def runMeasurement(self): \n if isI1DisplayDiffuserOn():\n print'Please remove diffuser for normal measurement\\n'\n else:\n self.measurement.append(getI1DisplayMesure(self.ambiant, self._raw, self._device))", "def measurements(self):\n # get available measurement types for this node\n measurement_types = self.measurement_types()\n\n # retrieve measurement for each type\n return list(self.measurement(t) for t in measurement_types)", "def measure_test(self):\n return self.execute(Sgp40I2cCmdExecuteSelfTest())", "async def update_measures(self):\n\n def function():\n return self._api.get_measures()\n\n self._measures = await self.call(function, throttle_domain=\"update_measures\")\n\n return self._measures", "def measureDispatch(self):\n\n if self.ui.measureDevice.currentText().startswith('Built-In'):\n self.app.measure.startMeasurement()\n self.app.message.emit('Measurement enabled', 0)\n self.deviceStat['measure'] = True\n self.ui.measureDevice.setStyleSheet(self.BACK_GREEN)\n else:\n self.app.measure.stopMeasurement()\n self.app.message.emit('Measurement disabled', 0)\n self.deviceStat['measure'] = None\n self.ui.measureDevice.setStyleSheet(self.BACK_NORM)\n\n return True", "def test_get_measure_parameters(self):\n pass", "def __init__(self, measure):\n self.measure = measure # Dictionary of the measurement steps\n self.devices = {} # Dictionary holding all the devices\n self.output_devices = [] # List of devices with output capabilities\n self.daqs = {} # Dictionary that holds for each daq the inputs and outputs.\n self.rotation_stages = [] # If there are rotation stages present, they will show up in this list.\n # This short block is going to become useful in the future, when interfacing with a GUI\n for d in self.measure:\n setattr(self, d, self.measure[d])", "def _plot_metrics(self):\n if len(self._episode_q_means) > 0:\n mean_q = np.asscalar(np.mean(self._episode_q_means))\n self._metrics_writer.write_value('Mean Q per ep.', mean_q, self._num_actions_taken)\n\n if len(self._episode_q_stddev) > 0:\n std_q = np.asscalar(np.mean(self._episode_q_stddev))\n self._metrics_writer.write_value('Mean Std Q per ep.', std_q, self._num_actions_taken)\n\n self._metrics_writer.write_value('Sum rewards per ep.', sum(self._episode_rewards), self._num_actions_taken)", "def measure(self):\n # --- perform repeated runs\n for i_run in range(self.n_runs):\n if self.verbosity > 0:\n print(\"Run {0} / {1} ...\".format(i_run, self.n_runs), end = '')\n tdelta = self._timed_execute()\n self._run_times[i_run] = tdelta\n\t\t\t\n if self.verbosity == 2:\n print(tdelta)\n \n # calculate mean\n self._tmean = np.mean(self._run_times)\n # calculate standard deviation\n self._tstdev = np.std(self._run_times)\n # allow access to results\n self.__hasrun = True", "def compute(self) -> Any:\n # ddp hotfix, could be done better\n # but metric must handle DDP on it's own\n if self._ddp_backend == \"xla\":\n device = get_device()\n for key in self.statistics:\n key_statistics = torch.tensor([self.statistics[key]], device=device)\n key_statistics = xm.all_gather(key_statistics).sum(dim=0).cpu().numpy()\n self.statistics[key] = key_statistics\n elif self._ddp_backend == \"ddp\":\n for key in self.statistics:\n value: List[np.ndarray] = all_gather(self.statistics[key])\n value: np.ndarray = 
np.sum(np.vstack(value), axis=0)\n self.statistics[key] = value\n\n per_class, micro, macro, weighted = get_aggregated_metrics(\n tp=self.statistics[\"tp\"],\n fp=self.statistics[\"fp\"],\n fn=self.statistics[\"fn\"],\n support=self.statistics[\"support\"],\n zero_division=self.zero_division,\n )\n if self.compute_per_class_metrics:\n return per_class, micro, macro, weighted\n else:\n return [], micro, macro, weighted", "def compute(self) -> Any:\n # ddp hotfix, could be done better\n # but metric must handle DDP on it's own\n if self._ddp_backend == \"xla\":\n device = get_device()\n for key in self.statistics:\n key_statistics = torch.tensor([self.statistics[key]], device=device)\n key_statistics = xm.all_gather(key_statistics).sum(dim=0).cpu().numpy()\n self.statistics[key] = key_statistics\n elif self._ddp_backend == \"ddp\":\n for key in self.statistics:\n value: List[np.ndarray] = all_gather(self.statistics[key])\n value: np.ndarray = np.sum(np.vstack(value), axis=0)\n self.statistics[key] = value\n\n per_class, micro, macro, weighted = get_aggregated_metrics(\n tp=self.statistics[\"tp\"],\n fp=self.statistics[\"fp\"],\n fn=self.statistics[\"fn\"],\n support=self.statistics[\"support\"],\n zero_division=self.zero_division,\n )\n if self.compute_per_class_metrics:\n return per_class, micro, macro, weighted\n else:\n return [], micro, macro, weighted", "def observe(self, env: dm_env.Environment, timestep: dm_env.TimeStep,\n action: np.ndarray) -> None:\n self._accumulate_metrics(env)", "def __call__(self, output, target, *args, **kwargs):\n _, y_pred = output.topk(1, 1, True, True)\n y_pred = y_pred.t().detach().cpu().numpy()[0]\n y_true = target.detach().cpu().numpy()\n self.pfm = self.metric_func(y_true, y_pred)\n return self.pfm", "def _process_measure(self, node):\n id0 = self._process_bit_id(node.children[0])\n id1 = self._process_bit_id(node.children[1])\n if len(id0) != len(id1):\n raise UnrollerError(\"internal error: reg size mismatch\",\n \"line=%s\" % node.line, \"file=%s\" % node.file)\n for idx, idy in zip(id0, id1):\n self.backend.measure(idx, idy)", "def addMeasArgs(parser):\n parser.add_argument(\"infile\", help=\"Input file\", type=Path)\n parser.add_argument(\"-o\", \"--output\", help=\"Output file\", type=Path, dest=\"outfile\")\n parser.add_argument(\"--overwrite\", action=\"store_true\",\n help=\"Overwrite existing output file.\")\n return parser", "def stats(self):", "def getPhysicalSamples(self, **kwargs):\n # initialise chans, startSample and endSample with the whole dataset\n options = self.parseGetDataKeywords(kwargs)\n # get data\n timeData = self.getUnscaledSamples(\n chans=options[\"chans\"],\n startSample=options[\"startSample\"],\n endSample=options[\"endSample\"],\n )\n # Scalars are applied in getUnscaledSamples to convert to mV - this is for ease of calculation and because each data file in the run might have a separate scaling\n # all that is left is to divide by the dipole length in km and remove the average\n for chan in options[\"chans\"]:\n if chan == \"Ex\":\n # multiply by 1000/self.getChanDx same as dividing by dist in km\n timeData[chan] = 1000 * timeData[chan] / self.getChanDx(chan)\n timeData.addComment(\n \"Dividing channel {} by electrode distance {} km to give mV/km\".format(\n chan, self.getChanDx(chan) / 1000.0\n )\n )\n if chan == \"Ey\":\n # multiply by 1000/self.getChanDy same as dividing by dist in km\n timeData[chan] = 1000 * timeData[chan] / self.getChanDy(chan)\n timeData.addComment(\n \"Dividing channel {} by electrode distance 
{} km to give mV/km\".format(\n chan, self.getChanDy(chan) / 1000.0\n )\n )\n\n # if remove zeros - False by default\n if options[\"remzeros\"]:\n timeData[chan] = removeZerosChan(timeData[chan])\n # if remove nans - False by default\n if options[\"remnans\"]:\n timeData[chan] = removeNansChan(timeData[chan])\n # remove the average from the data - True by default\n if options[\"remaverage\"]:\n timeData[chan] = timeData[chan] - np.average(\n timeData[chan]\n )\n\n # add comments\n timeData.addComment(\n \"Remove zeros: {}, remove nans: {}, remove average: {}\".format(\n options[\"remzeros\"], options[\"remnans\"], options[\"remaverage\"]\n )\n )\n return timeData", "def to_metric(self):\r\n if self.units != 'metric':\r\n self.units = 'metric'\r\n for statement in self.statements:\r\n statement.to_metric()\r\n for tool in iter(self.tools.values()):\r\n tool.to_metric()\r\n for primitive in self.primitives:\r\n primitive.to_metric()\r\n for hit in self.hits:\r\n hit.to_metric()", "def get_measurement_objects(self, pipeline, object_name, category, \n measurement):\n return self.get_threshold_measurement_objects(pipeline, object_name,\n category, measurement)", "def metrics(self):\n \n if self.mse.shape[0]>1:\n raise ValueError('Metrics can only handle single observations.')\n \n if self.N==1:\n pred = float('nan')\n err = float('nan')\n y_true = float('nan')\n else:\n pred = int(self._predictions[-1])\n err = self._mse[-1]\n y_true = int(self.label[0])\n \n is_outlier = {\"type\":\"GAUGE\",\"key\":\"is_outlier\",\"value\":pred}\n mse = {\"type\":\"GAUGE\",\"key\":\"mse\",\"value\":err}\n obs = {\"type\":\"GAUGE\",\"key\":\"observation\",\"value\":self.N - 1}\n threshold = {\"type\":\"GAUGE\",\"key\":\"threshold\",\"value\":self.threshold}\n \n label = {\"type\":\"GAUGE\",\"key\":\"label\",\"value\":y_true}\n \n accuracy_tot = {\"type\":\"GAUGE\",\"key\":\"accuracy_tot\",\"value\":self.metric[4]}\n precision_tot = {\"type\":\"GAUGE\",\"key\":\"precision_tot\",\"value\":self.metric[5]}\n recall_tot = {\"type\":\"GAUGE\",\"key\":\"recall_tot\",\"value\":self.metric[6]}\n f1_score_tot = {\"type\":\"GAUGE\",\"key\":\"f1_tot\",\"value\":self.metric[7]}\n f2_score_tot = {\"type\":\"GAUGE\",\"key\":\"f2_tot\",\"value\":self.metric[8]}\n \n accuracy_roll = {\"type\":\"GAUGE\",\"key\":\"accuracy_roll\",\"value\":self.metric[9]}\n precision_roll = {\"type\":\"GAUGE\",\"key\":\"precision_roll\",\"value\":self.metric[10]}\n recall_roll = {\"type\":\"GAUGE\",\"key\":\"recall_roll\",\"value\":self.metric[11]}\n f1_score_roll = {\"type\":\"GAUGE\",\"key\":\"f1_roll\",\"value\":self.metric[12]}\n f2_score_roll = {\"type\":\"GAUGE\",\"key\":\"f2_roll\",\"value\":self.metric[13]}\n \n true_negative = {\"type\":\"GAUGE\",\"key\":\"true_negative\",\"value\":self.metric[0]}\n false_positive = {\"type\":\"GAUGE\",\"key\":\"false_positive\",\"value\":self.metric[1]}\n false_negative = {\"type\":\"GAUGE\",\"key\":\"false_negative\",\"value\":self.metric[2]}\n true_positive = {\"type\":\"GAUGE\",\"key\":\"true_positive\",\"value\":self.metric[3]}\n \n nb_outliers_roll = {\"type\":\"GAUGE\",\"key\":\"nb_outliers_roll\",\"value\":self.metric[14]}\n nb_labels_roll = {\"type\":\"GAUGE\",\"key\":\"nb_labels_roll\",\"value\":self.metric[15]}\n nb_outliers_tot = {\"type\":\"GAUGE\",\"key\":\"nb_outliers_tot\",\"value\":self.metric[16]}\n nb_labels_tot = {\"type\":\"GAUGE\",\"key\":\"nb_labels_tot\",\"value\":self.metric[17]}\n \n return [is_outlier,mse,obs,threshold,label,\n 
accuracy_tot,precision_tot,recall_tot,f1_score_tot,f2_score_tot,\n accuracy_roll,precision_roll,recall_roll,f1_score_roll,f2_score_roll,\n true_negative,false_positive,false_negative,true_positive,\n nb_outliers_roll,nb_labels_roll,nb_outliers_tot,nb_labels_tot]", "def compute_metrics(self, metric_ids=None, probes=None):\n url = \"/projects/%s/managedfolders/%s/actions\" % (self.project_key, self.odb_id)\n if metric_ids is not None:\n return self.client._perform_json(\n \"POST\" , \"%s/computeMetricsFromIds\" % url,\n body={\"metricIds\" : metric_ids})\n elif probes is not None:\n return self.client._perform_json(\n \"POST\" , \"%s/computeMetrics\" % url,\n body=probes)\n else:\n return self.client._perform_json(\n \"POST\" , \"%s/computeMetrics\" % url)", "def measurements(self):\n # TODO: add in empty measurements for assays that have none?\n return self._measure_queryset", "def advancedStats():", "def measure(self, lastMeasure=None, m=None):\n if m is None:\n m = {}\n m['_time'] = time.time()\n if lastMeasure is not None:\n m['_stepDuration'] = time.time() - lastMeasure['_time']\n else:\n m['_stepDuration'] = time.time() - self._start_t\n self._msr(m)\n return m", "def runMeasurement(self):\n triggerI1ProMeasurement()\n self.spectrum.append(getI1ProSpectrum())\n self.tristimulus.append(getI1ProTriStimulus())", "def dataStats(self):\n print (\"Performing statistical analysis of the data\")\n # stuff to do", "def get_measured_outputs_values(self):\n obsOut = numpy.zeros(self.get_num_measured_outputs())\n i = 0\n for o in self.outputs:\n if o.is_measured_output():\n obsOut[i] = o.read_value_in_fmu(self.fmu)\n i += 1\n return obsOut", "def raw_measure(self) -> List[int]:\n # name, command, signals, delay\n return self._run_profile((\"raw_measure\", [0x20, 0x50], 2, 0.025))", "def measures(self):\n return self._measures", "def run(self, d):\n\n feeds,feedidx,_ = self.getFeeds(d,'all')\n\n tod_shape = d[f'{self.level2}/averaged_tod'].shape\n \n scanedges = d[f'{self.level2}/Statistics/scan_edges'][...]\n nfeeds = 20\n nchannels = 8\n \n self.all_tod = np.zeros((20, nchannels, tod_shape[-1])) \n self.all_weights = np.zeros((20, nchannels, tod_shape[-1])) \n self.all_frequency = np.zeros((nchannels)) \n self.all_auto = np.zeros((20,nchannels)) \n self.all_mask = np.zeros((20,tod_shape[-1]))\n self.all_cal_factors = np.zeros((20,4,64))\n # Read in data from each feed\n for ifeed,feed in enumerate(tqdm(feeds,desc='Looping over feeds')):\n if feeds[ifeed] == 20:\n continue\n feed_tod,feed_weights,mask = self.clean_tod(d,ifeed,feed)\n\n if self.astro_cal:\n feed_tod,feed_weights,cal_factors = self.calibrate_tod(d,feed_tod,feed_weights,ifeed,feed)\n else:\n cal_factors = 1\n\n self.all_tod[feed-1],self.all_weights[feed-1], self.all_auto[feed-1], self.all_frequency = self.average_tod(d,feed_tod,feed_weights,mask) \n self.all_mask[feed-1] = mask\n self.all_cal_factors[feed-1] = cal_factors", "def m2m_changed_metrics(sender, **kwargs):\r\n if 'action' not in kwargs:\r\n return\r\n\r\n action = {\r\n 'post_add': 'm2m.added',\r\n 'post_remove': 'm2m.removed',\r\n 'post_clear': 'm2m.cleared',\r\n }.get(kwargs['action'])\r\n\r\n if not action:\r\n return\r\n\r\n tags = _database_tags(action, sender, kwargs)\r\n\r\n if 'model' in kwargs:\r\n tags.append('target_class:{}'.format(kwargs['model'].__name__))\r\n\r\n pk_set = kwargs.get('pk_set', []) or []\r\n\r\n dog_stats_api.increment(\r\n 'edxapp.db.model',\r\n value=len(pk_set),\r\n tags=tags\r\n )", "def measure(mode, x, y, x0, x1, thresh = 0):\n xt = 
x.view(numpy.ndarray) # strip Metaarray stuff -much faster!\n v = y.view(numpy.ndarray)\n \n xm = ma.masked_outside(xt, x0, x1).T\n ym = ma.array(v, mask = ma.getmask(xm))\n if mode == 'mean':\n r1 = ma.mean(ym)\n r2 = ma.std(ym)\n if mode == 'max' or mode == 'maximum':\n r1 = ma.max(ym)\n r2 = xm[ma.argmax(ym)]\n if mode == 'min' or mode == 'minimum':\n r1 = ma.min(ym)\n r2 = xm[ma.argmin(ym)]\n if mode == 'median':\n r1 = ma.median(ym)\n r2 = 0\n if mode == 'p2p': # peak to peak\n r1 = ma.ptp(ym)\n r2 = 0\n if mode == 'std': # standard deviation\n r1 = ma.std(ym)\n r2 = 0\n if mode == 'var': # variance\n r1 = ma.var(ym)\n r2 = 0\n if mode == 'cumsum': # cumulative sum\n r1 = ma.cumsum(ym) # Note: returns an array\n r2 = 0\n if mode == 'anom': # anomalies = difference from averge\n r1 = ma.anom(ym) # returns an array\n r2 = 0\n if mode == 'sum':\n r1 = ma.sum(ym)\n r2 = 0\n if mode == 'area' or mode == 'charge':\n r1 = ma.sum(ym)/(ma.max(xm)-ma.min(xm))\n r2 = 0\n if mode == 'latency': # return first point that is > threshold\n sm = ma.nonzero(ym > thresh)\n r1 = -1 # use this to indicate no event detected\n r2 = 0\n if ma.count(sm) > 0:\n r1 = sm[0][0]\n r2 = len(sm[0])\n if mode == 'count':\n r1 = ma.count(ym)\n r2 = 0\n if mode == 'maxslope':\n return(0,0)\n slope = numpy.array([])\n win = ma.flatnotmasked_contiguous(ym)\n st = int(len(win)/20) # look over small ranges\n for k in win: # move through the slope measurementwindow\n tb = range(k-st, k+st) # get tb array\n newa = numpy.array(self.dat[i][j, thisaxis, tb])\n ppars = numpy.polyfit(x[tb], ym[tb], 1) # do a linear fit - smooths the slope measures\n slope = numpy.append(slope, ppars[0]) # keep track of max slope\n r1 = numpy.amax(slope)\n r2 = numpy.argmax(slope)\n return(r1, r2)", "def _write_measurements(summary_writer, labels_and_values, step):\n\n # Write TF Summaries Measurements.\n with summary_writer.as_default():\n for (label, value) in labels_and_values:\n tf.summary.scalar(label, value, step=step)", "def run(self):\n self.run_measurement()\n self.run_analysis()\n if self.get_param_value('update'):\n self.run_update()", "def trigger_measurement_with_mass_flow_t_comp_and_averaging(self):\n return self.execute(SdpI2cCmdTriggerMeasurementWithMassFlowTCompAndAveraging())", "def measure(mode, x, y, x0, x1):\n xm = ma.masked_outside(x, x0, x1)\n ym = ma.array(y, mask = ma.getmask(xm))\n if mode == 'mean':\n r1 = ma.mean(ym)\n r2 = ma.std(ym)\n if mode == 'max':\n r1 = ma.max(ym)\n r2 = 0\n if mode == 'min':\n r1 = ma.min(ym)\n r2 = 0\n if mode == 'median':\n r1 = ma.median(ym)\n r2 = 0\n if mode == 'p2p': # peak to peak\n r1 = ma.ptp(ym)\n r2 = 0\n return(r1, r2)", "def MeasureMultipleDigitalEdges(self, channel1, channel2, edgeType1, edgeType2, points1, points2, timeout=0.1,\n **kwargs):\n self.H.__sendByte__(CP.TIMING)\n self.H.__sendByte__(CP.TIMING_MEASUREMENTS)\n timeout_msb = int((timeout * 64e6)) >> 16\n # print ('timeout',timeout_msb)\n self.H.__sendInt__(timeout_msb)\n self.H.__sendByte__(self.__calcDChan__(channel1) | (self.__calcDChan__(channel2) << 4))\n params = 0\n if edgeType1 == 'rising':\n params |= 3\n elif edgeType1 == 'falling':\n params |= 2\n else:\n params |= 4\n\n if edgeType2 == 'rising':\n params |= 3 << 3\n elif edgeType2 == 'falling':\n params |= 2 << 3\n else:\n params |= 4 << 3\n\n if ('SQR1' in kwargs): # User wants to toggle SQ1 before starting the timer\n params |= (1 << 6)\n if kwargs['SQR1'] == 'HIGH': params |= (1 << 7)\n self.H.__sendByte__(params)\n if points1 > 4: points1 = 4\n if points2 
> 4: points2 = 4\n self.H.__sendByte__(points1 | (points2 << 4)) # Number of points to fetch from either channel\n\n self.H.waitForData(timeout)\n\n A = np.array([self.H.__getLong__() for a in range(points1)])\n B = np.array([self.H.__getLong__() for a in range(points2)])\n tmt = self.H.__getInt__()\n self.H.__get_ack__()\n # print(A,B)\n if (tmt >= timeout_msb): return None, None\n rtime = lambda t: t / 64e6\n if (kwargs.get('zero', True)): # User wants set a reference timestamp\n return rtime(A - A[0]), rtime(B - A[0])\n else:\n return rtime(A), rtime(B)", "def compute_statistics(self):", "def CreateMeasuringContext(*args):\n return _gdi_.GraphicsContext_CreateMeasuringContext(*args)", "def compute_metrics(self):\n self.finalize_output_dict()\n self.metric_dict = {\n key: value(self.output_dict[\"labels\"], self.output_dict[\"pred_probs\"])\n for key, value in self.metric_fns.items()\n }", "def testMethodProfile2D(self):\n\n toolBar = self.plot.getProfileToolbar()\n\n toolBar.vLineAction.trigger()\n plot2D = self.plot.getPlotWidget().getWidgetHandle()\n pos1 = plot2D.width() * 0.5, plot2D.height() * 0.5\n self.mouseClick(plot2D, qt.Qt.LeftButton, pos=pos1)\n\n manager = toolBar.getProfileManager()\n roi = manager.getCurrentRoi()\n roi.setProfileMethod(\"mean\")\n roi.setProfileType(\"2D\")\n roi.setProfileLineWidth(3)\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n # check 2D 'mean' profile\n profilePlot = roi.getProfileWindow().getCurrentPlotWidget()\n data = profilePlot.getAllImages()[0].getData()\n expected = numpy.array([[1, 4], [7, 10], [13, 16]])\n numpy.testing.assert_almost_equal(data, expected)", "def performance(self, id):", "def _get_measurements_with_derived_metrics(self, measurements):\n\n now = time.time()\n\n def metrics_available(*names):\n return all(name in self._event_names and name in measurements\n and name in self._prev_measurements for name in names)\n\n def delta(*names):\n return [measurements[name] - self._prev_measurements[name] for name in names]\n\n # if specific pairs are available calculate derived metrics\n if self._prev_measurements is not None:\n time_delta = now - self._prev_ts\n\n if metrics_available(MetricName.INSTRUCTIONS, MetricName.CYCLES):\n inst_delta, cycles_delta = delta(MetricName.INSTRUCTIONS,\n MetricName.CYCLES)\n if cycles_delta > 0:\n measurements[DerivedMetricName.IPC] = float(inst_delta) / cycles_delta\n\n if time_delta > 0:\n measurements[DerivedMetricName.IPS] = float(inst_delta) / time_delta\n\n if metrics_available(MetricName.INSTRUCTIONS, MetricName.CACHE_MISSES):\n inst_delta, cache_misses_delta = delta(MetricName.INSTRUCTIONS,\n MetricName.CACHE_MISSES)\n if inst_delta > 0:\n measurements[DerivedMetricName.CACHE_MISSES_PER_KILO_INSTRUCTIONS] = \\\n float(cache_misses_delta) * 1000 / inst_delta\n\n if metrics_available(MetricName.CACHE_REFERENCES, MetricName.CACHE_MISSES):\n cache_ref_delta, cache_misses_delta = delta(MetricName.CACHE_REFERENCES,\n MetricName.CACHE_MISSES)\n if cache_ref_delta > 0:\n cache_hits_count = cache_ref_delta - cache_misses_delta\n measurements[DerivedMetricName.CACHE_HIT_RATIO] = (\n float(cache_hits_count) / cache_ref_delta)\n\n self._prev_measurements = measurements\n self._prev_ts = now\n\n return measurements", "def _evaluate_actions(self, *args, **kwargs):\n # DistributedDataParallel moves all tensors to the device (or devices)\n # So we need to make anything that is on the CPU into a numpy array\n # This is needed for older versions of pytorch that 
haven't deprecated\n # the single-process multi-device version of DDP\n return self._evaluate_actions_wrapper.ddp(\n *_cpu_to_numpy(args), **_cpu_to_numpy(kwargs)\n )", "def measure_dict():\n out = base_dict()\n out['mro']['current'] = ['Measure']\n out['name']['current'] = 'Measure'\n ao(out, 'nSamples', 'Integer', 1, readLevel=3)\n ao(out, 'id', 'String', 'Conversion source ID', readLevel=3)\n ao(out, 'uid', 'String', 'Unique ID', readLevel=5)\n ao(out, 'date', 'Date', '00:00:00 01/01/2000', name='Test date')\n ao(out, 'zerotime', 'Float', name='Acquisition starting time', readLevel=4)\n ao(out, 'elapsed', 'Float', name='Test duration', unit='second')\n ao(out, 'operator', 'String', name='Operator')\n return out", "def list_metrics(self):\n pass", "def measurement():\n try:\n meas_id = request.args.get('id', type=int)\n if meas_id is None:\n raise Exception(f'no valid id for pv module')\n meas = db.session.query(Measurement).get(meas_id)\n meas_values = db.session.query(MeasurementValues).filter(MeasurementValues.measurement_id == meas_id).all()\n print(meas_values)\n if meas is None:\n raise Exception(f'no measurement with id {meas_id} exists')\n return render_template('measurement/measurement.html', measurement=meas, measurement_values=meas_values)\n except Exception as e:\n flash(str(e), category='danger')\n return redirect('measurements')", "def metrics_group():", "def read_metrics(self):\n raise NotImplementedError()", "def _fill_results(self,spec,measurements,period,duration):\r\n logging.info(\"Fill measurements for spec {0}\".format(spec))\r\n \r\n if self._verb==mplane.model.VERB_QUERY:\r\n \"\"\"\r\n Query according to the time specified in the specification\r\n \"\"\"\r\n (first_time,last_time) = spec.when().datetimes()\r\n first_time=int(first_time.replace(tzinfo=datetime.timezone.utc).timestamp())\r\n last_time=int(last_time.replace(tzinfo=datetime.timezone.utc).timestamp())\r\n sleep_time = 0\r\n else:\r\n \"\"\"\r\n Query from NOW\r\n \"\"\"\r\n first_time = int(time.time())\r\n if (len(measurements[1])>0 or len(measurements[2])>0) and period<=self._pvsr_default_conf_check_cycle:\r\n #there are newly created or modified measurements\r\n first_time = first_time + self._pvsr_default_conf_check_cycle\r\n if first_time % period > 0:\r\n first_time = first_time - (first_time % period)\r\n last_time = first_time + int(duration / period) * period\r\n sleep_time = duration\r\n\r\n logging.debug(\"From: {0}, To: {1}\".format(datetime.datetime.fromtimestamp(first_time),datetime.datetime.fromtimestamp(last_time)))\r\n \r\n meas_data = {}\r\n\r\n while True:\r\n logging.info(\"Wait {0} seconds\".format(sleep_time))\r\n time.sleep(sleep_time)\r\n sleep_time = 30\r\n \r\n loaded_until=self._pvsr.getLastLoadedDataTimestamp(period)\r\n if int(loaded_until.timestamp())>=last_time or time.time()>last_time+period+300:\r\n for i in (0,1,2):\r\n for j in range(len(measurements[i])):\r\n self._fill_meas_result(measurements[i][j],first_time,last_time,meas_data)\r\n break\r\n else:\r\n logging.debug(\"last loaded is still {0}\".format(loaded_until))\r\n \r\n res = mplane.model.Result(specification=spec)\r\n res.set_when(mplane.model.When(a = datetime.datetime.utcfromtimestamp(first_time+period), b = datetime.datetime.utcfromtimestamp(last_time)))\r\n \r\n tmp_time=first_time+period\r\n row_index=0\r\n while tmp_time<=last_time:\r\n tmp_time2 = datetime.datetime.fromtimestamp(tmp_time)\r\n tmp_time3 = datetime.datetime.utcfromtimestamp(tmp_time)\r\n res.set_result_value(\"time\", tmp_time3, 
row_index)\r\n if tmp_time2 in meas_data:\r\n for mplane_name in meas_data[tmp_time2]:\r\n value = str(meas_data[tmp_time2][mplane_name])\r\n res.set_result_value(mplane_name, value, row_index)\r\n row_index+=1\r\n tmp_time+=period\r\n \r\n return res", "def compute_analysis(self):\r\n def get_mean(self):\r\n \"\"\"\r\n Compute mean in all sensors\r\n \"\"\"\r\n for i in range(1,len(self.data[0])):\r\n self.prom.append(np.mean(self.data[:,i])) \r\n\r\n \r\n def get_stddev(self):\r\n \"\"\"\r\n Compute mean in all sensors\r\n \"\"\"\r\n for i in range(1,len(self.data[0])):\r\n self.stddev.append(np.std(self.data[:,i])) \r\n \r\n # Get the values\r\n get_mean(self)\r\n get_stddev(self)\r\n \r\n # Check condition\r\n [(self.out_of_3stddev.append(i)) \r\n for (i) in (self.data[:,0:4]) \r\n if (any(\r\n (i[1:4] > 3*np.array(self.stddev)+np.array(self.prom))|\r\n (i[1:4] < -3*np.array(self.stddev)+np.array(self.prom))\r\n ))]", "def getMeasures(unique_name=None):", "def computeBasicStatistics(self, targetLabels, actualLabels):\r\n self.basicMeasures = reduce(self._cbe, map(lambda x,y:(x,y), targetLabels,\r\n actualLabels), (0,0,0,0))\r\n return self.basicMeasures", "def updatefig(*args):\n p1.set_array(turn(grid))\n p2.set_data(tally['time'], tally['sickos'])\n p3.set_data(tally['time'], tally['immune'])\n p4.set_data(tally['time'], tally['dead'])\n ax2.set_xlim(0, max(tally['time']))\n # ax2.set_ylim(0, max(max(sickos), max(immune)))\n # End sim if the disease is gone\n if tally['sickos'][-1] == 0:\n ani.event_source.stop()\n end_time = time.process_time()\n show_summary()\n print(\"Process time:\", end_time - start_time)\n return p1, p2, p3, p4,", "def update(self, labels, preds):\n labels, preds = check_label_shapes(labels, preds, True)\n\n for label, pred in zip(labels, preds):\n self.metrics.update_binary_stats(label, pred)\n\n if self.average == \"macro\":\n self.sum_metric += self.metrics.fscore\n self.num_inst += 1\n self.metrics.reset_stats()\n else:\n self.sum_metric = self.metrics.fscore * self.metrics.total_examples\n self.num_inst = self.metrics.total_examples", "def process_meter_message(self, d):\n dpid = int(d.get(\"dpid\", 0))\n dp = self.dpset.get(dpid)\n if not dp:\n return \"Datapath does not exist!\"\n\n ofproto = dp.ofproto\n parser = dp.ofproto_parser\n\n command = {\n 'add': ofproto.OFPMC_ADD,\n 'mod': ofproto.OFPMC_MODIFY,\n 'del': ofproto.OFPMC_DELETE,\n }\n cmd = command.get(d[\"operation\"], ofproto.OFPMC_ADD)\n\n meter_id = d[\"meter_id\"]\n\n flags = 0\n bands = []\n if \"flags\" in d: # Ryu's format\n print(d['flags'])\n for f in d['flags']:\n flags += 0x01 if f == 'KBPS' else 0\n flags += 0x02 if f == 'PKTPS' else 0\n flags += 0x04 if f == 'BURST' else 0\n flags += 0x08 if f == 'STATS' else 0\n\n for band in d[\"bands\"]:\n if band['type'] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band['rate'],\n burst_size=band['burst_size'])]\n elif band['type'] == 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band['rate'],\n burst_size=band['burst_size'], prec_level=band['prec_level'])]\n\n else: # FlowManager's format\n flags += 0x01 if d['OFPMF_KBPS'] else 0\n flags += 0x02 if d['OFPMF_PKTPS'] else 0\n flags += 0x04 if d['OFPMF_BURST'] else 0\n flags += 0x08 if d['OFPMF_STATS'] else 0\n\n # Flags must have KBPS or PKTPS\n flags = flags if (flags & 0x03) else (flags | 0x01)\n\n for band in d[\"bands\"]:\n #mtype = type_convert.get(band[0])\n if band[0] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band[1],\n burst_size=band[2])]\n elif band[0] 
== 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band[1],\n burst_size=band[2], prec_level=band[3])]\n\n # TODO: catch some errors\n meter_mod = parser.OFPMeterMod(dp, cmd, flags, meter_id, bands)\n try:\n dp.send_msg(meter_mod)\n except KeyError as e:\n return e.__repr__()\n except Exception as e:\n return e.__repr__()\n\n return \"Message sent successfully.\"", "def _measure():\n return {\n 'type' : 'class',\n 'name' : 'measure',\n 'base' : None,\n 'is_abstract' : False,\n 'doc' : None,\n 'properties' : [\n ('description', 'str', '0.1', None),\n ('identification', 'str', '0.1', None),\n ('name', 'str', '0.1', None),\n ],\n 'decodings' : [\n ('description', 'child::cim:measureDescription'),\n ('identification', 'child::cim:measureIdentification/gmd:code/gco:CharacterString'),\n ('name', 'child::cim:nameOfMeasure'),\n\n # Hacks due to DKRZ misimplementation.\n ('description', 'parent::cim:report/child::gmd:measureDescription/gco:CharacterString'),\n ('name', 'parent::cim:report/child::gmd:nameOfMeasure/gco:CharacterString'),\n ]\n }", "def measure(self, recommender):", "def calc_psd(self):\n psd2d = np.array(self.calc_psd2d())\n\n print(\"Azimuthally averaging 2D power spectral density ... \",\n end=\"\", flush=True)\n dim = self.shape[0]\n dim_half = (dim+1) // 2\n # NOTE:\n # The zero-frequency component is shifted to position of index\n # (0-based): (ceil((n-1) / 2), ceil((m-1) / 2))\n px = np.arange(dim_half-dim, dim_half)\n x, y = np.meshgrid(px, px)\n rho = np.sqrt(x**2 + y**2)\n\n radii = self.radii\n nr = len(radii)\n if nr > 100:\n print(\"\\n ... %d data points, may take a while ... \" % nr,\n end=\"\", flush=True)\n else:\n print(\" %d data points ... \" % nr, end=\"\", flush=True)\n psd1d = np.zeros(shape=(nr, 4))\n psd1d[:, 0] = self.frequencies\n\n for i, r in enumerate(radii):\n if (i+1) % 100 == 0:\n percent = 100 * (i+1) / nr\n print(\"%.1f%% ... 
\" % percent, end=\"\", flush=True)\n ii, jj = (rho <= r).nonzero()\n rho[ii, jj] = np.inf\n cells = psd2d[ii, jj]\n psd1d[i, 3] = len(cells)\n if self.meanstd:\n psd1d[i, 1] = np.mean(cells)\n psd1d[i, 2] = np.std(cells)\n else:\n median = np.median(cells)\n mad = np.median(np.abs(cells - median))\n psd1d[i, 1] = median\n psd1d[i, 2] = mad * 1.4826\n print(\"DONE\", flush=True)\n\n self.psd1d = psd1d\n return psd1d", "def updateMeter(self, name1, name2, op):\r\n mini = 0\r\n maxi = 100\r\n pos = (self.var.get() - mini) / (maxi - mini)\r\n self.updateMeterLine(pos * 0.6 + 0.2)", "def measurements(self):\n return self._measurements", "def iaq_measure(self) -> List[int]:\n # name, command, signals, delay\n return self._run_profile((\"iaq_measure\", [0x20, 0x08], 2, 0.05))", "def measure(x, y):\n return dotc_gpu(x, y)", "def update(self, phase, targets, outputs):\n iou, dice, dice_neg, dice_pos, _, _ = self.metric(outputs, targets)\n self.base_dice_scores[phase].append(dice)\n self.dice_pos_scores[phase].append(dice_pos)\n self.dice_neg_scores[phase].append(dice_neg)\n self.iou_scores[phase].append(iou)", "def getMeasurements(self):\n return self._Measurements", "def computePRMeasures(self, targetLabels, actualLabels):\r\n if self.basicMeasures is None:\r\n self.basicMeasures = self.computeBasicStatistics(targetLabels, actualLabels)\r\n if self.basicMeasures[0] == 0:\r\n self.prMeasures = (0,0)\r\n else:\r\n self.prMeasures = ((0.0 + self.basicMeasures[0]) / (self.basicMeasures[0] + self.basicMeasures[1]),\r\n (0.0 + self.basicMeasures[0]) / (self.basicMeasures[0] + self.basicMeasures[3]))\r\n return self.prMeasures", "def test_get_measure_parameters_by_id(self):\n pass", "def calculate_dataset_metrics(self):\n pass", "def add_meas(self, ros_meas, output=False):\n msg_id = self._get_meas_identifier(ros_meas)\n # Main filter fuses all measurements\n if self.is_main_filter:\n pass\n elif ros_meas.src_asset != self.my_name:\n pass\n elif self._is_shareable(ros_meas.src_asset, ros_meas.meas_type): \n pass\n elif msg_id in self.meas_types_received:\n return\n else: # Don't fuse (e.g. 
depth, sonar_z)\n return -1\n self.meas_types_received.append(msg_id)\n\n ledger_ind = self._get_meas_ledger_index( ros_meas.stamp )\n\n # Check for Event-Triggering\n if self._is_shareable(ros_meas.src_asset, ros_meas.meas_type):\n if \"implicit\" not in ros_meas.meas_type:\n src_id = self.asset2id[ros_meas.src_asset]\n measured_id = self.asset2id[ros_meas.measured_asset]\n ros_meas.et_delta = self._get_meas_et_delta(ros_meas.meas_type)\n meas = get_internal_meas_from_ros_meas(ros_meas, src_id, measured_id)\n\n implicit, innovation = self.filter.check_implicit(meas)\n if implicit:\n\n ros_meas.meas_type += \"_implicit\"\n\n # print(\"@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ IMPLICIT @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\")\n # print(ros_meas)\n # print(vars(meas))\n # print(self.filter.x_hat)\n else:\n self.explicit_count += 1\n if output:\n expected = meas.data - innovation\n meas_id = self._get_meas_identifier(ros_meas)\n last_update_time = self.ledger[len(self.ledger)-1][\"time\"]\n # print(\"Explicit {} {} : expected: {}, got: {}\".format(last_update_time.to_sec(), meas_id, expected, meas.data))\n # print(self.meas_types_received)\n # print(self.filter.x_hat.T)\n # print(\"Explicit #{} {} : {}\".format(self.explicit_count, self.delta_multiplier, ros_meas.meas_type))\n # print(ros_meas)\n # print(vars(meas))\n # print(self.filter.x_hat)\n\n\n \n # Append to the ledger\n self.ledger[ ledger_ind ][\"meas\"].append( ros_meas )\n return ledger_ind", "def trigger_measurement_with_diff_pressure_t_comp_and_averaging(self):\n return self.execute(SdpI2cCmdTriggerMeasurementWithDiffPressureTComp())", "def GraphicsContext_CreateMeasuringContext(*args):\n return _gdi_.GraphicsContext_CreateMeasuringContext(*args)", "def test_add_two_sources(self):\n metric = self.metric()\n measurement = self.measurement(\n metric,\n sources=[self.source(metric, value=\"10\"), self.source(metric, value=\"20\")],\n )\n self.assertEqual(\"30\", measurement[\"count\"][\"value\"])", "def get_measurements_by_time(self):\n data_path = os.path.abspath(\n os.path.join(\n os.path.dirname(os.path.realpath(__file__)),\n \"..\",\n \"data/NVB_rescale_dataset.p\",\n )\n )\n self.log_print([\"Getting experimental data from {}\".format(data_path)])\n self.measurements = pickle.load(open(data_path, \"rb\"))\n return self.measurements", "def measures(self) -> Tuple[Union[ReportMeasures, str], ...]:\n return self.__measures" ]
[ "0.5662687", "0.55383396", "0.55314547", "0.5518812", "0.5434003", "0.5369537", "0.53463304", "0.52960706", "0.52332073", "0.5206542", "0.5196402", "0.51885736", "0.511476", "0.5113631", "0.5077229", "0.50592333", "0.50523233", "0.502988", "0.5012061", "0.5006214", "0.49517575", "0.49413022", "0.49155375", "0.4913395", "0.48931843", "0.4863291", "0.48569956", "0.48539814", "0.48399273", "0.4834789", "0.48333988", "0.48004332", "0.47934532", "0.47800586", "0.4766804", "0.47651473", "0.47651473", "0.47448257", "0.4732587", "0.4729131", "0.4727091", "0.47259647", "0.47257107", "0.47236335", "0.47193697", "0.47183764", "0.47176751", "0.47155684", "0.47124255", "0.47107166", "0.46921325", "0.46806234", "0.4680236", "0.4671654", "0.46583372", "0.46577626", "0.46567297", "0.46538457", "0.46462414", "0.46434677", "0.46328658", "0.46282452", "0.4621489", "0.46212277", "0.46125236", "0.4610449", "0.46092424", "0.46075568", "0.46035498", "0.45974514", "0.45912126", "0.45900428", "0.45874253", "0.45841077", "0.45803326", "0.4578691", "0.4577833", "0.4575053", "0.45620576", "0.45616537", "0.45608526", "0.454738", "0.45468542", "0.4541515", "0.45364973", "0.45284235", "0.45273423", "0.45240712", "0.45195365", "0.45170772", "0.45168114", "0.45148408", "0.45115885", "0.45096728", "0.45012897", "0.45010716", "0.45004857", "0.44837415", "0.44819823", "0.44815332" ]
0.542207
5
Actions for Pd2dMeas objects.
def action_pd2d_proc(obj: Pd2dProc, thread: QtCore.QThread): w_actions = [] qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Plot gamma-nu") def func_plot_gn(obj): fig, ax = obj.plot_gamma_nu() fig.show() return (fig, ax) qtb_1.clicked.connect(lambda: func_plot_gn(obj)) w_actions.append(qtb_1) qtb_1 = QtWidgets.QToolButton() qtb_1.setText("Plot 2theta-phi") def func_plot_tp(obj): fig, ax = obj.plot_ttheta_phi() fig.show() return (fig, ax) qtb_1.clicked.connect(lambda: func_plot_tp(obj)) w_actions.append(qtb_1) return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self,measurements,actions):\n raise NotImplementedError", "def getMeasures():", "def _add_or_update_measurement(self,eq,meas_type,mplane_param2value,period):\r\n meas = self._pvsr.create_pvsr_object(\"Measurement\")\r\n meas.ParentId = eq.Id\r\n meas.Type = meas_type\r\n if \"index_mplane_name\" in self._meas:\r\n if self._meas[\"index_mplane_name\"] not in mplane_param2value:\r\n raise ValueError(\"Missing {0} value\".format(self._meas[\"index_mplane_name\"]))\r\n meas.Index = mplane_param2value[self._meas[\"index_mplane_name\"]]\r\n else:\r\n meas.DescriptionToShow = self._meas[\"name\"] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n \r\n measA = self._pvsr.listMeasurements(meas)\r\n if len(measA) == 0:\r\n if \"index_mplane_name\" not in self._meas:\r\n meas.Index = self._meas[\"name\"]\r\n measA = self._pvsr.listMeasurements(meas)\r\n \r\n add2 = None\r\n \r\n if len(measA) == 0:\r\n #add\r\n if self._verb==mplane.model.VERB_QUERY:\r\n if \"index_mplane_name\" in self._meas:\r\n raise ValueError(\"The measurement does not exists: Index={0}\".format(meas.Index))\r\n else:\r\n raise ValueError(\"The measurement does not exists: Name={0}\".format(meas.DescriptionToShow))\r\n \r\n if \"index_mplane_name\" in self._meas:\r\n if eq.CollectorType == 'c':\r\n meas.DescriptionToShow = mplane_param2value[self._meas[\"index_mplane_name\"]] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n else:\r\n meas.DescriptionToShow = self._meas[\"name\"] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n \r\n if \"uda_constants\" in self._meas:\r\n for uda,value in self._meas[\"uda_constants\"].items():\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = value\r\n meas.Parameter.append(param)\r\n\r\n for mplane_param,uda in self._mplane2uda.items():\r\n if mplane_param in mplane_param2value and mplane_param2value[mplane_param] != \"\":\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = mplane_param2value[mplane_param]\r\n meas.Parameter.append(param)\r\n elif self._uda_name2uda[uda].Required == \"Yes\":\r\n raise ValueError(\"Missing required parameter: {0}\".format(mplane_param))\r\n \r\n logging.info(\"Creating measurement, eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n \r\n meas.Switched = \"No\"\r\n meas.RetainRawData = 365\r\n meas.IntervalInSec = period\r\n \r\n add2 = 1\r\n meas = self._pvsr.addMeasurement(meas)\r\n else:\r\n #update\r\n meas = measA[0]\r\n logging.info(\"Measurement already exists: eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n \r\n need_mod = False\r\n meas_param_name2value = {}\r\n if hasattr(meas,\"Parameter\"):\r\n for i in range(len(meas.Parameter)):\r\n meas_param_name2value[meas.Parameter[i].Name]=meas.Parameter[i].Value\r\n\r\n if \"check_udas\" in self._meas and self._meas[\"check_udas\"] == False:\r\n pass\r\n else:\r\n for mplane_param,uda in self._mplane2uda.items():\r\n if mplane_param in mplane_param2value and mplane_param2value[mplane_param] != \"\":\r\n if uda not in meas_param_name2value or meas_param_name2value[uda] != mplane_param2value[mplane_param]:\r\n if uda not in meas_param_name2value:\r\n logging.warn(\"Parameter mismatch: {0}: NULL != {1}\".format(uda,mplane_param2value[mplane_param]))\r\n else:\r\n logging.warn(\"Parameter mismatch: {0}: {1} != 
{2}\".format(uda,meas_param_name2value[uda],mplane_param2value[mplane_param]))\r\n index2remove=None\r\n for i in range(len(meas.Parameter)):\r\n if meas.Parameter[i].Name == uda:\r\n index2remove = i\r\n break\r\n del meas.Parameter[index2remove]\r\n need_mod = True\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = mplane_param2value[mplane_param]\r\n meas.Parameter.append(param)\r\n else:\r\n if uda in meas_param_name2value:\r\n index2remove=None\r\n for i in range(len(meas.Parameter)):\r\n if meas.Parameter[i].Name == uda:\r\n index2remove = i\r\n break\r\n if index2remove is not None:\r\n logging.warn(\"Parameter mismatch: {0}: {1} != NULL\".format(uda,meas_param_name2value[uda]))\r\n need_mod = True\r\n del meas.Parameter[index2remove]\r\n \r\n if meas.IntervalInSec != period:\r\n need_mod = True\r\n meas.IntervalInSec = period\r\n logging.warn(\"Parameter mismatch: IntervalInSec: {0} != {1}\".format(meas.IntervalInSec,period))\r\n \r\n if need_mod:\r\n if self._verb==mplane.model.VERB_QUERY:\r\n raise ValueError(\"The measurement parameters do not match: Name={0}\".format(meas.DescriptionToShow))\r\n \r\n logging.warn(\"Modifying measurement: eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n meas = self._pvsr.modMeasurement(meas)\r\n add2 = 2\r\n else:\r\n add2 = 0\r\n \r\n return (meas,add2)", "def _fill_meas_result(self,meas,from_time,to_time,meas_data):\r\n input=self._pvsr.create_pvsr_object(\"GetMeasuredValuesInput\")\r\n input.ObjType = \"Measurement\"\r\n input.ObjId = meas.Id\r\n input.From = datetime.datetime.fromtimestamp(from_time)\r\n input.To = datetime.datetime.fromtimestamp(to_time)\r\n logging.info(\"Get values, eq: {0}, type: {1}, index: {2}, name: {3}, {4} -> {5}\".format(self._meas[\"equipment\"],meas.Type,meas.Index,meas.DescriptionToShow,input.From,input.To))\r\n meas_res=self._pvsr.getMeasuredValues(input)\r\n \r\n index2mplane_name={}\r\n multiply = None\r\n if \"first\" in self._meas[\"types\"][meas.Type]:\r\n index2mplane_name[0]=self._meas[\"types\"][meas.Type][\"first\"]\r\n if \"second\" in self._meas[\"types\"][meas.Type]:\r\n index2mplane_name[1]=self._meas[\"types\"][meas.Type][\"second\"]\r\n if \"multiply\" in self._meas[\"types\"][meas.Type]:\r\n multiply=int(self._meas[\"types\"][meas.Type][\"multiply\"])\r\n\r\n if hasattr(meas_res,\"D\"):\r\n for d in meas_res.D:\r\n if d.T not in meas_data:\r\n meas_data[d.T]={}\r\n for index,mplane_name in index2mplane_name.items():\r\n if index < len(d.V):\r\n if multiply is not None:\r\n d.V[index]*=multiply\r\n meas_data[d.T][mplane_name]=d.V[index]\r\n else:\r\n meas_data[d.T][mplane_name]=None", "def measure(self):\n pass", "def action_pd2d_meas(obj: Pd2dMeas, thread: QtCore.QThread):\n w_actions = []\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Plot gamma-nu\")\n\n def func_plot_gn(obj):\n fig, ax = obj.plot_gamma_nu()\n fig.show()\n return (fig, ax)\n\n qtb_1.clicked.connect(lambda: func_plot_gn(obj))\n w_actions.append(qtb_1)\n\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Plot 2theta-phi\")\n\n def func_plot_tp(obj):\n fig, ax = obj.plot_ttheta_phi()\n fig.show()\n return (fig, ax)\n\n qtb_1.clicked.connect(lambda: func_plot_tp(obj))\n w_actions.append(qtb_1)\n return w_actions", "def measure(self,command_exe, command_args, measure_out):\n pass", "def read_metric_values(self):\n inv_objs = self._inventory_mgr.current_inventory()\n monitored_metrics = 
self._metric_mgr.get_monitored_metrics()\n perf_manager = self._si.RetrieveServiceContent().perfManager\n for mor in inv_objs.keys():\n for inv_obj in inv_objs[mor]:\n inv_obj_metrics = inv_obj.metric_id_map\n desired_keys = list(set(inv_obj_metrics.keys()) & set(monitored_metrics[mor].keys()))\n if not len(desired_keys) == 0:\n metric_id_objs = [inv_obj_metrics[key] for key in desired_keys]\n query_spec = vim.PerformanceManager.QuerySpec(\n entity=inv_obj.mor, metricId=metric_id_objs,\n intervalId=inv_obj.INSTANT_INTERVAL,\n maxSample=1, format='normal'\n )\n try:\n results = perf_manager.QueryPerf(querySpec=[query_spec])\n except Exception as e:\n self._logger.error(\"Exception while making performance query : {0}\".format(e))\n if results:\n dps = self._parse_query(inv_obj, results, monitored_metrics[mor])\n payload = self._build_payload(dps)\n self._dispatch_metrics(payload)\n else:\n self._logger.warning(\"Empty result from query : {0}\".format(query_spec))", "def _measurement_update(self):\n pass", "def compute_metrics(self):\n pass", "def set_metrics(self):", "def get_measurements(self, pipeline, object_name, category):\n result = self.get_object_measurements(pipeline, object_name, category,\n {self.object_name.value: [] })\n return result", "def metrics(self):\n raise NotImplementedError(\"metrics\")", "def get_measured_data_ouputs(self, t): # TODO: SPELLING\n obsOut = numpy.zeros(shape=(1, self.get_num_measured_outputs()))\n i = 0\n for o in self.outputs:\n if o.is_measured_output():\n obsOut[0,i] = o.read_from_data_series(t)\n i += 1\n return obsOut", "def __init__(self, meas, verb,pvsr, default_site,delete_created_measurements,pvsr_default_conf_check_cycle,pvsr_meas_types):\r\n \r\n logging.info(\"adding capability: {0}\".format(meas[\"name\"]))\r\n \r\n self._verb=verb\r\n if verb==mplane.model.VERB_QUERY:\r\n cap = mplane.model.Capability(label=meas[\"name\"]+\"-query\", when = \"past ... now / 15s\", verb=mplane.model.VERB_QUERY)\r\n elif verb==mplane.model.VERB_MEASURE:\r\n cap = mplane.model.Capability(label=meas[\"name\"]+\"-measure\", when = \"now ... 
future / 15s\", verb=mplane.model.VERB_MEASURE)\r\n else:\r\n raise ValueError(\"Verb is not supported: {0}\".format(verb))\r\n cap.add_result_column(\"time\")\r\n \r\n self._mplane2uda={}\r\n self._uda_name2uda = {}\r\n \r\n self._pvsr_default_conf_check_cycle=pvsr_default_conf_check_cycle\r\n \r\n try:\r\n for k in sorted(meas[\"types\"].keys()):\r\n if \"first\" in meas[\"types\"][k]:\r\n logging.debug(\" result colum: {0}\".format(meas[\"types\"][k][\"first\"]))\r\n cap.add_result_column(meas[\"types\"][k][\"first\"])\r\n if \"second\" in meas[\"types\"][k]:\r\n logging.debug(\" result colum: {0}\".format(meas[\"types\"][k][\"second\"]))\r\n cap.add_result_column(meas[\"types\"][k][\"second\"])\r\n \r\n if \"PropertyType\" in pvsr_meas_types[k]:\r\n for i in range(len(pvsr_meas_types[k][\"PropertyType\"])):\r\n self._uda_name2uda[pvsr_meas_types[k][\"PropertyType\"][i][\"Name\"]]=pvsr_meas_types[k][\"PropertyType\"][i]\r\n \r\n if \"index_mplane_name\" in meas:\r\n logging.debug(\" parameter: {0}\".format(meas[\"index_mplane_name\"]))\r\n cap.add_parameter(meas[\"index_mplane_name\"])\r\n \r\n if \"mplane_constants\" in meas:\r\n for k,v in sorted(meas[\"mplane_constants\"].items()):\r\n logging.debug(\" parameter: {0} with value {1}\".format(k,v))\r\n cap.add_parameter(k,v)\r\n \r\n if \"uda_name2mplane_name\" in meas:\r\n for k,v in sorted(meas[\"uda_name2mplane_name\"].items()):\r\n if k in self._uda_name2uda:\r\n logging.debug(\" parameter: {0}\".format(v))\r\n cap.add_parameter(v)\r\n self._mplane2uda[v]=k\r\n else:\r\n logging.error(\" unknown UDA: {0}\".format(v))\r\n except Exception as e:\r\n logging.critical(\"Error during capability creation: {0}\".format(e))\r\n raise e\r\n\r\n super(PvsrService, self).__init__(cap)\r\n \r\n self._pvsr = pvsr\r\n self._meas = meas\r\n self._default_site = default_site\r\n self._delete_created_measurements = delete_created_measurements\r\n self._pvsr_meas_types = pvsr_meas_types", "def measure():\n print(\"alias, timestamp, current, total, power, voltage, err_code\")\n message_str = MeasurementRequest(None).to_json()\n socket_object = UdpSocket()\n s = UDPSendThread(message_str, socket_object)\n r = UDPRecvThread(socket_object, measurement_output_parser)\n s.start()\n r.start()\n\n wait((s, r))", "def overviewCommand(self):\n plt.figure(11)\n plt.clf()\n ax = plt.subplot(211)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET'),\n color='r', label='FUOFFSET',\n linewidth=1, alpha=1) \n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='r', linewidth=3, alpha=0.5,\n label=self.DLtrack+'-PSP')\n plt.legend()\n plt.subplot(212, sharex=ax)\n plt.plot(self.raw['OPDC'].data.field('TIME'),\n 1e6*self.raw['OPDC'].data.field('FUOFFSET')-\n 1e6*(self.raw['OPDC'].data.field(self.DLtrack)-\n self.raw['OPDC'].data.field('PSP')),\n color='k', label='$\\Delta$',\n linewidth=1, alpha=1) \n \n signal = self.raw['OPDC'].data.field('FUOFFSET')\n plt.figure(12)\n plt.clf()\n ax2 = plt.subplot(111)\n Fs = 1e6/np.diff(self.raw['OPDC'].data.field('TIME')).mean()\n print Fs\n ax2.psd(signal[:50000], NFFT=5000, Fs=Fs, label='FUOFFSET',scale_by_freq=0)\n plt.legend()", "def measobs(self, measfct, stampsize=200, skipdone=True):\n\t\t\n\t\tincatfilepaths = glob.glob(os.path.join(self.workobsdir, \"*-inputcat.pkl\"))\n\t\toutcatfilepaths = [incat.replace(\"inputcat\", \"meascat\") for incat in 
incatfilepaths]\n\t\t\n\t\tlogger.info(\"Measuring %i cats...\" % len(incatfilepaths))\n\t\n\t\tmeasfctkwargs = {\"stampsize\":stampsize}\n\t\n\t\tmegalut.meas.run.general(incatfilepaths, outcatfilepaths, measfct, measfctkwargs, ncpu=self.ncpu, skipdone=skipdone)", "def read_measurement(self):\n return self.execute(SdpI2cCmdReadMeasurement())", "def make_metrics(self):\n num_batches = self.data_loader.number_of_batches()\n dose_score_vec = np.zeros(num_batches)\n\n # Only make calculations if data_loader is not empty\n if not self.data_loader.file_paths_list:\n print('No patient information was given to calculate metrics')\n else:\n # Change batch size to 1\n self.data_loader.batch_size = 1 # Loads data related to ground truth patient information\n if self.dose_loader is not None:\n self.dose_loader.batch_size = 1 # Loads data related to ground truth patient information\n\n for idx in tqdm.tqdm(range(num_batches)):\n # Get roi masks for patient\n self.get_constant_patient_features(idx)\n # Get dose tensors for reference dose and evaluate criteria\n reference_dose = self.get_patient_dose_tensor(self.data_loader)\n if reference_dose is not None:\n self.reference_dose_metric_df = self.calculate_metrics(self.reference_dose_metric_df, reference_dose)\n # If a dose loader was provided, calculate the score\n if self.dose_loader is not None:\n new_dose = self.get_patient_dose_tensor(self.dose_loader)\n # Make metric data frames\n self.new_dose_metric_df = self.calculate_metrics(self.new_dose_metric_df, new_dose)\n # Evaluate mean absolute error of 3D dose\n dose_score_vec[idx] = np.sum(np.abs(reference_dose - new_dose)) / np.sum(self.possible_dose_mask)\n # Save metrics at the patient level (this is a template for how DVH stream participants could save\n # their files\n # self.dose_metric_df.loc[self.patient_list[0]].to_csv('{}.csv'.format(self.patient_list[0]))\n\n if self.dose_loader is not None:\n dvh_score = np.nanmean(np.abs(self.reference_dose_metric_df - self.new_dose_metric_df).values)\n dose_score = dose_score_vec.mean()\n return dvh_score, dose_score\n else:\n print('No new dose provided. 
Metrics were only calculated for the provided dose.')", "def measurements(self) -> NONEARRAY:\n pass", "def get_measurements(self):\n metrics = {}\n for key in self.fields.keys():\n metrics[key] = []\n # What's in output:\n # proc_pid date virt res shrd cpu mem power gpus_power\n while not self.queue.empty():\n data = self.queue.get().strip().split()\n for field in self.fields:\n tp = self.fields[field]['type']\n idx = self.fields[field]['index']\n count = self.fields[field]['count']\n if count == -1:\n metrics[field].append(ResourceMonitor.str_to_type(data[idx], tp))\n elif count == 0:\n metrics[field].append([ResourceMonitor.str_to_type(data[idx], tp)])\n else:\n metrics[field].append([\n ResourceMonitor.str_to_type(data[index], tp) for index in xrange(idx, idx+count)\n ])\n return metrics", "def get_all_measurements(self, start_time, end_time):\n return", "def test_measurment(self):\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"km\"), 6.214)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"m\"), 10.936)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"cm\"), 0.328)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"mm\"), 0.394)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"mi\"), 16.093)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"yd\"), 9.144)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"ft\"), 304.8)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"in\"), 254)", "def action_pd(obj: Pd, thread: QtCore.QThread):\n w_actions = []\n f_meas = obj.is_attribute(\"pd_meas\")\n f_chi2 = obj.is_attribute(\"chi2\")\n f_phase = obj.is_attribute(\"phase\")\n\n l_pd_peak = []\n if f_phase:\n phase = obj.phase\n for item in phase.items:\n try:\n pd_peak = getattr(obj, f\"pd_peak_{item.label.lower():}\")\n l_pd_peak.append(pd_peak)\n except AttributeError:\n pass\n\n f_setup = obj.is_attribute(\"setup\")\n f_pd_instr_resolution = obj.is_attribute(\"pd_instr_resolution\")\n f_pd_background = obj.is_attribute(\"pd_background\")\n f_range = obj.is_attribute(\"range\")\n\n if not(f_chi2 & f_meas & f_setup & f_pd_instr_resolution & f_phase &\n f_pd_background & f_range):\n if not f_chi2:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add chi2\")\n qtb_1.clicked.connect(lambda: add_items(obj, [Chi2()], thread))\n w_actions.append(qtb_1)\n\n if not f_meas:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add pd_meas\")\n qtb_1.clicked.connect(lambda: add_items(obj, [PdMeasL()], thread))\n w_actions.append(qtb_1)\n\n if not f_setup:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add setup\")\n qtb_1.clicked.connect(lambda: add_items(obj, [Setup()], thread))\n w_actions.append(qtb_1)\n\n if not f_pd_instr_resolution:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add pd_instr_resolution\")\n qtb_1.clicked.connect(lambda: add_items(obj, [PdInstrResolution()],\n thread))\n w_actions.append(qtb_1)\n\n if not f_phase:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add phase\")\n vv = PhaseL()\n vv.items = [Phase(label=\"phase\", igsize=0., scale=1.)]\n qtb_1.clicked.connect(lambda: add_items(obj, [vv], thread))\n w_actions.append(qtb_1)\n\n if not f_pd_background:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add pd_background\")\n qtb_1.clicked.connect(lambda: add_items(obj, [PdBackgroundL()],\n thread))\n w_actions.append(qtb_1)\n\n if not f_range:\n qtb_1 = QtWidgets.QToolButton()\n qtb_1.setText(\"Add range\")\n qtb_1.clicked.connect(lambda: add_items(obj, [Range(\n ttheta_min=2, ttheta_max=100.)], 
thread))\n w_actions.append(qtb_1)\n return w_actions", "def getAllMeasurement(self): \n return self.measurement", "def get_all_DLP_measurements(self):\n pass", "def runMeasurement(self): \n if isI1DisplayDiffuserOn():\n print'Please remove diffuser for normal measurement\\n'\n else:\n self.measurement.append(getI1DisplayMesure(self.ambiant, self._raw, self._device))", "def measurements(self):\n # get available measurement types for this node\n measurement_types = self.measurement_types()\n\n # retrieve measurement for each type\n return list(self.measurement(t) for t in measurement_types)", "def measure_test(self):\n return self.execute(Sgp40I2cCmdExecuteSelfTest())", "async def update_measures(self):\n\n def function():\n return self._api.get_measures()\n\n self._measures = await self.call(function, throttle_domain=\"update_measures\")\n\n return self._measures", "def measureDispatch(self):\n\n if self.ui.measureDevice.currentText().startswith('Built-In'):\n self.app.measure.startMeasurement()\n self.app.message.emit('Measurement enabled', 0)\n self.deviceStat['measure'] = True\n self.ui.measureDevice.setStyleSheet(self.BACK_GREEN)\n else:\n self.app.measure.stopMeasurement()\n self.app.message.emit('Measurement disabled', 0)\n self.deviceStat['measure'] = None\n self.ui.measureDevice.setStyleSheet(self.BACK_NORM)\n\n return True", "def test_get_measure_parameters(self):\n pass", "def __init__(self, measure):\n self.measure = measure # Dictionary of the measurement steps\n self.devices = {} # Dictionary holding all the devices\n self.output_devices = [] # List of devices with output capabilities\n self.daqs = {} # Dictionary that holds for each daq the inputs and outputs.\n self.rotation_stages = [] # If there are rotation stages present, they will show up in this list.\n # This short block is going to become useful in the future, when interfacing with a GUI\n for d in self.measure:\n setattr(self, d, self.measure[d])", "def _plot_metrics(self):\n if len(self._episode_q_means) > 0:\n mean_q = np.asscalar(np.mean(self._episode_q_means))\n self._metrics_writer.write_value('Mean Q per ep.', mean_q, self._num_actions_taken)\n\n if len(self._episode_q_stddev) > 0:\n std_q = np.asscalar(np.mean(self._episode_q_stddev))\n self._metrics_writer.write_value('Mean Std Q per ep.', std_q, self._num_actions_taken)\n\n self._metrics_writer.write_value('Sum rewards per ep.', sum(self._episode_rewards), self._num_actions_taken)", "def measure(self):\n # --- perform repeated runs\n for i_run in range(self.n_runs):\n if self.verbosity > 0:\n print(\"Run {0} / {1} ...\".format(i_run, self.n_runs), end = '')\n tdelta = self._timed_execute()\n self._run_times[i_run] = tdelta\n\t\t\t\n if self.verbosity == 2:\n print(tdelta)\n \n # calculate mean\n self._tmean = np.mean(self._run_times)\n # calculate standard deviation\n self._tstdev = np.std(self._run_times)\n # allow access to results\n self.__hasrun = True", "def compute(self) -> Any:\n # ddp hotfix, could be done better\n # but metric must handle DDP on it's own\n if self._ddp_backend == \"xla\":\n device = get_device()\n for key in self.statistics:\n key_statistics = torch.tensor([self.statistics[key]], device=device)\n key_statistics = xm.all_gather(key_statistics).sum(dim=0).cpu().numpy()\n self.statistics[key] = key_statistics\n elif self._ddp_backend == \"ddp\":\n for key in self.statistics:\n value: List[np.ndarray] = all_gather(self.statistics[key])\n value: np.ndarray = np.sum(np.vstack(value), axis=0)\n self.statistics[key] = value\n\n per_class, 
micro, macro, weighted = get_aggregated_metrics(\n tp=self.statistics[\"tp\"],\n fp=self.statistics[\"fp\"],\n fn=self.statistics[\"fn\"],\n support=self.statistics[\"support\"],\n zero_division=self.zero_division,\n )\n if self.compute_per_class_metrics:\n return per_class, micro, macro, weighted\n else:\n return [], micro, macro, weighted", "def compute(self) -> Any:\n # ddp hotfix, could be done better\n # but metric must handle DDP on it's own\n if self._ddp_backend == \"xla\":\n device = get_device()\n for key in self.statistics:\n key_statistics = torch.tensor([self.statistics[key]], device=device)\n key_statistics = xm.all_gather(key_statistics).sum(dim=0).cpu().numpy()\n self.statistics[key] = key_statistics\n elif self._ddp_backend == \"ddp\":\n for key in self.statistics:\n value: List[np.ndarray] = all_gather(self.statistics[key])\n value: np.ndarray = np.sum(np.vstack(value), axis=0)\n self.statistics[key] = value\n\n per_class, micro, macro, weighted = get_aggregated_metrics(\n tp=self.statistics[\"tp\"],\n fp=self.statistics[\"fp\"],\n fn=self.statistics[\"fn\"],\n support=self.statistics[\"support\"],\n zero_division=self.zero_division,\n )\n if self.compute_per_class_metrics:\n return per_class, micro, macro, weighted\n else:\n return [], micro, macro, weighted", "def observe(self, env: dm_env.Environment, timestep: dm_env.TimeStep,\n action: np.ndarray) -> None:\n self._accumulate_metrics(env)", "def __call__(self, output, target, *args, **kwargs):\n _, y_pred = output.topk(1, 1, True, True)\n y_pred = y_pred.t().detach().cpu().numpy()[0]\n y_true = target.detach().cpu().numpy()\n self.pfm = self.metric_func(y_true, y_pred)\n return self.pfm", "def _process_measure(self, node):\n id0 = self._process_bit_id(node.children[0])\n id1 = self._process_bit_id(node.children[1])\n if len(id0) != len(id1):\n raise UnrollerError(\"internal error: reg size mismatch\",\n \"line=%s\" % node.line, \"file=%s\" % node.file)\n for idx, idy in zip(id0, id1):\n self.backend.measure(idx, idy)", "def addMeasArgs(parser):\n parser.add_argument(\"infile\", help=\"Input file\", type=Path)\n parser.add_argument(\"-o\", \"--output\", help=\"Output file\", type=Path, dest=\"outfile\")\n parser.add_argument(\"--overwrite\", action=\"store_true\",\n help=\"Overwrite existing output file.\")\n return parser", "def getPhysicalSamples(self, **kwargs):\n # initialise chans, startSample and endSample with the whole dataset\n options = self.parseGetDataKeywords(kwargs)\n # get data\n timeData = self.getUnscaledSamples(\n chans=options[\"chans\"],\n startSample=options[\"startSample\"],\n endSample=options[\"endSample\"],\n )\n # Scalars are applied in getUnscaledSamples to convert to mV - this is for ease of calculation and because each data file in the run might have a separate scaling\n # all that is left is to divide by the dipole length in km and remove the average\n for chan in options[\"chans\"]:\n if chan == \"Ex\":\n # multiply by 1000/self.getChanDx same as dividing by dist in km\n timeData[chan] = 1000 * timeData[chan] / self.getChanDx(chan)\n timeData.addComment(\n \"Dividing channel {} by electrode distance {} km to give mV/km\".format(\n chan, self.getChanDx(chan) / 1000.0\n )\n )\n if chan == \"Ey\":\n # multiply by 1000/self.getChanDy same as dividing by dist in km\n timeData[chan] = 1000 * timeData[chan] / self.getChanDy(chan)\n timeData.addComment(\n \"Dividing channel {} by electrode distance {} km to give mV/km\".format(\n chan, self.getChanDy(chan) / 1000.0\n )\n )\n\n # if remove zeros - 
False by default\n if options[\"remzeros\"]:\n timeData[chan] = removeZerosChan(timeData[chan])\n # if remove nans - False by default\n if options[\"remnans\"]:\n timeData[chan] = removeNansChan(timeData[chan])\n # remove the average from the data - True by default\n if options[\"remaverage\"]:\n timeData[chan] = timeData[chan] - np.average(\n timeData[chan]\n )\n\n # add comments\n timeData.addComment(\n \"Remove zeros: {}, remove nans: {}, remove average: {}\".format(\n options[\"remzeros\"], options[\"remnans\"], options[\"remaverage\"]\n )\n )\n return timeData", "def stats(self):", "def to_metric(self):\r\n if self.units != 'metric':\r\n self.units = 'metric'\r\n for statement in self.statements:\r\n statement.to_metric()\r\n for tool in iter(self.tools.values()):\r\n tool.to_metric()\r\n for primitive in self.primitives:\r\n primitive.to_metric()\r\n for hit in self.hits:\r\n hit.to_metric()", "def get_measurement_objects(self, pipeline, object_name, category, \n measurement):\n return self.get_threshold_measurement_objects(pipeline, object_name,\n category, measurement)", "def compute_metrics(self, metric_ids=None, probes=None):\n url = \"/projects/%s/managedfolders/%s/actions\" % (self.project_key, self.odb_id)\n if metric_ids is not None:\n return self.client._perform_json(\n \"POST\" , \"%s/computeMetricsFromIds\" % url,\n body={\"metricIds\" : metric_ids})\n elif probes is not None:\n return self.client._perform_json(\n \"POST\" , \"%s/computeMetrics\" % url,\n body=probes)\n else:\n return self.client._perform_json(\n \"POST\" , \"%s/computeMetrics\" % url)", "def metrics(self):\n \n if self.mse.shape[0]>1:\n raise ValueError('Metrics can only handle single observations.')\n \n if self.N==1:\n pred = float('nan')\n err = float('nan')\n y_true = float('nan')\n else:\n pred = int(self._predictions[-1])\n err = self._mse[-1]\n y_true = int(self.label[0])\n \n is_outlier = {\"type\":\"GAUGE\",\"key\":\"is_outlier\",\"value\":pred}\n mse = {\"type\":\"GAUGE\",\"key\":\"mse\",\"value\":err}\n obs = {\"type\":\"GAUGE\",\"key\":\"observation\",\"value\":self.N - 1}\n threshold = {\"type\":\"GAUGE\",\"key\":\"threshold\",\"value\":self.threshold}\n \n label = {\"type\":\"GAUGE\",\"key\":\"label\",\"value\":y_true}\n \n accuracy_tot = {\"type\":\"GAUGE\",\"key\":\"accuracy_tot\",\"value\":self.metric[4]}\n precision_tot = {\"type\":\"GAUGE\",\"key\":\"precision_tot\",\"value\":self.metric[5]}\n recall_tot = {\"type\":\"GAUGE\",\"key\":\"recall_tot\",\"value\":self.metric[6]}\n f1_score_tot = {\"type\":\"GAUGE\",\"key\":\"f1_tot\",\"value\":self.metric[7]}\n f2_score_tot = {\"type\":\"GAUGE\",\"key\":\"f2_tot\",\"value\":self.metric[8]}\n \n accuracy_roll = {\"type\":\"GAUGE\",\"key\":\"accuracy_roll\",\"value\":self.metric[9]}\n precision_roll = {\"type\":\"GAUGE\",\"key\":\"precision_roll\",\"value\":self.metric[10]}\n recall_roll = {\"type\":\"GAUGE\",\"key\":\"recall_roll\",\"value\":self.metric[11]}\n f1_score_roll = {\"type\":\"GAUGE\",\"key\":\"f1_roll\",\"value\":self.metric[12]}\n f2_score_roll = {\"type\":\"GAUGE\",\"key\":\"f2_roll\",\"value\":self.metric[13]}\n \n true_negative = {\"type\":\"GAUGE\",\"key\":\"true_negative\",\"value\":self.metric[0]}\n false_positive = {\"type\":\"GAUGE\",\"key\":\"false_positive\",\"value\":self.metric[1]}\n false_negative = {\"type\":\"GAUGE\",\"key\":\"false_negative\",\"value\":self.metric[2]}\n true_positive = {\"type\":\"GAUGE\",\"key\":\"true_positive\",\"value\":self.metric[3]}\n \n nb_outliers_roll = 
{\"type\":\"GAUGE\",\"key\":\"nb_outliers_roll\",\"value\":self.metric[14]}\n nb_labels_roll = {\"type\":\"GAUGE\",\"key\":\"nb_labels_roll\",\"value\":self.metric[15]}\n nb_outliers_tot = {\"type\":\"GAUGE\",\"key\":\"nb_outliers_tot\",\"value\":self.metric[16]}\n nb_labels_tot = {\"type\":\"GAUGE\",\"key\":\"nb_labels_tot\",\"value\":self.metric[17]}\n \n return [is_outlier,mse,obs,threshold,label,\n accuracy_tot,precision_tot,recall_tot,f1_score_tot,f2_score_tot,\n accuracy_roll,precision_roll,recall_roll,f1_score_roll,f2_score_roll,\n true_negative,false_positive,false_negative,true_positive,\n nb_outliers_roll,nb_labels_roll,nb_outliers_tot,nb_labels_tot]", "def measurements(self):\n # TODO: add in empty measurements for assays that have none?\n return self._measure_queryset", "def measure(self, lastMeasure=None, m=None):\n if m is None:\n m = {}\n m['_time'] = time.time()\n if lastMeasure is not None:\n m['_stepDuration'] = time.time() - lastMeasure['_time']\n else:\n m['_stepDuration'] = time.time() - self._start_t\n self._msr(m)\n return m", "def advancedStats():", "def runMeasurement(self):\n triggerI1ProMeasurement()\n self.spectrum.append(getI1ProSpectrum())\n self.tristimulus.append(getI1ProTriStimulus())", "def get_measured_outputs_values(self):\n obsOut = numpy.zeros(self.get_num_measured_outputs())\n i = 0\n for o in self.outputs:\n if o.is_measured_output():\n obsOut[i] = o.read_value_in_fmu(self.fmu)\n i += 1\n return obsOut", "def dataStats(self):\n print (\"Performing statistical analysis of the data\")\n # stuff to do", "def raw_measure(self) -> List[int]:\n # name, command, signals, delay\n return self._run_profile((\"raw_measure\", [0x20, 0x50], 2, 0.025))", "def run(self, d):\n\n feeds,feedidx,_ = self.getFeeds(d,'all')\n\n tod_shape = d[f'{self.level2}/averaged_tod'].shape\n \n scanedges = d[f'{self.level2}/Statistics/scan_edges'][...]\n nfeeds = 20\n nchannels = 8\n \n self.all_tod = np.zeros((20, nchannels, tod_shape[-1])) \n self.all_weights = np.zeros((20, nchannels, tod_shape[-1])) \n self.all_frequency = np.zeros((nchannels)) \n self.all_auto = np.zeros((20,nchannels)) \n self.all_mask = np.zeros((20,tod_shape[-1]))\n self.all_cal_factors = np.zeros((20,4,64))\n # Read in data from each feed\n for ifeed,feed in enumerate(tqdm(feeds,desc='Looping over feeds')):\n if feeds[ifeed] == 20:\n continue\n feed_tod,feed_weights,mask = self.clean_tod(d,ifeed,feed)\n\n if self.astro_cal:\n feed_tod,feed_weights,cal_factors = self.calibrate_tod(d,feed_tod,feed_weights,ifeed,feed)\n else:\n cal_factors = 1\n\n self.all_tod[feed-1],self.all_weights[feed-1], self.all_auto[feed-1], self.all_frequency = self.average_tod(d,feed_tod,feed_weights,mask) \n self.all_mask[feed-1] = mask\n self.all_cal_factors[feed-1] = cal_factors", "def measures(self):\n return self._measures", "def m2m_changed_metrics(sender, **kwargs):\r\n if 'action' not in kwargs:\r\n return\r\n\r\n action = {\r\n 'post_add': 'm2m.added',\r\n 'post_remove': 'm2m.removed',\r\n 'post_clear': 'm2m.cleared',\r\n }.get(kwargs['action'])\r\n\r\n if not action:\r\n return\r\n\r\n tags = _database_tags(action, sender, kwargs)\r\n\r\n if 'model' in kwargs:\r\n tags.append('target_class:{}'.format(kwargs['model'].__name__))\r\n\r\n pk_set = kwargs.get('pk_set', []) or []\r\n\r\n dog_stats_api.increment(\r\n 'edxapp.db.model',\r\n value=len(pk_set),\r\n tags=tags\r\n )", "def measure(mode, x, y, x0, x1, thresh = 0):\n xt = x.view(numpy.ndarray) # strip Metaarray stuff -much faster!\n v = y.view(numpy.ndarray)\n \n xm = 
ma.masked_outside(xt, x0, x1).T\n ym = ma.array(v, mask = ma.getmask(xm))\n if mode == 'mean':\n r1 = ma.mean(ym)\n r2 = ma.std(ym)\n if mode == 'max' or mode == 'maximum':\n r1 = ma.max(ym)\n r2 = xm[ma.argmax(ym)]\n if mode == 'min' or mode == 'minimum':\n r1 = ma.min(ym)\n r2 = xm[ma.argmin(ym)]\n if mode == 'median':\n r1 = ma.median(ym)\n r2 = 0\n if mode == 'p2p': # peak to peak\n r1 = ma.ptp(ym)\n r2 = 0\n if mode == 'std': # standard deviation\n r1 = ma.std(ym)\n r2 = 0\n if mode == 'var': # variance\n r1 = ma.var(ym)\n r2 = 0\n if mode == 'cumsum': # cumulative sum\n r1 = ma.cumsum(ym) # Note: returns an array\n r2 = 0\n if mode == 'anom': # anomalies = difference from averge\n r1 = ma.anom(ym) # returns an array\n r2 = 0\n if mode == 'sum':\n r1 = ma.sum(ym)\n r2 = 0\n if mode == 'area' or mode == 'charge':\n r1 = ma.sum(ym)/(ma.max(xm)-ma.min(xm))\n r2 = 0\n if mode == 'latency': # return first point that is > threshold\n sm = ma.nonzero(ym > thresh)\n r1 = -1 # use this to indicate no event detected\n r2 = 0\n if ma.count(sm) > 0:\n r1 = sm[0][0]\n r2 = len(sm[0])\n if mode == 'count':\n r1 = ma.count(ym)\n r2 = 0\n if mode == 'maxslope':\n return(0,0)\n slope = numpy.array([])\n win = ma.flatnotmasked_contiguous(ym)\n st = int(len(win)/20) # look over small ranges\n for k in win: # move through the slope measurementwindow\n tb = range(k-st, k+st) # get tb array\n newa = numpy.array(self.dat[i][j, thisaxis, tb])\n ppars = numpy.polyfit(x[tb], ym[tb], 1) # do a linear fit - smooths the slope measures\n slope = numpy.append(slope, ppars[0]) # keep track of max slope\n r1 = numpy.amax(slope)\n r2 = numpy.argmax(slope)\n return(r1, r2)", "def _write_measurements(summary_writer, labels_and_values, step):\n\n # Write TF Summaries Measurements.\n with summary_writer.as_default():\n for (label, value) in labels_and_values:\n tf.summary.scalar(label, value, step=step)", "def run(self):\n self.run_measurement()\n self.run_analysis()\n if self.get_param_value('update'):\n self.run_update()", "def trigger_measurement_with_mass_flow_t_comp_and_averaging(self):\n return self.execute(SdpI2cCmdTriggerMeasurementWithMassFlowTCompAndAveraging())", "def measure(mode, x, y, x0, x1):\n xm = ma.masked_outside(x, x0, x1)\n ym = ma.array(y, mask = ma.getmask(xm))\n if mode == 'mean':\n r1 = ma.mean(ym)\n r2 = ma.std(ym)\n if mode == 'max':\n r1 = ma.max(ym)\n r2 = 0\n if mode == 'min':\n r1 = ma.min(ym)\n r2 = 0\n if mode == 'median':\n r1 = ma.median(ym)\n r2 = 0\n if mode == 'p2p': # peak to peak\n r1 = ma.ptp(ym)\n r2 = 0\n return(r1, r2)", "def MeasureMultipleDigitalEdges(self, channel1, channel2, edgeType1, edgeType2, points1, points2, timeout=0.1,\n **kwargs):\n self.H.__sendByte__(CP.TIMING)\n self.H.__sendByte__(CP.TIMING_MEASUREMENTS)\n timeout_msb = int((timeout * 64e6)) >> 16\n # print ('timeout',timeout_msb)\n self.H.__sendInt__(timeout_msb)\n self.H.__sendByte__(self.__calcDChan__(channel1) | (self.__calcDChan__(channel2) << 4))\n params = 0\n if edgeType1 == 'rising':\n params |= 3\n elif edgeType1 == 'falling':\n params |= 2\n else:\n params |= 4\n\n if edgeType2 == 'rising':\n params |= 3 << 3\n elif edgeType2 == 'falling':\n params |= 2 << 3\n else:\n params |= 4 << 3\n\n if ('SQR1' in kwargs): # User wants to toggle SQ1 before starting the timer\n params |= (1 << 6)\n if kwargs['SQR1'] == 'HIGH': params |= (1 << 7)\n self.H.__sendByte__(params)\n if points1 > 4: points1 = 4\n if points2 > 4: points2 = 4\n self.H.__sendByte__(points1 | (points2 << 4)) # Number of points to fetch from 
either channel\n\n self.H.waitForData(timeout)\n\n A = np.array([self.H.__getLong__() for a in range(points1)])\n B = np.array([self.H.__getLong__() for a in range(points2)])\n tmt = self.H.__getInt__()\n self.H.__get_ack__()\n # print(A,B)\n if (tmt >= timeout_msb): return None, None\n rtime = lambda t: t / 64e6\n if (kwargs.get('zero', True)): # User wants set a reference timestamp\n return rtime(A - A[0]), rtime(B - A[0])\n else:\n return rtime(A), rtime(B)", "def compute_statistics(self):", "def CreateMeasuringContext(*args):\n return _gdi_.GraphicsContext_CreateMeasuringContext(*args)", "def compute_metrics(self):\n self.finalize_output_dict()\n self.metric_dict = {\n key: value(self.output_dict[\"labels\"], self.output_dict[\"pred_probs\"])\n for key, value in self.metric_fns.items()\n }", "def performance(self, id):", "def testMethodProfile2D(self):\n\n toolBar = self.plot.getProfileToolbar()\n\n toolBar.vLineAction.trigger()\n plot2D = self.plot.getPlotWidget().getWidgetHandle()\n pos1 = plot2D.width() * 0.5, plot2D.height() * 0.5\n self.mouseClick(plot2D, qt.Qt.LeftButton, pos=pos1)\n\n manager = toolBar.getProfileManager()\n roi = manager.getCurrentRoi()\n roi.setProfileMethod(\"mean\")\n roi.setProfileType(\"2D\")\n roi.setProfileLineWidth(3)\n\n for _ in range(20):\n self.qWait(200)\n if not manager.hasPendingOperations():\n break\n\n # check 2D 'mean' profile\n profilePlot = roi.getProfileWindow().getCurrentPlotWidget()\n data = profilePlot.getAllImages()[0].getData()\n expected = numpy.array([[1, 4], [7, 10], [13, 16]])\n numpy.testing.assert_almost_equal(data, expected)", "def _get_measurements_with_derived_metrics(self, measurements):\n\n now = time.time()\n\n def metrics_available(*names):\n return all(name in self._event_names and name in measurements\n and name in self._prev_measurements for name in names)\n\n def delta(*names):\n return [measurements[name] - self._prev_measurements[name] for name in names]\n\n # if specific pairs are available calculate derived metrics\n if self._prev_measurements is not None:\n time_delta = now - self._prev_ts\n\n if metrics_available(MetricName.INSTRUCTIONS, MetricName.CYCLES):\n inst_delta, cycles_delta = delta(MetricName.INSTRUCTIONS,\n MetricName.CYCLES)\n if cycles_delta > 0:\n measurements[DerivedMetricName.IPC] = float(inst_delta) / cycles_delta\n\n if time_delta > 0:\n measurements[DerivedMetricName.IPS] = float(inst_delta) / time_delta\n\n if metrics_available(MetricName.INSTRUCTIONS, MetricName.CACHE_MISSES):\n inst_delta, cache_misses_delta = delta(MetricName.INSTRUCTIONS,\n MetricName.CACHE_MISSES)\n if inst_delta > 0:\n measurements[DerivedMetricName.CACHE_MISSES_PER_KILO_INSTRUCTIONS] = \\\n float(cache_misses_delta) * 1000 / inst_delta\n\n if metrics_available(MetricName.CACHE_REFERENCES, MetricName.CACHE_MISSES):\n cache_ref_delta, cache_misses_delta = delta(MetricName.CACHE_REFERENCES,\n MetricName.CACHE_MISSES)\n if cache_ref_delta > 0:\n cache_hits_count = cache_ref_delta - cache_misses_delta\n measurements[DerivedMetricName.CACHE_HIT_RATIO] = (\n float(cache_hits_count) / cache_ref_delta)\n\n self._prev_measurements = measurements\n self._prev_ts = now\n\n return measurements", "def _evaluate_actions(self, *args, **kwargs):\n # DistributedDataParallel moves all tensors to the device (or devices)\n # So we need to make anything that is on the CPU into a numpy array\n # This is needed for older versions of pytorch that haven't deprecated\n # the single-process multi-device version of DDP\n return 
self._evaluate_actions_wrapper.ddp(\n *_cpu_to_numpy(args), **_cpu_to_numpy(kwargs)\n )", "def measure_dict():\n out = base_dict()\n out['mro']['current'] = ['Measure']\n out['name']['current'] = 'Measure'\n ao(out, 'nSamples', 'Integer', 1, readLevel=3)\n ao(out, 'id', 'String', 'Conversion source ID', readLevel=3)\n ao(out, 'uid', 'String', 'Unique ID', readLevel=5)\n ao(out, 'date', 'Date', '00:00:00 01/01/2000', name='Test date')\n ao(out, 'zerotime', 'Float', name='Acquisition starting time', readLevel=4)\n ao(out, 'elapsed', 'Float', name='Test duration', unit='second')\n ao(out, 'operator', 'String', name='Operator')\n return out", "def list_metrics(self):\n pass", "def measurement():\n try:\n meas_id = request.args.get('id', type=int)\n if meas_id is None:\n raise Exception(f'no valid id for pv module')\n meas = db.session.query(Measurement).get(meas_id)\n meas_values = db.session.query(MeasurementValues).filter(MeasurementValues.measurement_id == meas_id).all()\n print(meas_values)\n if meas is None:\n raise Exception(f'no measurement with id {meas_id} exists')\n return render_template('measurement/measurement.html', measurement=meas, measurement_values=meas_values)\n except Exception as e:\n flash(str(e), category='danger')\n return redirect('measurements')", "def metrics_group():", "def read_metrics(self):\n raise NotImplementedError()", "def _fill_results(self,spec,measurements,period,duration):\r\n logging.info(\"Fill measurements for spec {0}\".format(spec))\r\n \r\n if self._verb==mplane.model.VERB_QUERY:\r\n \"\"\"\r\n Query according to the time specified in the specification\r\n \"\"\"\r\n (first_time,last_time) = spec.when().datetimes()\r\n first_time=int(first_time.replace(tzinfo=datetime.timezone.utc).timestamp())\r\n last_time=int(last_time.replace(tzinfo=datetime.timezone.utc).timestamp())\r\n sleep_time = 0\r\n else:\r\n \"\"\"\r\n Query from NOW\r\n \"\"\"\r\n first_time = int(time.time())\r\n if (len(measurements[1])>0 or len(measurements[2])>0) and period<=self._pvsr_default_conf_check_cycle:\r\n #there are newly created or modified measurements\r\n first_time = first_time + self._pvsr_default_conf_check_cycle\r\n if first_time % period > 0:\r\n first_time = first_time - (first_time % period)\r\n last_time = first_time + int(duration / period) * period\r\n sleep_time = duration\r\n\r\n logging.debug(\"From: {0}, To: {1}\".format(datetime.datetime.fromtimestamp(first_time),datetime.datetime.fromtimestamp(last_time)))\r\n \r\n meas_data = {}\r\n\r\n while True:\r\n logging.info(\"Wait {0} seconds\".format(sleep_time))\r\n time.sleep(sleep_time)\r\n sleep_time = 30\r\n \r\n loaded_until=self._pvsr.getLastLoadedDataTimestamp(period)\r\n if int(loaded_until.timestamp())>=last_time or time.time()>last_time+period+300:\r\n for i in (0,1,2):\r\n for j in range(len(measurements[i])):\r\n self._fill_meas_result(measurements[i][j],first_time,last_time,meas_data)\r\n break\r\n else:\r\n logging.debug(\"last loaded is still {0}\".format(loaded_until))\r\n \r\n res = mplane.model.Result(specification=spec)\r\n res.set_when(mplane.model.When(a = datetime.datetime.utcfromtimestamp(first_time+period), b = datetime.datetime.utcfromtimestamp(last_time)))\r\n \r\n tmp_time=first_time+period\r\n row_index=0\r\n while tmp_time<=last_time:\r\n tmp_time2 = datetime.datetime.fromtimestamp(tmp_time)\r\n tmp_time3 = datetime.datetime.utcfromtimestamp(tmp_time)\r\n res.set_result_value(\"time\", tmp_time3, row_index)\r\n if tmp_time2 in meas_data:\r\n for mplane_name in 
meas_data[tmp_time2]:\r\n value = str(meas_data[tmp_time2][mplane_name])\r\n res.set_result_value(mplane_name, value, row_index)\r\n row_index+=1\r\n tmp_time+=period\r\n \r\n return res", "def compute_analysis(self):\r\n def get_mean(self):\r\n \"\"\"\r\n Compute mean in all sensors\r\n \"\"\"\r\n for i in range(1,len(self.data[0])):\r\n self.prom.append(np.mean(self.data[:,i])) \r\n\r\n \r\n def get_stddev(self):\r\n \"\"\"\r\n Compute mean in all sensors\r\n \"\"\"\r\n for i in range(1,len(self.data[0])):\r\n self.stddev.append(np.std(self.data[:,i])) \r\n \r\n # Get the values\r\n get_mean(self)\r\n get_stddev(self)\r\n \r\n # Check condition\r\n [(self.out_of_3stddev.append(i)) \r\n for (i) in (self.data[:,0:4]) \r\n if (any(\r\n (i[1:4] > 3*np.array(self.stddev)+np.array(self.prom))|\r\n (i[1:4] < -3*np.array(self.stddev)+np.array(self.prom))\r\n ))]", "def getMeasures(unique_name=None):", "def updatefig(*args):\n p1.set_array(turn(grid))\n p2.set_data(tally['time'], tally['sickos'])\n p3.set_data(tally['time'], tally['immune'])\n p4.set_data(tally['time'], tally['dead'])\n ax2.set_xlim(0, max(tally['time']))\n # ax2.set_ylim(0, max(max(sickos), max(immune)))\n # End sim if the disease is gone\n if tally['sickos'][-1] == 0:\n ani.event_source.stop()\n end_time = time.process_time()\n show_summary()\n print(\"Process time:\", end_time - start_time)\n return p1, p2, p3, p4,", "def computeBasicStatistics(self, targetLabels, actualLabels):\r\n self.basicMeasures = reduce(self._cbe, map(lambda x,y:(x,y), targetLabels,\r\n actualLabels), (0,0,0,0))\r\n return self.basicMeasures", "def update(self, labels, preds):\n labels, preds = check_label_shapes(labels, preds, True)\n\n for label, pred in zip(labels, preds):\n self.metrics.update_binary_stats(label, pred)\n\n if self.average == \"macro\":\n self.sum_metric += self.metrics.fscore\n self.num_inst += 1\n self.metrics.reset_stats()\n else:\n self.sum_metric = self.metrics.fscore * self.metrics.total_examples\n self.num_inst = self.metrics.total_examples", "def process_meter_message(self, d):\n dpid = int(d.get(\"dpid\", 0))\n dp = self.dpset.get(dpid)\n if not dp:\n return \"Datapath does not exist!\"\n\n ofproto = dp.ofproto\n parser = dp.ofproto_parser\n\n command = {\n 'add': ofproto.OFPMC_ADD,\n 'mod': ofproto.OFPMC_MODIFY,\n 'del': ofproto.OFPMC_DELETE,\n }\n cmd = command.get(d[\"operation\"], ofproto.OFPMC_ADD)\n\n meter_id = d[\"meter_id\"]\n\n flags = 0\n bands = []\n if \"flags\" in d: # Ryu's format\n print(d['flags'])\n for f in d['flags']:\n flags += 0x01 if f == 'KBPS' else 0\n flags += 0x02 if f == 'PKTPS' else 0\n flags += 0x04 if f == 'BURST' else 0\n flags += 0x08 if f == 'STATS' else 0\n\n for band in d[\"bands\"]:\n if band['type'] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band['rate'],\n burst_size=band['burst_size'])]\n elif band['type'] == 'DSCP_REMARK':\n bands += [parser.OFPMeterBandDscpRemark(rate=band['rate'],\n burst_size=band['burst_size'], prec_level=band['prec_level'])]\n\n else: # FlowManager's format\n flags += 0x01 if d['OFPMF_KBPS'] else 0\n flags += 0x02 if d['OFPMF_PKTPS'] else 0\n flags += 0x04 if d['OFPMF_BURST'] else 0\n flags += 0x08 if d['OFPMF_STATS'] else 0\n\n # Flags must have KBPS or PKTPS\n flags = flags if (flags & 0x03) else (flags | 0x01)\n\n for band in d[\"bands\"]:\n #mtype = type_convert.get(band[0])\n if band[0] == 'DROP':\n bands += [parser.OFPMeterBandDrop(rate=band[1],\n burst_size=band[2])]\n elif band[0] == 'DSCP_REMARK':\n bands += 
[parser.OFPMeterBandDscpRemark(rate=band[1],\n burst_size=band[2], prec_level=band[3])]\n\n # TODO: catch some errors\n meter_mod = parser.OFPMeterMod(dp, cmd, flags, meter_id, bands)\n try:\n dp.send_msg(meter_mod)\n except KeyError as e:\n return e.__repr__()\n except Exception as e:\n return e.__repr__()\n\n return \"Message sent successfully.\"", "def _measure():\n return {\n 'type' : 'class',\n 'name' : 'measure',\n 'base' : None,\n 'is_abstract' : False,\n 'doc' : None,\n 'properties' : [\n ('description', 'str', '0.1', None),\n ('identification', 'str', '0.1', None),\n ('name', 'str', '0.1', None),\n ],\n 'decodings' : [\n ('description', 'child::cim:measureDescription'),\n ('identification', 'child::cim:measureIdentification/gmd:code/gco:CharacterString'),\n ('name', 'child::cim:nameOfMeasure'),\n\n # Hacks due to DKRZ misimplementation.\n ('description', 'parent::cim:report/child::gmd:measureDescription/gco:CharacterString'),\n ('name', 'parent::cim:report/child::gmd:nameOfMeasure/gco:CharacterString'),\n ]\n }", "def measure(self, recommender):", "def calc_psd(self):\n psd2d = np.array(self.calc_psd2d())\n\n print(\"Azimuthally averaging 2D power spectral density ... \",\n end=\"\", flush=True)\n dim = self.shape[0]\n dim_half = (dim+1) // 2\n # NOTE:\n # The zero-frequency component is shifted to position of index\n # (0-based): (ceil((n-1) / 2), ceil((m-1) / 2))\n px = np.arange(dim_half-dim, dim_half)\n x, y = np.meshgrid(px, px)\n rho = np.sqrt(x**2 + y**2)\n\n radii = self.radii\n nr = len(radii)\n if nr > 100:\n print(\"\\n ... %d data points, may take a while ... \" % nr,\n end=\"\", flush=True)\n else:\n print(\" %d data points ... \" % nr, end=\"\", flush=True)\n psd1d = np.zeros(shape=(nr, 4))\n psd1d[:, 0] = self.frequencies\n\n for i, r in enumerate(radii):\n if (i+1) % 100 == 0:\n percent = 100 * (i+1) / nr\n print(\"%.1f%% ... 
\" % percent, end=\"\", flush=True)\n ii, jj = (rho <= r).nonzero()\n rho[ii, jj] = np.inf\n cells = psd2d[ii, jj]\n psd1d[i, 3] = len(cells)\n if self.meanstd:\n psd1d[i, 1] = np.mean(cells)\n psd1d[i, 2] = np.std(cells)\n else:\n median = np.median(cells)\n mad = np.median(np.abs(cells - median))\n psd1d[i, 1] = median\n psd1d[i, 2] = mad * 1.4826\n print(\"DONE\", flush=True)\n\n self.psd1d = psd1d\n return psd1d", "def updateMeter(self, name1, name2, op):\r\n mini = 0\r\n maxi = 100\r\n pos = (self.var.get() - mini) / (maxi - mini)\r\n self.updateMeterLine(pos * 0.6 + 0.2)", "def measurements(self):\n return self._measurements", "def iaq_measure(self) -> List[int]:\n # name, command, signals, delay\n return self._run_profile((\"iaq_measure\", [0x20, 0x08], 2, 0.05))", "def measure(x, y):\n return dotc_gpu(x, y)", "def update(self, phase, targets, outputs):\n iou, dice, dice_neg, dice_pos, _, _ = self.metric(outputs, targets)\n self.base_dice_scores[phase].append(dice)\n self.dice_pos_scores[phase].append(dice_pos)\n self.dice_neg_scores[phase].append(dice_neg)\n self.iou_scores[phase].append(iou)", "def getMeasurements(self):\n return self._Measurements", "def computePRMeasures(self, targetLabels, actualLabels):\r\n if self.basicMeasures is None:\r\n self.basicMeasures = self.computeBasicStatistics(targetLabels, actualLabels)\r\n if self.basicMeasures[0] == 0:\r\n self.prMeasures = (0,0)\r\n else:\r\n self.prMeasures = ((0.0 + self.basicMeasures[0]) / (self.basicMeasures[0] + self.basicMeasures[1]),\r\n (0.0 + self.basicMeasures[0]) / (self.basicMeasures[0] + self.basicMeasures[3]))\r\n return self.prMeasures", "def test_get_measure_parameters_by_id(self):\n pass", "def calculate_dataset_metrics(self):\n pass", "def add_meas(self, ros_meas, output=False):\n msg_id = self._get_meas_identifier(ros_meas)\n # Main filter fuses all measurements\n if self.is_main_filter:\n pass\n elif ros_meas.src_asset != self.my_name:\n pass\n elif self._is_shareable(ros_meas.src_asset, ros_meas.meas_type): \n pass\n elif msg_id in self.meas_types_received:\n return\n else: # Don't fuse (e.g. 
depth, sonar_z)\n return -1\n self.meas_types_received.append(msg_id)\n\n ledger_ind = self._get_meas_ledger_index( ros_meas.stamp )\n\n # Check for Event-Triggering\n if self._is_shareable(ros_meas.src_asset, ros_meas.meas_type):\n if \"implicit\" not in ros_meas.meas_type:\n src_id = self.asset2id[ros_meas.src_asset]\n measured_id = self.asset2id[ros_meas.measured_asset]\n ros_meas.et_delta = self._get_meas_et_delta(ros_meas.meas_type)\n meas = get_internal_meas_from_ros_meas(ros_meas, src_id, measured_id)\n\n implicit, innovation = self.filter.check_implicit(meas)\n if implicit:\n\n ros_meas.meas_type += \"_implicit\"\n\n # print(\"@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ IMPLICIT @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\")\n # print(ros_meas)\n # print(vars(meas))\n # print(self.filter.x_hat)\n else:\n self.explicit_count += 1\n if output:\n expected = meas.data - innovation\n meas_id = self._get_meas_identifier(ros_meas)\n last_update_time = self.ledger[len(self.ledger)-1][\"time\"]\n # print(\"Explicit {} {} : expected: {}, got: {}\".format(last_update_time.to_sec(), meas_id, expected, meas.data))\n # print(self.meas_types_received)\n # print(self.filter.x_hat.T)\n # print(\"Explicit #{} {} : {}\".format(self.explicit_count, self.delta_multiplier, ros_meas.meas_type))\n # print(ros_meas)\n # print(vars(meas))\n # print(self.filter.x_hat)\n\n\n \n # Append to the ledger\n self.ledger[ ledger_ind ][\"meas\"].append( ros_meas )\n return ledger_ind", "def GraphicsContext_CreateMeasuringContext(*args):\n return _gdi_.GraphicsContext_CreateMeasuringContext(*args)", "def trigger_measurement_with_diff_pressure_t_comp_and_averaging(self):\n return self.execute(SdpI2cCmdTriggerMeasurementWithDiffPressureTComp())", "def test_add_two_sources(self):\n metric = self.metric()\n measurement = self.measurement(\n metric,\n sources=[self.source(metric, value=\"10\"), self.source(metric, value=\"20\")],\n )\n self.assertEqual(\"30\", measurement[\"count\"][\"value\"])", "def get_measurements_by_time(self):\n data_path = os.path.abspath(\n os.path.join(\n os.path.dirname(os.path.realpath(__file__)),\n \"..\",\n \"data/NVB_rescale_dataset.p\",\n )\n )\n self.log_print([\"Getting experimental data from {}\".format(data_path)])\n self.measurements = pickle.load(open(data_path, \"rb\"))\n return self.measurements", "def measures(self) -> Tuple[Union[ReportMeasures, str], ...]:\n return self.__measures" ]
[ "0.56643057", "0.5537284", "0.55313635", "0.55198115", "0.543281", "0.542291", "0.53697515", "0.5345556", "0.5296102", "0.5231518", "0.52056426", "0.51958555", "0.5187039", "0.5114033", "0.5113663", "0.50779897", "0.50573653", "0.50535953", "0.50285745", "0.5011202", "0.50048065", "0.49506637", "0.49415648", "0.49152985", "0.49132672", "0.4893087", "0.48619795", "0.48573297", "0.4853301", "0.48380572", "0.4834089", "0.4833397", "0.47993866", "0.47943667", "0.47787547", "0.47653198", "0.4763545", "0.4763545", "0.47436386", "0.47310105", "0.4729736", "0.47275564", "0.47257254", "0.47243488", "0.47227818", "0.47192988", "0.4718148", "0.47168556", "0.47148615", "0.47113058", "0.47105232", "0.46913484", "0.46789557", "0.46788776", "0.4670256", "0.46582776", "0.4656619", "0.4656607", "0.4653057", "0.46466792", "0.46434197", "0.4631471", "0.46268797", "0.4621381", "0.46194053", "0.46116927", "0.46090946", "0.4608497", "0.46076784", "0.46046758", "0.4597776", "0.45905373", "0.45888713", "0.45884326", "0.45829964", "0.45791557", "0.45789963", "0.45763752", "0.45745078", "0.45607617", "0.45602936", "0.4559439", "0.4546698", "0.4546066", "0.45405418", "0.4534455", "0.45272398", "0.45260572", "0.4522788", "0.45191273", "0.45170546", "0.4515481", "0.4512268", "0.4511979", "0.45082906", "0.4502611", "0.44999534", "0.44995496", "0.4482328", "0.44820276", "0.44807094" ]
0.0
-1
Actions for Diffrn objects.
def action_diffrn(obj: Diffrn, thread: QtCore.QThread):
    w_actions = []
    f_setup = obj.is_attribute("setup")
    f_diffrn_radiation = obj.is_attribute("diffrn_radiation")
    f_diffrn_orient_matrix = obj.is_attribute("diffrn_orient_matrix")
    f_diffrn_refln = obj.is_attribute("diffrn_refln")
    f_phase = obj.is_attribute("phase")
    if not(f_setup & f_diffrn_radiation & f_diffrn_orient_matrix &
           f_diffrn_refln & f_phase):
        if not(f_setup):
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add setup")
            qtb_1.clicked.connect(lambda: add_items(obj, [Setup()], thread))
            w_actions.append(qtb_1)
        if not(f_diffrn_radiation):
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add diffrn_radiation")
            qtb_1.clicked.connect(lambda: add_items(
                obj, [DiffrnRadiation()], thread))
            w_actions.append(qtb_1)
        if not(f_diffrn_orient_matrix):
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add diffrn_orient_matrix")
            qtb_1.clicked.connect(lambda: add_items(obj, [DiffrnOrientMatrix(
                ub_11=1., ub_12=0., ub_13=0.,
                ub_21=0., ub_22=1., ub_23=0.,
                ub_31=0., ub_32=0., ub_33=1.,)], thread))
            w_actions.append(qtb_1)
        if not(f_diffrn_refln):
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add diffrn_refln")
            qtb_1.clicked.connect(lambda: add_items(
                obj, [DiffrnReflnL()], thread))
            w_actions.append(qtb_1)
        if not(f_phase):
            qtb_1 = QtWidgets.QToolButton()
            qtb_1.setText("Add phase")
            qtb_1.clicked.connect(lambda: add_items(obj, [
                Phase(label="phase")], thread))
            w_actions.append(qtb_1)
    if f_diffrn_refln:
        diffrn_refln = obj.diffrn_refln
        w_actions.extend(action_diffrn_refln_l(diffrn_refln, thread))
    if f_diffrn_orient_matrix:
        diffrn_orient_matrix = obj.diffrn_orient_matrix
        w_actions.extend(action_diffrn_orient_matrix(
            diffrn_orient_matrix, thread))
    return w_actions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_diffs(history):\n\n # First get all possible representations\n mgr = plugins_get_mgr() \n keys = mgr.search('representation')['representation']\n representations = [mgr.get_by_key('representation', k) for k in keys]\n\n for i in range(len(history)):\n if i+1 > len(history) - 1:\n continue\n\n prev = history[i]\n curr = history[i+1]\n\n #print(prev['subject'], \"==>\", curr['subject'])\n #print(curr['changes'])\n for c in curr['changes']:\n \n path = c['path']\n\n # Skip the metadata file\n if c['path'].endswith('datapackage.json'): \n continue \n\n # Find a handler for this kind of file...\n handler = None \n for r in representations: \n if r.can_process(path): \n handler = r \n break \n \n if handler is None: \n continue \n\n # print(path, \"being handled by\", handler)\n\n v1_hex = prev['commit']\n v2_hex = curr['commit']\n\n temp1 = tempfile.mkdtemp(prefix=\"dgit-diff-\") \n \n try: \n for h in [v1_hex, v2_hex]: \n filename = '{}/{}/checkout.tar'.format(temp1, h)\n try:\n os.makedirs(os.path.dirname(filename))\n except:\n pass \n extractcmd = ['git', 'archive', '-o', filename, h, path]\n output = run(extractcmd)\n if 'fatal' in output: \n raise Exception(\"File not present in commit\") \n with cd(os.path.dirname(filename)): \n cmd = ['tar', 'xvf', 'checkout.tar']\n output = run(cmd) \n if 'fatal' in output: \n print(\"Cleaning up - fatal 1\", temp1)\n shutil.rmtree(temp1)\n continue \n\n # Check to make sure that \n path1 = os.path.join(temp1, v1_hex, path) \n path2 = os.path.join(temp1, v2_hex, path) \n if not os.path.exists(path1) or not os.path.exists(path2): \n # print(\"One of the two output files is missing\") \n shutil.rmtree(temp1)\n continue \n\n #print(path1, path2) \n\n # Now call the handler\n diff = handler.get_diff(path1, path2)\n\n # print(\"Inserting diff\", diff)\n c['diff'] = diff\n\n except Exception as e: \n #traceback.print_exc() \n #print(\"Cleaning up - Exception \", temp1)\n shutil.rmtree(temp1)", "def __actions__(self, obj):\n primary_fields = self.__provider__.get_primary_fields(self.__entity__)\n pklist = '/'.join(map(lambda x: str(getattr(obj, x)), primary_fields))\n #if has_permission('manage'):############\n \n historial = DBSession.query(Item.nrohistorial).filter_by(id=pklist).first()\n idlineabase = DBSession.query(Item.idLineaBase).filter_by(nrohistorial=historial, ultimaversion=1).first()\n lineabase = DBSession.query(LineaBase).filter_by(id=idlineabase).first()\n \n value = '<div></div>'\n \n if lineabase != None:\n if str(lineabase.estado).__eq__('abierta'):\n value = '<div><a class=\"loginlogout\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">Revertir</a></div>'\n else:\n value = '<div><a class=\"loginlogout\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">Revertir</a></div>'\n \n return value", "def PostProcessDiff(self, diff):\r\n return diff", "def run(self, obj, diff):\n for a in AlertRule.objects.all():\n a = json.loads(a.alert_rule)\n\n if a[\"object\"] == obj.__class__.__name__:\n if a[\"attribute\"] in diff:\n if diff[a[\"attribute\"]] == a[\"changed_to\"]:\n # Criteria Satisfied. 
Run Alert Action\n\n subject = Template(a[\"action\"][\"subject\"]).render(c)\n msg = Template(a[\"action\"][\"message\"]).render(c)\n\n if \"type\" == \"email\":\n # Fill out subject/message Template\n c = Context({\n \"object\": obj,\n \"diff\": diff\n })\n\n if a[\"action\"][\"type\"] == \"email\":\n send_mail(\n subject,\n msg,\n settings.DEFAULT_FROM_EMAIL,\n [a[\"action\"][\"to\"]],\n fail_silently=False,\n )\n\n # TODO Add More Alert Types (phone, text, im)", "def actions(self):\n raise NotImplementedError", "def _diff(self, param, diff):\n pass", "def svn_diff_diff(*args):\n return _diff.svn_diff_diff(*args)", "def diff(request):\n if request.patch.no_base_file:\n # Can't show side-by-side diff since we don't have the base file. Show the\n # unified diff instead.\n return patch_helper(request, 'diff')\n\n patchset = request.patchset\n patch = request.patch\n\n patchsets = list(request.issue.patchsets)\n\n context = _get_context_for_user(request)\n column_width = _get_column_width_for_user(request)\n if patch.is_binary:\n rows = None\n else:\n try:\n rows = _get_diff_table_rows(request, patch, context, column_width)\n except FetchError as err:\n return HttpTextResponse(str(err), status=404)\n\n _add_next_prev(patchset, patch)\n return respond(request, 'diff.html',\n {'issue': request.issue,\n 'patchset': patchset,\n 'patch': patch,\n 'view_style': 'diff',\n 'rows': rows,\n 'context': context,\n 'context_values': models.CONTEXT_CHOICES,\n 'column_width': column_width,\n 'patchsets': patchsets,\n })", "def getChanges():", "def cmd_get_diff(base, target):\n return ['git', 'diff', base, target]", "def __actions__(self, obj):\n bool_ultimo = obj.bool_ultimo \n primary_fields = self.__provider__.get_primary_fields(self.__entity__)\n pklist = '/'.join(map(lambda x: str(getattr(obj, x)), primary_fields))\n\n if bool_ultimo == 1:\n cod_item = obj.cod_item\n value = '<div>'\n if has_permission('editar_item'):\n value = value + '<div><a class=\"edit_link\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">edit</a></div>'\n if has_permission('eliminar_relacion'):\n value = value + '<div><form method=\"POST\" action=\"'+pklist+'\" class=\"button-to\"><input type=\"hidden\" name=\"_method\" value=\"DELETE\" /><input class=\"delete-button\" onclick=\"return confirm(\\'Are you sure?\\');\" value=\"delete\" type=\"submit\" style=\"background-color: transparent; float:left; border:0; color: #286571; display: inline; margin: 0; padding: 0;\"/></form></div>'\n value = value + '<div><a class=\"relacion_link\" href=\"../relacions/?iid='+pklist+'\">Relaciones </a><br/><a class=\"versiones_link\" href=\"./?codi='+cod_item+'\">Revertir</a></div></div>'\n \n else:\n id_item_rev = DBSession.query(Item).filter_by(cod_item = obj.cod_item, bool_ultimo = 1).one().id_item\n ids = str(pklist) + \"-\" + str(id_item_rev)\n href = \"./revertir/?ids=\" + ids\n value = '<div><div><a class=\"edit_link\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">edit</a>'\\\n '</div><div>'\\\n '<form method=\"POST\" action=\"'+pklist+'\" class=\"button-to\">'\\\n '<input type=\"hidden\" name=\"_method\" value=\"DELETE\" />'\\\n '<input class=\"delete-button\" onclick=\"return confirm(\\'Are you sure?\\');\" value=\"delete\" type=\"submit\" '\\\n 'style=\"background-color: transparent; float:left; border:0; color: #286571; display: inline; margin: 0; padding: 0;\"/>'\\\n '</form>'\\\n '<a class=\"relacion_link\" href=\"../relacions/?iid='+pklist+'\">Relaciones </a>'\\\n '<a class=\"volver_link\" href=\"'+href+'\">Volver 
a</a>'\\\n '</div></div>'\n\n return value", "def transact(self):", "def transact(self):", "def objects(self):", "def _populateModel(self):\n\n self.repoPath = self.argv[1]\n self.rev = self.argv[2]\n self.model.rev = self.rev\n self.model.repo = os.path.split(self.repoPath)[-1]\n self.prefix = (self.addRepoPrefix() and ('/' + self.model.repo)) or ''\n\n # First, get the user and log message\n lines = self._svnlook('info')\n self.model.user = lines[0][:-1]\n self.model.log = ''.join(lines[3:]).strip()\n\n # Now build an initial tree of file and tree changes\n for line in self._svnlook('changed'):\n action = self.actions[line[0]]\n target = '/' + line[4:-1]\n\n if target.endswith('/'):\n directory = self.model.directory(self.prefix + target)\n directory.action = action\n else:\n parts = target.split('/')\n name = parts[-1]\n directoryPath = '/' + '/'.join(parts[0:-1]) + '/'\n\n file = File(name, self.model.directory(self.prefix + directoryPath), action)\n\n # Markers to tell us when we hit a new diff\n markers = ['Modified', 'Added', 'Copied', 'Deleted', 'Property changes on']\n\n # Recontruct each diff by parsing through the output of svnlook line by line\n diffs = []\n partialDiff = None\n\n #A marker word after a \"____\" line is a change in a property and shouldn't be added as a change\n #in a file. InProperty keeps track of this. If it's 0 this is a normal line, any larger \n #and it's a property line.\n inProperty = 1\n for line in self.getDiffLines():\n inProperty = max(0, inProperty-1)\n if line == \"___________________________________________________________________\\n\":\n inProperty = 2\n\n # Look for Modified:, Added:, etc.\n if line[0:line.find(':')] in markers and not inProperty > 0:\n # Handle starting a new diff\n partialDiff = [line]\n diffs.append(partialDiff)\n elif partialDiff:\n partialDiff.append(line)\n\n if len(diffs) == 0:\n for file in self.model.files():\n file.delta = '<Unavailable>'\n file.diff = ''\n\n # And finally parse through the diffs and save them into our tree of changes\n for diff in diffs:\n # Use [:-1] to leave of the trailing \\n\n start = diff[0].find(': ') + 2\n stop = diff[0].find('(') - 1 # -1 ignores the space before the paren\n if stop == -2: stop = len(diff[0])\n\n filePath = '/' + diff[0][:-1][start:stop]\n\n # This could be a file or a directory - going ahead with the .file()\n # call for most directories is fine as it will just return null.\n #\n # Howeever, root / will exception out as an invalid file path so\n # just special case it\n if filePath == '/':\n file = None\n else:\n file = self.model.file(self.prefix + filePath)\n\n # Maybe its a directory\n if file:\n isFile = True\n else:\n file = self.model.directory(self.prefix + filePath + '/')\n isFile = False\n\n if not diff[0].startswith('Property changes on:'):\n file.delta, file.diff = self._parse_diff(diff)\n else:\n if file.diff:\n # Only files will already have a diff set\n file.diff = file.diff + '\\n\\n' + ''.join(diff)\n else:\n # If the 'Property changes on' line is here without a\n # file.diff, that file.diff will never come because it would\n # have been printed before us\n if isFile:\n sep = '===================================================================\\n\\n'\n file.diff = ''.join([sep] + diff)\n file.delta = '+0 -0'\n else:\n file.diff = ''.join(diff)", "def diff(self):\n return self.client.api.diff(self.id)", "def list_operations():", "def GenerateDiff(self, args):\r\n raise NotImplementedError(\r\n \"abstract method -- subclass %s must override\" % 
self.__class__)", "def __actions__(self, obj):\n\t\t\tprimary_fields \t= self.__provider__.get_primary_fields(self.__entity__)\n\t\t\tpklist \t\t= '/'.join(map(lambda x: str(getattr(obj, x)), primary_fields))\n\n\t\t\tvalue \t\t= '<div>'\n\t\t\tif has_permission('editar_LB'):\n\t\t\t\tvalue = value + '<div><a class=\"edit_link\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">edit</a></div>'\n\t\t\tif has_permission('eliminar_LB'):\n\t\t\t\tvalue = value + '<div><form method=\"POST\" action=\"'+pklist+'\" class=\"button-to\"><input type=\"hidden\" name=\"_method\" value=\"DELETE\" /><input class=\"delete-button\" onclick=\"return confirm(\\'Est&aacute; seguro que desea eliminar?\\');\" value=\"delete\" type=\"submit\" style=\"background-color: transparent; float:left; border:0; color: #286571; display: inline; margin: 0; padding: 0;\"/></form></div>'\n\t\t\tvalue = value + '</div>'\n\t\t\treturn value", "def diff(self):\n if self.event == 'Create':\n old = ''\n else:\n # Get the Change just ahead of _this_ change because that has the\n # state of the Resource before this Change occurred.\n # TODO(nickpegg): Get rid of this if we change the behavior of\n # Change to store the previous version of the object\n old_change = Change.objects.filter(\n change_at__lt=self.change_at,\n resource_id=self.resource_id,\n resource_name=self.resource_name\n ).order_by(\n '-change_at'\n ).first()\n old = json.dumps(old_change._resource, indent=2, sort_keys=True)\n\n if self.event == 'Delete':\n current = ''\n else:\n resource = apps.get_model(self._meta.app_label, self.resource_name)\n obj = resource.objects.get(pk=self.resource_id)\n\n serializer_class = self.get_serializer_for_resource(\n self.resource_name)\n serializer = serializer_class(obj)\n current = json.dumps(serializer.data, indent=2, sort_keys=True)\n\n diff = \"\\n\".join(difflib.ndiff(\n old.splitlines(),\n current.splitlines()\n ))\n\n return diff", "def do_manipulations(self, *args, **kwargs):\n pass", "def _git_diff_files(ref=\"master\"):\n result = []\n command = [\"git\", \"diff\", \"--name-status\", \"%s\" % (ref)]\n exit_code, output = _execute(command)\n if exit_code != 0:\n print(\"Failed to diff files.\")\n sys.exit(1)\n\n for line in output.decode(\"utf-8\").splitlines():\n parts = line.split(\"\\t\")\n action = parts[0]\n name = parts[-1]\n action = action.lower()\n result.append((action, name))\n\n return result", "def _buildDiff(self):\n outputList = []\n for tag, alo, ahi, blo, bhi in self.cruncher.get_opcodes():\n if tag == 'replace':\n # Text replaced = deletion + insertion\n outputList.append(self.delTag % u\" \".join(self.source[alo:ahi]))\n outputList.append(self.insTag % u\" \".join(self.target[blo:bhi]))\n self.replaceCount += 1\n elif tag == 'delete':\n # Text deleted\n outputList.append(self.delTag % u\" \".join(self.source[alo:ahi]))\n self.deleteCount += 1\n elif tag == 'insert':\n # Text inserted\n outputList.append(self.insTag % u\" \".join(self.target[blo:bhi]))\n self.insertCount += 1\n diffText = u\" \".join(outputList)\n #diffText = \" \".join(diffText.split())\n self.diffText = diffText.replace(self.nl, u\"\\n\")", "def actions():\n pass", "def diff(self, rev=None):\r\n args = []\r\n if rev is not None:\r\n args.append(\"-r %d\" % rev)\r\n out = self._authsvn('diff', args)\r\n return out", "def diff(ctx, files, metrics, all, detail, revision, wrap):\n config = ctx.obj[\"CONFIG\"]\n\n if not exists(config):\n handle_no_cache(ctx)\n\n if not metrics:\n metrics = get_default_metrics(config)\n 
logger.info(f\"Using default metrics {metrics}\")\n else:\n metrics = metrics.split(\",\")\n logger.info(f\"Using specified metrics {metrics}\")\n\n from wily.commands.diff import diff\n\n logger.debug(f\"Running diff on {files} for metric {metrics}\")\n diff(\n config=config,\n files=files,\n metrics=metrics,\n changes_only=not all,\n detail=detail,\n revision=revision,\n wrap=wrap,\n )", "def svn_diff_file_diff(*args):\n return _diff.svn_diff_file_diff(*args)", "def actions(self, state):\n\t\traise NotImplementedError", "def post_revert(self):", "def GenerateDiff(self, args):\n raise NotImplementedError(\n \"abstract method -- subclass %s must override\" % self.__class__)", "def initActions(self):\n self.vcsNewAct = E5Action(\n self.tr('New from repository'),\n UI.PixmapCache.getIcon(\"vcsCheckout.png\"),\n self.tr('&New from repository...'), 0, 0,\n self, 'git_new')\n self.vcsNewAct.setStatusTip(self.tr(\n 'Create (clone) a new project from a Git repository'\n ))\n self.vcsNewAct.setWhatsThis(self.tr(\n \"\"\"<b>New from repository</b>\"\"\"\n \"\"\"<p>This creates (clones) a new local project from \"\"\"\n \"\"\"a Git repository.</p>\"\"\"\n ))\n self.vcsNewAct.triggered.connect(self._vcsCheckout)\n self.actions.append(self.vcsNewAct)\n \n self.gitFetchAct = E5Action(\n self.tr('Fetch changes'),\n UI.PixmapCache.getIcon(\"vcsUpdate.png\"),\n self.tr('Fetch changes'),\n 0, 0, self, 'git_fetch')\n self.gitFetchAct.setStatusTip(self.tr(\n 'Fetch changes from a remote repository'\n ))\n self.gitFetchAct.setWhatsThis(self.tr(\n \"\"\"<b>Fetch changes</b>\"\"\"\n \"\"\"<p>This fetches changes from a remote repository into the \"\"\"\n \"\"\"local repository.</p>\"\"\"\n ))\n self.gitFetchAct.triggered.connect(self.__gitFetch)\n self.actions.append(self.gitFetchAct)\n \n self.gitPullAct = E5Action(\n self.tr('Pull changes'),\n UI.PixmapCache.getIcon(\"vcsUpdate.png\"),\n self.tr('Pull changes'),\n 0, 0, self, 'git_pull')\n self.gitPullAct.setStatusTip(self.tr(\n 'Pull changes from a remote repository and update the work area'\n ))\n self.gitPullAct.setWhatsThis(self.tr(\n \"\"\"<b>Pull changes</b>\"\"\"\n \"\"\"<p>This pulls changes from a remote repository into the \"\"\"\n \"\"\"local repository and updates the work area.</p>\"\"\"\n ))\n self.gitPullAct.triggered.connect(self.__gitPull)\n self.actions.append(self.gitPullAct)\n \n self.vcsCommitAct = E5Action(\n self.tr('Commit changes to repository'),\n UI.PixmapCache.getIcon(\"vcsCommit.png\"),\n self.tr('Commit changes to repository...'), 0, 0, self,\n 'git_commit')\n self.vcsCommitAct.setStatusTip(self.tr(\n 'Commit changes of the local project to the Git repository'\n ))\n self.vcsCommitAct.setWhatsThis(self.tr(\n \"\"\"<b>Commit changes to repository</b>\"\"\"\n \"\"\"<p>This commits changes of the local project to the \"\"\"\n \"\"\"Git repository.</p>\"\"\"\n ))\n self.vcsCommitAct.triggered.connect(self._vcsCommit)\n self.actions.append(self.vcsCommitAct)\n \n self.gitPushAct = E5Action(\n self.tr('Push changes'),\n UI.PixmapCache.getIcon(\"vcsCommit.png\"),\n self.tr('Push changes'),\n 0, 0, self, 'git_push')\n self.gitPushAct.setStatusTip(self.tr(\n 'Push changes to a remote repository'\n ))\n self.gitPushAct.setWhatsThis(self.tr(\n \"\"\"<b>Push changes</b>\"\"\"\n \"\"\"<p>This pushes changes from the local repository to a \"\"\"\n \"\"\"remote repository.</p>\"\"\"\n ))\n self.gitPushAct.triggered.connect(self.__gitPush)\n self.actions.append(self.gitPushAct)\n \n self.vcsExportAct = E5Action(\n self.tr('Export from 
repository'),\n UI.PixmapCache.getIcon(\"vcsExport.png\"),\n self.tr('&Export from repository...'),\n 0, 0, self, 'git_export_repo')\n self.vcsExportAct.setStatusTip(self.tr(\n 'Export a project from the repository'\n ))\n self.vcsExportAct.setWhatsThis(self.tr(\n \"\"\"<b>Export from repository</b>\"\"\"\n \"\"\"<p>This exports a project from the repository.</p>\"\"\"\n ))\n self.vcsExportAct.triggered.connect(self._vcsExport)\n self.actions.append(self.vcsExportAct)\n \n self.gitLogBrowserAct = E5Action(\n self.tr('Show log browser'),\n UI.PixmapCache.getIcon(\"vcsLog.png\"),\n self.tr('Show log browser'),\n 0, 0, self, 'git_log_browser')\n self.gitLogBrowserAct.setStatusTip(self.tr(\n 'Show a dialog to browse the log of the local project'\n ))\n self.gitLogBrowserAct.setWhatsThis(self.tr(\n \"\"\"<b>Show log browser</b>\"\"\"\n \"\"\"<p>This shows a dialog to browse the log of the local\"\"\"\n \"\"\" project. A limited number of entries is shown first.\"\"\"\n \"\"\" More can be retrieved later on.</p>\"\"\"\n ))\n self.gitLogBrowserAct.triggered.connect(self._vcsLogBrowser)\n self.actions.append(self.gitLogBrowserAct)\n \n self.gitReflogBrowserAct = E5Action(\n self.tr('Show reflog browser'),\n UI.PixmapCache.getIcon(\"vcsLog.png\"),\n self.tr('Show reflog browser'),\n 0, 0, self, 'git_reflog_browser')\n self.gitReflogBrowserAct.setStatusTip(self.tr(\n 'Show a dialog to browse the reflog of the local project'\n ))\n self.gitReflogBrowserAct.setWhatsThis(self.tr(\n \"\"\"<b>Show reflog browser</b>\"\"\"\n \"\"\"<p>This shows a dialog to browse the reflog of the local\"\"\"\n \"\"\" project. A limited number of entries is shown first.\"\"\"\n \"\"\" More can be retrieved later on.</p>\"\"\"\n ))\n self.gitReflogBrowserAct.triggered.connect(self.__gitReflogBrowser)\n self.actions.append(self.gitReflogBrowserAct)\n \n self.vcsDiffAct = E5Action(\n self.tr('Show differences'),\n UI.PixmapCache.getIcon(\"vcsDiff.png\"),\n self.tr('Show &differences...'),\n 0, 0, self, 'git_diff')\n self.vcsDiffAct.setStatusTip(self.tr(\n 'Show the differences of the local project to the repository'\n ))\n self.vcsDiffAct.setWhatsThis(self.tr(\n \"\"\"<b>Show differences</b>\"\"\"\n \"\"\"<p>This shows differences of the local project to the\"\"\"\n \"\"\" repository.</p>\"\"\"\n ))\n self.vcsDiffAct.triggered.connect(self._vcsDiff)\n self.actions.append(self.vcsDiffAct)\n \n self.gitExtDiffAct = E5Action(\n self.tr('Show differences (extended)'),\n UI.PixmapCache.getIcon(\"vcsDiff.png\"),\n self.tr('Show differences (extended) ...'),\n 0, 0, self, 'git_extendeddiff')\n self.gitExtDiffAct.setStatusTip(self.tr(\n 'Show the difference of revisions of the project to the repository'\n ))\n self.gitExtDiffAct.setWhatsThis(self.tr(\n \"\"\"<b>Show differences (extended)</b>\"\"\"\n \"\"\"<p>This shows differences of selectable revisions of the\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitExtDiffAct.triggered.connect(self.__gitExtendedDiff)\n self.actions.append(self.gitExtDiffAct)\n \n self.vcsStatusAct = E5Action(\n self.tr('Show status'),\n UI.PixmapCache.getIcon(\"vcsStatus.png\"),\n self.tr('Show &status...'),\n 0, 0, self, 'git_status')\n self.vcsStatusAct.setStatusTip(self.tr(\n 'Show the status of the local project'\n ))\n self.vcsStatusAct.setWhatsThis(self.tr(\n \"\"\"<b>Show status</b>\"\"\"\n \"\"\"<p>This shows the status of the local project.</p>\"\"\"\n ))\n self.vcsStatusAct.triggered.connect(self._vcsStatus)\n self.actions.append(self.vcsStatusAct)\n \n self.vcsSwitchAct = E5Action(\n 
self.tr('Switch'),\n UI.PixmapCache.getIcon(\"vcsSwitch.png\"),\n self.tr('S&witch...'),\n 0, 0, self, 'git_switch')\n self.vcsSwitchAct.setStatusTip(self.tr(\n 'Switch the working directory to another revision'\n ))\n self.vcsSwitchAct.setWhatsThis(self.tr(\n \"\"\"<b>Switch</b>\"\"\"\n \"\"\"<p>This switches the working directory to another\"\"\"\n \"\"\" revision.</p>\"\"\"\n ))\n self.vcsSwitchAct.triggered.connect(self._vcsSwitch)\n self.actions.append(self.vcsSwitchAct)\n \n self.vcsTagAct = E5Action(\n self.tr('Tag in repository'),\n UI.PixmapCache.getIcon(\"vcsTag.png\"),\n self.tr('&Tag in repository...'),\n 0, 0, self, 'git_tag')\n self.vcsTagAct.setStatusTip(self.tr(\n 'Perform tag operations for the local project'\n ))\n self.vcsTagAct.setWhatsThis(self.tr(\n \"\"\"<b>Tag in repository</b>\"\"\"\n \"\"\"<p>This performs selectable tag operations for the local\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.vcsTagAct.triggered.connect(self._vcsTag)\n self.actions.append(self.vcsTagAct)\n \n self.gitTagListAct = E5Action(\n self.tr('List tags'),\n self.tr('&List tags...'),\n 0, 0, self, 'git_list_tags')\n self.gitTagListAct.setStatusTip(self.tr(\n 'List tags of the project'\n ))\n self.gitTagListAct.setWhatsThis(self.tr(\n \"\"\"<b>List tags</b>\"\"\"\n \"\"\"<p>This lists the tags of the project.</p>\"\"\"\n ))\n self.gitTagListAct.triggered.connect(self.__gitTagList)\n self.actions.append(self.gitTagListAct)\n \n self.gitDescribeTagAct = E5Action(\n self.tr('Show most recent tag'),\n self.tr('Show most recent tag'),\n 0, 0, self, 'git_describe_tag')\n self.gitDescribeTagAct.setStatusTip(self.tr(\n 'Show the most recent tag reachable from the work tree'\n ))\n self.gitDescribeTagAct.setWhatsThis(self.tr(\n \"\"\"<b>Show most recent tag</b>\"\"\"\n \"\"\"<p>This shows the most recent tag reachable from the work\"\"\"\n \"\"\" tree.</p>\"\"\"\n ))\n self.gitDescribeTagAct.triggered.connect(self.__gitDescribeTag)\n self.actions.append(self.gitDescribeTagAct)\n \n self.gitBranchListAct = E5Action(\n self.tr('List branches'),\n self.tr('&List branches...'),\n 0, 0, self, 'git_list_branches')\n self.gitBranchListAct.setStatusTip(self.tr(\n 'List branches of the project'\n ))\n self.gitBranchListAct.setWhatsThis(self.tr(\n \"\"\"<b>List branches</b>\"\"\"\n \"\"\"<p>This lists the branches of the project.</p>\"\"\"\n ))\n self.gitBranchListAct.triggered.connect(self.__gitBranchList)\n self.actions.append(self.gitBranchListAct)\n \n self.gitMergedBranchListAct = E5Action(\n self.tr('List merged branches'),\n self.tr('List &merged branches...'),\n 0, 0, self, 'git_list_merged_branches')\n self.gitMergedBranchListAct.setStatusTip(self.tr(\n 'List merged branches of the project'\n ))\n self.gitMergedBranchListAct.setWhatsThis(self.tr(\n \"\"\"<b>List merged branches</b>\"\"\"\n \"\"\"<p>This lists the merged branches of the project.</p>\"\"\"\n ))\n self.gitMergedBranchListAct.triggered.connect(\n self.__gitMergedBranchList)\n self.actions.append(self.gitMergedBranchListAct)\n \n self.gitNotMergedBranchListAct = E5Action(\n self.tr('List non-merged branches'),\n self.tr('List &non-merged branches...'),\n 0, 0, self, 'git_list_non_merged_branches')\n self.gitNotMergedBranchListAct.setStatusTip(self.tr(\n 'List non-merged branches of the project'\n ))\n self.gitNotMergedBranchListAct.setWhatsThis(self.tr(\n \"\"\"<b>List non-merged branches</b>\"\"\"\n \"\"\"<p>This lists the non-merged branches of the project.</p>\"\"\"\n ))\n self.gitNotMergedBranchListAct.triggered.connect(\n 
self.__gitNotMergedBranchList)\n self.actions.append(self.gitNotMergedBranchListAct)\n \n self.gitBranchAct = E5Action(\n self.tr('Branch in repository'),\n UI.PixmapCache.getIcon(\"vcsBranch.png\"),\n self.tr('&Branch in repository...'),\n 0, 0, self, 'git_branch')\n self.gitBranchAct.setStatusTip(self.tr(\n 'Perform branch operations for the local project'\n ))\n self.gitBranchAct.setWhatsThis(self.tr(\n \"\"\"<b>Branch in repository</b>\"\"\"\n \"\"\"<p>This performs selectable branch operations for the local\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitBranchAct.triggered.connect(self.__gitBranch)\n self.actions.append(self.gitBranchAct)\n \n self.gitDeleteRemoteBranchAct = E5Action(\n self.tr('Delete Remote Branch'),\n self.tr('&Delete Remote Branch...'),\n 0, 0, self, 'git_delete_remote_branch')\n self.gitDeleteRemoteBranchAct.setStatusTip(self.tr(\n 'Delete a branch from a remote repository'\n ))\n self.gitDeleteRemoteBranchAct.setWhatsThis(self.tr(\n \"\"\"<b>Delete Remote Branch</b>\"\"\"\n \"\"\"<p>This deletes a branch from a remote repository.</p>\"\"\"\n ))\n self.gitDeleteRemoteBranchAct.triggered.connect(self.__gitDeleteBranch)\n self.actions.append(self.gitDeleteRemoteBranchAct)\n \n self.gitShowBranchAct = E5Action(\n self.tr('Show current branch'),\n self.tr('Show current branch'),\n 0, 0, self, 'git_show_branch')\n self.gitShowBranchAct.setStatusTip(self.tr(\n 'Show the current branch of the project'\n ))\n self.gitShowBranchAct.setWhatsThis(self.tr(\n \"\"\"<b>Show current branch</b>\"\"\"\n \"\"\"<p>This shows the current branch of the project.</p>\"\"\"\n ))\n self.gitShowBranchAct.triggered.connect(self.__gitShowBranch)\n self.actions.append(self.gitShowBranchAct)\n \n self.vcsRevertAct = E5Action(\n self.tr('Revert changes'),\n UI.PixmapCache.getIcon(\"vcsRevert.png\"),\n self.tr('Re&vert changes'),\n 0, 0, self, 'git_revert')\n self.vcsRevertAct.setStatusTip(self.tr(\n 'Revert all changes made to the local project'\n ))\n self.vcsRevertAct.setWhatsThis(self.tr(\n \"\"\"<b>Revert changes</b>\"\"\"\n \"\"\"<p>This reverts all changes made to the local project.</p>\"\"\"\n ))\n self.vcsRevertAct.triggered.connect(self.__gitRevert)\n self.actions.append(self.vcsRevertAct)\n \n self.gitUnstageAct = E5Action(\n self.tr('Unstage changes'),\n UI.PixmapCache.getIcon(\"vcsRevert.png\"),\n self.tr('&Unstage changes'),\n 0, 0, self, 'git_revert')\n self.gitUnstageAct.setStatusTip(self.tr(\n 'Unstage all changes made to the local project'\n ))\n self.gitUnstageAct.setWhatsThis(self.tr(\n \"\"\"<b>Unstage changes</b>\"\"\"\n \"\"\"<p>This unstages all changes made to the local project.</p>\"\"\"\n ))\n self.gitUnstageAct.triggered.connect(self.__gitUnstage)\n self.actions.append(self.gitUnstageAct)\n \n self.vcsMergeAct = E5Action(\n self.tr('Merge'),\n UI.PixmapCache.getIcon(\"vcsMerge.png\"),\n self.tr('Mer&ge changes...'),\n 0, 0, self, 'git_merge')\n self.vcsMergeAct.setStatusTip(self.tr(\n 'Merge changes into the local project'\n ))\n self.vcsMergeAct.setWhatsThis(self.tr(\n \"\"\"<b>Merge</b>\"\"\"\n \"\"\"<p>This merges changes into the local project.</p>\"\"\"\n ))\n self.vcsMergeAct.triggered.connect(self._vcsMerge)\n self.actions.append(self.vcsMergeAct)\n \n self.gitCancelMergeAct = E5Action(\n self.tr('Cancel uncommitted/failed merge'),\n self.tr('Cancel uncommitted/failed merge'),\n 0, 0, self, 'git_cancel_merge')\n self.gitCancelMergeAct.setStatusTip(self.tr(\n 'Cancel an uncommitted or failed merge and lose all changes'\n ))\n 
self.gitCancelMergeAct.setWhatsThis(self.tr(\n \"\"\"<b>Cancel uncommitted/failed merge</b>\"\"\"\n \"\"\"<p>This cancels an uncommitted or failed merge causing all\"\"\"\n \"\"\" changes to be lost.</p>\"\"\"\n ))\n self.gitCancelMergeAct.triggered.connect(self.__gitCancelMerge)\n self.actions.append(self.gitCancelMergeAct)\n \n self.gitCommitMergeAct = E5Action(\n self.tr('Commit failed merge'),\n self.tr('Commit failed merge'),\n 0, 0, self, 'git_commit_merge')\n self.gitCommitMergeAct.setStatusTip(self.tr(\n 'Commit a failed merge after conflicts have been resolved'\n ))\n self.gitCommitMergeAct.setWhatsThis(self.tr(\n \"\"\"<b>Commit failed merge</b>\"\"\"\n \"\"\"<p>This commits a failed merge after conflicts have been\"\"\"\n \"\"\" resolved.</p>\"\"\"\n ))\n self.gitCommitMergeAct.triggered.connect(self.__gitCommitMerge)\n self.actions.append(self.gitCommitMergeAct)\n \n self.vcsCleanupAct = E5Action(\n self.tr('Cleanup'),\n self.tr('Cleanu&p'),\n 0, 0, self, 'git_cleanup')\n self.vcsCleanupAct.setStatusTip(self.tr(\n 'Cleanup the local project'\n ))\n self.vcsCleanupAct.setWhatsThis(self.tr(\n \"\"\"<b>Cleanup</b>\"\"\"\n \"\"\"<p>This performs a cleanup of the local project.</p>\"\"\"\n ))\n self.vcsCleanupAct.triggered.connect(self._vcsCleanup)\n self.actions.append(self.vcsCleanupAct)\n \n self.vcsCommandAct = E5Action(\n self.tr('Execute command'),\n self.tr('E&xecute command...'),\n 0, 0, self, 'git_command')\n self.vcsCommandAct.setStatusTip(self.tr(\n 'Execute an arbitrary Git command'\n ))\n self.vcsCommandAct.setWhatsThis(self.tr(\n \"\"\"<b>Execute command</b>\"\"\"\n \"\"\"<p>This opens a dialog to enter an arbitrary Git\"\"\"\n \"\"\" command.</p>\"\"\"\n ))\n self.vcsCommandAct.triggered.connect(self._vcsCommand)\n self.actions.append(self.vcsCommandAct)\n \n self.gitConfigAct = E5Action(\n self.tr('Configure'),\n self.tr('Configure...'),\n 0, 0, self, 'git_configure')\n self.gitConfigAct.setStatusTip(self.tr(\n 'Show the configuration dialog with the Git page selected'\n ))\n self.gitConfigAct.setWhatsThis(self.tr(\n \"\"\"<b>Configure</b>\"\"\"\n \"\"\"<p>Show the configuration dialog with the Git page\"\"\"\n \"\"\" selected.</p>\"\"\"\n ))\n self.gitConfigAct.triggered.connect(self.__gitConfigure)\n self.actions.append(self.gitConfigAct)\n \n self.gitRemotesShowAct = E5Action(\n self.tr('Show Remotes'),\n self.tr('Show Remotes...'),\n 0, 0, self, 'git_show_remotes')\n self.gitRemotesShowAct.setStatusTip(self.tr(\n 'Show the available remote repositories'\n ))\n self.gitRemotesShowAct.setWhatsThis(self.tr(\n \"\"\"<b>Show Remotes</b>\"\"\"\n \"\"\"<p>This shows the remote repositories available for\"\"\"\n \"\"\" pulling, fetching and pushing.</p>\"\"\"\n ))\n self.gitRemotesShowAct.triggered.connect(self.__gitShowRemotes)\n self.actions.append(self.gitRemotesShowAct)\n \n self.gitRemoteShowAct = E5Action(\n self.tr('Show Remote Info'),\n self.tr('Show Remote Info...'),\n 0, 0, self, 'git_show_remote_info')\n self.gitRemoteShowAct.setStatusTip(self.tr(\n 'Show information about a remote repository'\n ))\n self.gitRemoteShowAct.setWhatsThis(self.tr(\n \"\"\"<b>Show Remotes</b>\"\"\"\n \"\"\"<p>This shows the remote repositories available for\"\"\"\n \"\"\" pulling, fetching and pushing.</p>\"\"\"\n ))\n self.gitRemoteShowAct.triggered.connect(self.__gitShowRemote)\n self.actions.append(self.gitRemoteShowAct)\n \n self.gitRemoteAddAct = E5Action(\n self.tr('Add'),\n self.tr('Add...'),\n 0, 0, self, 'git_add_remote')\n self.gitRemoteAddAct.setStatusTip(self.tr(\n 'Add 
a remote repository'\n ))\n self.gitRemoteAddAct.setWhatsThis(self.tr(\n \"\"\"<b>Add</b>\"\"\"\n \"\"\"<p>This adds a remote repository.</p>\"\"\"\n ))\n self.gitRemoteAddAct.triggered.connect(self.__gitAddRemote)\n self.actions.append(self.gitRemoteAddAct)\n \n self.gitRemoteRemoveAct = E5Action(\n self.tr('Remove'),\n self.tr('Remove...'),\n 0, 0, self, 'git_remove_remote')\n self.gitRemoteRemoveAct.setStatusTip(self.tr(\n 'Remove a remote repository'\n ))\n self.gitRemoteRemoveAct.setWhatsThis(self.tr(\n \"\"\"<b>Remove</b>\"\"\"\n \"\"\"<p>This removes a remote repository.</p>\"\"\"\n ))\n self.gitRemoteRemoveAct.triggered.connect(self.__gitRemoveRemote)\n self.actions.append(self.gitRemoteRemoveAct)\n \n self.gitRemotePruneAct = E5Action(\n self.tr('Prune'),\n self.tr('Prune...'),\n 0, 0, self, 'git_prune_remote')\n self.gitRemotePruneAct.setStatusTip(self.tr(\n 'Prune stale remote-tracking branches of a remote repository'\n ))\n self.gitRemotePruneAct.setWhatsThis(self.tr(\n \"\"\"<b>Prune</b>\"\"\"\n \"\"\"<p>This prunes stale remote-tracking branches of a remote\"\"\"\n \"\"\" repository.</p>\"\"\"\n ))\n self.gitRemotePruneAct.triggered.connect(self.__gitPruneRemote)\n self.actions.append(self.gitRemotePruneAct)\n \n self.gitRemoteRenameAct = E5Action(\n self.tr('Rename'),\n self.tr('Rename...'),\n 0, 0, self, 'git_rename_remote')\n self.gitRemoteRenameAct.setStatusTip(self.tr(\n 'Rename a remote repository'\n ))\n self.gitRemoteRenameAct.setWhatsThis(self.tr(\n \"\"\"<b>Rename</b>\"\"\"\n \"\"\"<p>This renames a remote repository.</p>\"\"\"\n ))\n self.gitRemoteRenameAct.triggered.connect(self.__gitRenameRemote)\n self.actions.append(self.gitRemoteRenameAct)\n \n self.gitRemoteChangeUrlAct = E5Action(\n self.tr('Change URL'),\n self.tr('Change URL...'),\n 0, 0, self, 'git_change_remote_url')\n self.gitRemoteChangeUrlAct.setStatusTip(self.tr(\n 'Change the URL of a remote repository'\n ))\n self.gitRemoteChangeUrlAct.setWhatsThis(self.tr(\n \"\"\"<b>Change URL</b>\"\"\"\n \"\"\"<p>This changes the URL of a remote repository.</p>\"\"\"\n ))\n self.gitRemoteChangeUrlAct.triggered.connect(self.__gitChangeRemoteUrl)\n self.actions.append(self.gitRemoteChangeUrlAct)\n \n self.gitRemoteCredentialsAct = E5Action(\n self.tr('Credentials'),\n self.tr('Credentials...'),\n 0, 0, self, 'git_remote_credentials')\n self.gitRemoteCredentialsAct.setStatusTip(self.tr(\n 'Change or set the user credentials of a remote repository'\n ))\n self.gitRemoteCredentialsAct.setWhatsThis(self.tr(\n \"\"\"<b>Credentials</b>\"\"\"\n \"\"\"<p>This changes or sets the user credentials of a\"\"\"\n \"\"\" remote repository.</p>\"\"\"\n ))\n self.gitRemoteCredentialsAct.triggered.connect(\n self.__gitRemoteCredentials)\n self.actions.append(self.gitRemoteCredentialsAct)\n \n self.gitCherryPickAct = E5Action(\n self.tr('Copy Commits'),\n UI.PixmapCache.getIcon(\"vcsGraft.png\"),\n self.tr('Copy Commits'),\n 0, 0, self, 'git_cherrypick')\n self.gitCherryPickAct.setStatusTip(self.tr(\n 'Copies commits into the current branch'\n ))\n self.gitCherryPickAct.setWhatsThis(self.tr(\n \"\"\"<b>Copy Commits</b>\"\"\"\n \"\"\"<p>This copies commits on top of the current branch.</p>\"\"\"\n ))\n self.gitCherryPickAct.triggered.connect(self.__gitCherryPick)\n self.actions.append(self.gitCherryPickAct)\n \n self.gitCherryPickContinueAct = E5Action(\n self.tr('Continue Copying Session'),\n self.tr('Continue Copying Session'),\n 0, 0, self, 'git_cherrypick_continue')\n self.gitCherryPickContinueAct.setStatusTip(self.tr(\n 
'Continue the last copying session after conflicts were resolved'\n ))\n self.gitCherryPickContinueAct.setWhatsThis(self.tr(\n \"\"\"<b>Continue Copying Session</b>\"\"\"\n \"\"\"<p>This continues the last copying session after conflicts\"\"\"\n \"\"\" were resolved.</p>\"\"\"\n ))\n self.gitCherryPickContinueAct.triggered.connect(\n self.__gitCherryPickContinue)\n self.actions.append(self.gitCherryPickContinueAct)\n \n self.gitCherryPickQuitAct = E5Action(\n self.tr('Quit Copying Session'),\n self.tr('Quit Copying Session'),\n 0, 0, self, 'git_cherrypick_quit')\n self.gitCherryPickQuitAct.setStatusTip(self.tr(\n 'Quit the current copying session'\n ))\n self.gitCherryPickQuitAct.setWhatsThis(self.tr(\n \"\"\"<b>Quit Copying Session</b>\"\"\"\n \"\"\"<p>This quits the current copying session.</p>\"\"\"\n ))\n self.gitCherryPickQuitAct.triggered.connect(self.__gitCherryPickQuit)\n self.actions.append(self.gitCherryPickQuitAct)\n \n self.gitCherryPickAbortAct = E5Action(\n self.tr('Cancel Copying Session'),\n self.tr('Cancel Copying Session'),\n 0, 0, self, 'git_cherrypick_abort')\n self.gitCherryPickAbortAct.setStatusTip(self.tr(\n 'Cancel the current copying session and return to the'\n ' previous state'\n ))\n self.gitCherryPickAbortAct.setWhatsThis(self.tr(\n \"\"\"<b>Cancel Copying Session</b>\"\"\"\n \"\"\"<p>This cancels the current copying session and returns to\"\"\"\n \"\"\" the previous state.</p>\"\"\"\n ))\n self.gitCherryPickAbortAct.triggered.connect(self.__gitCherryPickAbort)\n self.actions.append(self.gitCherryPickAbortAct)\n \n self.gitStashAct = E5Action(\n self.tr('Stash changes'),\n self.tr('Stash changes...'),\n 0, 0, self, 'git_stash')\n self.gitStashAct.setStatusTip(self.tr(\n 'Stash all current changes of the project'\n ))\n self.gitStashAct.setWhatsThis(self.tr(\n \"\"\"<b>Stash changes</b>\"\"\"\n \"\"\"<p>This stashes all current changes of the project.</p>\"\"\"\n ))\n self.gitStashAct.triggered.connect(self.__gitStashSave)\n self.actions.append(self.gitStashAct)\n \n self.gitStashBrowserAct = E5Action(\n self.tr('Show stash browser'),\n self.tr('Show stash browser...'),\n 0, 0, self, 'git_stash_browser')\n self.gitStashBrowserAct.setStatusTip(self.tr(\n 'Show a dialog with all stashes'\n ))\n self.gitStashBrowserAct.setWhatsThis(self.tr(\n \"\"\"<b>Show stash browser...</b>\"\"\"\n \"\"\"<p>This shows a dialog listing all available stashes.\"\"\"\n \"\"\" Actions on these stashes may be executed via the\"\"\"\n \"\"\" context menu.</p>\"\"\"\n ))\n self.gitStashBrowserAct.triggered.connect(self.__gitStashBrowser)\n self.actions.append(self.gitStashBrowserAct)\n \n self.gitStashShowAct = E5Action(\n self.tr('Show stash'),\n self.tr('Show stash...'),\n 0, 0, self, 'git_stash_show')\n self.gitStashShowAct.setStatusTip(self.tr(\n 'Show a dialog with a patch of a stash'\n ))\n self.gitStashShowAct.setWhatsThis(self.tr(\n \"\"\"<b>Show stash...</b>\"\"\"\n \"\"\"<p>This shows a dialog with a patch of a selectable\"\"\"\n \"\"\" stash.</p>\"\"\"\n ))\n self.gitStashShowAct.triggered.connect(self.__gitStashShow)\n self.actions.append(self.gitStashShowAct)\n \n self.gitStashApplyAct = E5Action(\n self.tr('Restore && Keep'),\n self.tr('Restore && Keep'),\n 0, 0, self, 'git_stash_apply')\n self.gitStashApplyAct.setStatusTip(self.tr(\n 'Restore a stash but keep it'\n ))\n self.gitStashApplyAct.setWhatsThis(self.tr(\n \"\"\"<b>Restore &amp; Keep</b>\"\"\"\n \"\"\"<p>This restores a selectable stash and keeps it.</p>\"\"\"\n ))\n 
self.gitStashApplyAct.triggered.connect(self.__gitStashApply)\n self.actions.append(self.gitStashApplyAct)\n \n self.gitStashPopAct = E5Action(\n self.tr('Restore && Delete'),\n self.tr('Restore && Delete'),\n 0, 0, self, 'git_stash_pop')\n self.gitStashPopAct.setStatusTip(self.tr(\n 'Restore a stash and delete it'\n ))\n self.gitStashPopAct.setWhatsThis(self.tr(\n \"\"\"<b>Restore &amp; Delete</b>\"\"\"\n \"\"\"<p>This restores a selectable stash and deletes it.</p>\"\"\"\n ))\n self.gitStashPopAct.triggered.connect(self.__gitStashPop)\n self.actions.append(self.gitStashPopAct)\n \n self.gitStashBranchAct = E5Action(\n self.tr('Create Branch'),\n self.tr('Create Branch'),\n 0, 0, self, 'git_stash_branch')\n self.gitStashBranchAct.setStatusTip(self.tr(\n 'Create a new branch and restore a stash into it'\n ))\n self.gitStashBranchAct.setWhatsThis(self.tr(\n \"\"\"<b>Create Branch</b>\"\"\"\n \"\"\"<p>This creates a new branch and restores a stash into\"\"\"\n \"\"\" it.</p>\"\"\"\n ))\n self.gitStashBranchAct.triggered.connect(self.__gitStashBranch)\n self.actions.append(self.gitStashBranchAct)\n \n self.gitStashDropAct = E5Action(\n self.tr('Delete'),\n self.tr('Delete'),\n 0, 0, self, 'git_stash_delete')\n self.gitStashDropAct.setStatusTip(self.tr(\n 'Delete a stash'\n ))\n self.gitStashDropAct.setWhatsThis(self.tr(\n \"\"\"<b>Delete</b>\"\"\"\n \"\"\"<p>This deletes a stash.</p>\"\"\"\n ))\n self.gitStashDropAct.triggered.connect(self.__gitStashDrop)\n self.actions.append(self.gitStashDropAct)\n \n self.gitStashClearAct = E5Action(\n self.tr('Delete All'),\n self.tr('Delete All'),\n 0, 0, self, 'git_stash_delete_all')\n self.gitStashClearAct.setStatusTip(self.tr(\n 'Delete all stashes'\n ))\n self.gitStashClearAct.setWhatsThis(self.tr(\n \"\"\"<b>Delete All</b>\"\"\"\n \"\"\"<p>This deletes all stashes.</p>\"\"\"\n ))\n self.gitStashClearAct.triggered.connect(self.__gitStashClear)\n self.actions.append(self.gitStashClearAct)\n \n self.gitEditUserConfigAct = E5Action(\n self.tr('Edit user configuration'),\n self.tr('Edit user configuration...'),\n 0, 0, self, 'git_user_configure')\n self.gitEditUserConfigAct.setStatusTip(self.tr(\n 'Show an editor to edit the user configuration file'\n ))\n self.gitEditUserConfigAct.setWhatsThis(self.tr(\n \"\"\"<b>Edit user configuration</b>\"\"\"\n \"\"\"<p>Show an editor to edit the user configuration file.</p>\"\"\"\n ))\n self.gitEditUserConfigAct.triggered.connect(self.__gitEditUserConfig)\n self.actions.append(self.gitEditUserConfigAct)\n \n self.gitRepoConfigAct = E5Action(\n self.tr('Edit repository configuration'),\n self.tr('Edit repository configuration...'),\n 0, 0, self, 'git_repo_configure')\n self.gitRepoConfigAct.setStatusTip(self.tr(\n 'Show an editor to edit the repository configuration file'\n ))\n self.gitRepoConfigAct.setWhatsThis(self.tr(\n \"\"\"<b>Edit repository configuration</b>\"\"\"\n \"\"\"<p>Show an editor to edit the repository configuration\"\"\"\n \"\"\" file.</p>\"\"\"\n ))\n self.gitRepoConfigAct.triggered.connect(self.__gitEditRepoConfig)\n self.actions.append(self.gitRepoConfigAct)\n \n self.gitCreateIgnoreAct = E5Action(\n self.tr('Create .gitignore'),\n self.tr('Create .gitignore'),\n 0, 0, self, 'git_create_ignore')\n self.gitCreateIgnoreAct.setStatusTip(self.tr(\n 'Create a .gitignore file with default values'\n ))\n self.gitCreateIgnoreAct.setWhatsThis(self.tr(\n \"\"\"<b>Create .gitignore</b>\"\"\"\n \"\"\"<p>This creates a .gitignore file with default values.</p>\"\"\"\n ))\n 
self.gitCreateIgnoreAct.triggered.connect(self.__gitCreateIgnore)\n self.actions.append(self.gitCreateIgnoreAct)\n \n self.gitShowConfigAct = E5Action(\n self.tr('Show combined configuration settings'),\n self.tr('Show combined configuration settings...'),\n 0, 0, self, 'git_show_config')\n self.gitShowConfigAct.setStatusTip(self.tr(\n 'Show the combined configuration settings from all configuration'\n ' files'\n ))\n self.gitShowConfigAct.setWhatsThis(self.tr(\n \"\"\"<b>Show combined configuration settings</b>\"\"\"\n \"\"\"<p>This shows the combined configuration settings\"\"\"\n \"\"\" from all configuration files.</p>\"\"\"\n ))\n self.gitShowConfigAct.triggered.connect(self.__gitShowConfig)\n self.actions.append(self.gitShowConfigAct)\n \n self.gitVerifyAct = E5Action(\n self.tr('Verify repository'),\n self.tr('Verify repository...'),\n 0, 0, self, 'git_verify')\n self.gitVerifyAct.setStatusTip(self.tr(\n 'Verify the connectivity and validity of objects of the database'\n ))\n self.gitVerifyAct.setWhatsThis(self.tr(\n \"\"\"<b>Verify repository</b>\"\"\"\n \"\"\"<p>This verifies the connectivity and validity of objects\"\"\"\n \"\"\" of the database.</p>\"\"\"\n ))\n self.gitVerifyAct.triggered.connect(self.__gitVerify)\n self.actions.append(self.gitVerifyAct)\n \n self.gitHouseKeepingAct = E5Action(\n self.tr('Optimize repository'),\n self.tr('Optimize repository...'),\n 0, 0, self, 'git_housekeeping')\n self.gitHouseKeepingAct.setStatusTip(self.tr(\n 'Cleanup and optimize the local repository'\n ))\n self.gitHouseKeepingAct.setWhatsThis(self.tr(\n \"\"\"<b>Optimize repository</b>\"\"\"\n \"\"\"<p>This cleans up and optimizes the local repository.</p>\"\"\"\n ))\n self.gitHouseKeepingAct.triggered.connect(self.__gitHouseKeeping)\n self.actions.append(self.gitHouseKeepingAct)\n \n self.gitStatisticsAct = E5Action(\n self.tr('Repository Statistics'),\n self.tr('Repository Statistics...'),\n 0, 0, self, 'git_statistics')\n self.gitStatisticsAct.setStatusTip(self.tr(\n 'Show some statistics of the local repository'\n ))\n self.gitStatisticsAct.setWhatsThis(self.tr(\n \"\"\"<b>Repository Statistics</b>\"\"\"\n \"\"\"<p>This show some statistics of the local repository.</p>\"\"\"\n ))\n self.gitStatisticsAct.triggered.connect(self.__gitStatistics)\n self.actions.append(self.gitStatisticsAct)\n \n self.gitCreateArchiveAct = E5Action(\n self.tr('Create Archive'),\n self.tr('Create Archive'),\n 0, 0, self, 'git_create_archive')\n self.gitCreateArchiveAct.setStatusTip(self.tr(\n 'Create an archive from the local repository'\n ))\n self.gitCreateArchiveAct.setWhatsThis(self.tr(\n \"\"\"<b>Create Archive</b>\"\"\"\n \"\"\"<p>This creates an archive from the local repository.</p>\"\"\"\n ))\n self.gitCreateArchiveAct.triggered.connect(self.__gitCreateArchive)\n self.actions.append(self.gitCreateArchiveAct)\n \n self.gitBundleAct = E5Action(\n self.tr('Create bundle'),\n self.tr('Create bundle...'),\n 0, 0, self, 'mercurial_bundle_create')\n self.gitBundleAct.setStatusTip(self.tr(\n 'Create bundle file collecting changesets'\n ))\n self.gitBundleAct.setWhatsThis(self.tr(\n \"\"\"<b>Create bundle</b>\"\"\"\n \"\"\"<p>This creates a bundle file collecting selected\"\"\"\n \"\"\" changesets (git bundle create).</p>\"\"\"\n ))\n self.gitBundleAct.triggered.connect(self.__gitBundle)\n self.actions.append(self.gitBundleAct)\n \n self.gitBundleVerifyAct = E5Action(\n self.tr('Verify bundle'),\n self.tr('Verify bundle...'),\n 0, 0, self, 'mercurial_bundle_verify')\n 
self.gitBundleVerifyAct.setStatusTip(self.tr(\n 'Verify the validity and applicability of a bundle file'\n ))\n self.gitBundleVerifyAct.setWhatsThis(self.tr(\n \"\"\"<b>Verify bundle</b>\"\"\"\n \"\"\"<p>This verifies that a bundle file is valid and will\"\"\"\n \"\"\" apply cleanly.</p>\"\"\"\n ))\n self.gitBundleVerifyAct.triggered.connect(self.__gitVerifyBundle)\n self.actions.append(self.gitBundleVerifyAct)\n \n self.gitBundleListHeadsAct = E5Action(\n self.tr('List bundle heads'),\n self.tr('List bundle heads...'),\n 0, 0, self, 'mercurial_bundle_list_heads')\n self.gitBundleListHeadsAct.setStatusTip(self.tr(\n 'List all heads contained in a bundle file'\n ))\n self.gitBundleListHeadsAct.setWhatsThis(self.tr(\n \"\"\"<b>List bundle heads</b>\"\"\"\n \"\"\"<p>This lists all heads contained in a bundle file.</p>\"\"\"\n ))\n self.gitBundleListHeadsAct.triggered.connect(self.__gitBundleListHeads)\n self.actions.append(self.gitBundleListHeadsAct)\n \n self.gitBundleApplyFetchAct = E5Action(\n self.tr('Apply Bundle (fetch)'),\n self.tr('Apply Bundle (fetch)...'),\n 0, 0, self, 'mercurial_bundle_apply_fetch')\n self.gitBundleApplyFetchAct.setStatusTip(self.tr(\n 'Apply a head of a bundle file using fetch'\n ))\n self.gitBundleApplyFetchAct.setWhatsThis(self.tr(\n \"\"\"<b>Apply Bundle (fetch)</b>\"\"\"\n \"\"\"<p>This applies a head of a bundle file using fetch.</p>\"\"\"\n ))\n self.gitBundleApplyFetchAct.triggered.connect(self.__gitBundleFetch)\n self.actions.append(self.gitBundleApplyFetchAct)\n \n self.gitBundleApplyPullAct = E5Action(\n self.tr('Apply Bundle (pull)'),\n self.tr('Apply Bundle (pull)...'),\n 0, 0, self, 'mercurial_bundle_apply_pull')\n self.gitBundleApplyPullAct.setStatusTip(self.tr(\n 'Apply a head of a bundle file using pull'\n ))\n self.gitBundleApplyPullAct.setWhatsThis(self.tr(\n \"\"\"<b>Apply Bundle (pull)</b>\"\"\"\n \"\"\"<p>This applies a head of a bundle file using pull.</p>\"\"\"\n ))\n self.gitBundleApplyPullAct.triggered.connect(self.__gitBundlePull)\n self.actions.append(self.gitBundleApplyPullAct)\n \n self.gitBisectStartAct = E5Action(\n self.tr('Start'),\n self.tr('Start'),\n 0, 0, self, 'git_bisect_start')\n self.gitBisectStartAct.setStatusTip(self.tr(\n 'Start a bisect session'\n ))\n self.gitBisectStartAct.setWhatsThis(self.tr(\n \"\"\"<b>Start</b>\"\"\"\n \"\"\"<p>This starts a bisect session.</p>\"\"\"\n ))\n self.gitBisectStartAct.triggered.connect(self.__gitBisectStart)\n self.actions.append(self.gitBisectStartAct)\n \n self.gitBisectStartExtendedAct = E5Action(\n self.tr('Start (Extended)'),\n self.tr('Start (Extended)'),\n 0, 0, self, 'git_bisect_start_extended')\n self.gitBisectStartExtendedAct.setStatusTip(self.tr(\n 'Start a bisect session giving a bad and optionally good commits'\n ))\n self.gitBisectStartExtendedAct.setWhatsThis(self.tr(\n \"\"\"<b>Start (Extended)</b>\"\"\"\n \"\"\"<p>This starts a bisect session giving a bad and optionally\"\"\"\n \"\"\" good commits.</p>\"\"\"\n ))\n self.gitBisectStartExtendedAct.triggered.connect(\n self.__gitBisectStartExtended)\n self.actions.append(self.gitBisectStartExtendedAct)\n \n self.gitBisectGoodAct = E5Action(\n self.tr('Mark as \"good\"'),\n self.tr('Mark as \"good\"...'),\n 0, 0, self, 'git_bisect_good')\n self.gitBisectGoodAct.setStatusTip(self.tr(\n 'Mark a selectable revision as good'\n ))\n self.gitBisectGoodAct.setWhatsThis(self.tr(\n \"\"\"<b>Mark as \"good\"</b>\"\"\"\n \"\"\"<p>This marks a selectable revision as good.</p>\"\"\"\n ))\n 
self.gitBisectGoodAct.triggered.connect(self.__gitBisectGood)\n self.actions.append(self.gitBisectGoodAct)\n \n self.gitBisectBadAct = E5Action(\n self.tr('Mark as \"bad\"'),\n self.tr('Mark as \"bad\"...'),\n 0, 0, self, 'git_bisect_bad')\n self.gitBisectBadAct.setStatusTip(self.tr(\n 'Mark a selectable revision as bad'\n ))\n self.gitBisectBadAct.setWhatsThis(self.tr(\n \"\"\"<b>Mark as \"bad\"</b>\"\"\"\n \"\"\"<p>This marks a selectable revision as bad.</p>\"\"\"\n ))\n self.gitBisectBadAct.triggered.connect(self.__gitBisectBad)\n self.actions.append(self.gitBisectBadAct)\n \n self.gitBisectSkipAct = E5Action(\n self.tr('Skip'),\n self.tr('Skip...'),\n 0, 0, self, 'git_bisect_skip')\n self.gitBisectSkipAct.setStatusTip(self.tr(\n 'Skip a selectable revision'\n ))\n self.gitBisectSkipAct.setWhatsThis(self.tr(\n \"\"\"<b>Skip</b>\"\"\"\n \"\"\"<p>This skips a selectable revision.</p>\"\"\"\n ))\n self.gitBisectSkipAct.triggered.connect(self.__gitBisectSkip)\n self.actions.append(self.gitBisectSkipAct)\n \n self.gitBisectResetAct = E5Action(\n self.tr('Reset'),\n self.tr('Reset...'),\n 0, 0, self, 'git_bisect_reset')\n self.gitBisectResetAct.setStatusTip(self.tr(\n 'Reset the bisect session'\n ))\n self.gitBisectResetAct.setWhatsThis(self.tr(\n \"\"\"<b>Reset</b>\"\"\"\n \"\"\"<p>This resets the bisect session.</p>\"\"\"\n ))\n self.gitBisectResetAct.triggered.connect(self.__gitBisectReset)\n self.actions.append(self.gitBisectResetAct)\n \n self.gitBisectLogBrowserAct = E5Action(\n self.tr('Show bisect log browser'),\n UI.PixmapCache.getIcon(\"vcsLog.png\"),\n self.tr('Show bisect log browser'),\n 0, 0, self, 'git_bisect_log_browser')\n self.gitBisectLogBrowserAct.setStatusTip(self.tr(\n 'Show a dialog to browse the bisect log of the local project'\n ))\n self.gitBisectLogBrowserAct.setWhatsThis(self.tr(\n \"\"\"<b>Show bisect log browser</b>\"\"\"\n \"\"\"<p>This shows a dialog to browse the bisect log of the local\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitBisectLogBrowserAct.triggered.connect(\n self.__gitBisectLogBrowser)\n self.actions.append(self.gitBisectLogBrowserAct)\n \n self.gitBisectCreateReplayAct = E5Action(\n self.tr('Create replay file'),\n self.tr('Create replay file'),\n 0, 0, self, 'git_bisect_create_replay')\n self.gitBisectCreateReplayAct.setStatusTip(self.tr(\n 'Create a replay file to repeat the current bisect session'\n ))\n self.gitBisectCreateReplayAct.setWhatsThis(self.tr(\n \"\"\"<b>Create replay file</b>\"\"\"\n \"\"\"<p>This creates a replay file to repeat the current bisect\"\"\"\n \"\"\" session.</p>\"\"\"\n ))\n self.gitBisectCreateReplayAct.triggered.connect(\n self.__gitBisectCreateReplay)\n self.actions.append(self.gitBisectCreateReplayAct)\n \n self.gitBisectEditReplayAct = E5Action(\n self.tr('Edit replay file'),\n self.tr('Edit replay file'),\n 0, 0, self, 'git_bisect_edit_replay')\n self.gitBisectEditReplayAct.setStatusTip(self.tr(\n 'Edit a bisect replay file'\n ))\n self.gitBisectEditReplayAct.setWhatsThis(self.tr(\n \"\"\"<b>Edit replay file</b>\"\"\"\n \"\"\"<p>This edits a bisect replay file.</p>\"\"\"\n ))\n self.gitBisectEditReplayAct.triggered.connect(\n self.__gitBisectEditReplay)\n self.actions.append(self.gitBisectEditReplayAct)\n \n self.gitBisectReplayAct = E5Action(\n self.tr('Replay session'),\n self.tr('Replay session'),\n 0, 0, self, 'git_bisect_replay')\n self.gitBisectReplayAct.setStatusTip(self.tr(\n 'Replay a bisect session from file'\n ))\n self.gitBisectReplayAct.setWhatsThis(self.tr(\n \"\"\"<b>Replay 
session</b>\"\"\"\n \"\"\"<p>This replays a bisect session from file.</p>\"\"\"\n ))\n self.gitBisectReplayAct.triggered.connect(self.__gitBisectReplay)\n self.actions.append(self.gitBisectReplayAct)\n \n self.gitCheckPatchesAct = E5Action(\n self.tr('Check patch files'),\n self.tr('Check patch files'),\n 0, 0, self, 'git_check_patches')\n self.gitCheckPatchesAct.setStatusTip(self.tr(\n 'Check a list of patch files, if they would apply cleanly'\n ))\n self.gitCheckPatchesAct.setWhatsThis(self.tr(\n \"\"\"<b>Check patch files</b>\"\"\"\n \"\"\"<p>This checks a list of patch files, if they would apply\"\"\"\n \"\"\" cleanly.</p>\"\"\"\n ))\n self.gitCheckPatchesAct.triggered.connect(self.__gitCheckPatches)\n self.actions.append(self.gitCheckPatchesAct)\n \n self.gitApplyPatchesAct = E5Action(\n self.tr('Apply patch files'),\n self.tr('Apply patch files'),\n 0, 0, self, 'git_apply_patches')\n self.gitApplyPatchesAct.setStatusTip(self.tr(\n 'Apply a list of patch files'\n ))\n self.gitApplyPatchesAct.setWhatsThis(self.tr(\n \"\"\"<b>Apply patch files</b>\"\"\"\n \"\"\"<p>This applies a list of patch files.</p>\"\"\"\n ))\n self.gitApplyPatchesAct.triggered.connect(self.__gitApplyPatches)\n self.actions.append(self.gitApplyPatchesAct)\n \n self.gitShowPatcheStatisticsAct = E5Action(\n self.tr('Show patch statistics'),\n self.tr('Show patch statistics'),\n 0, 0, self, 'git_show_patches_statistics')\n self.gitShowPatcheStatisticsAct.setStatusTip(self.tr(\n 'Show some statistics for a list of patch files'\n ))\n self.gitShowPatcheStatisticsAct.setWhatsThis(self.tr(\n \"\"\"<b>Show patch statistics</b>\"\"\"\n \"\"\"<p>This shows some statistics for a list of patch files.</p>\"\"\"\n ))\n self.gitShowPatcheStatisticsAct.triggered.connect(\n self.__gitShowPatchStatistics)\n self.actions.append(self.gitShowPatcheStatisticsAct)\n \n self.gitSubmoduleAddAct = E5Action(\n self.tr('Add'),\n self.tr('Add'),\n 0, 0, self, 'git_submodule_add')\n self.gitSubmoduleAddAct.setStatusTip(self.tr(\n 'Add a submodule to the current project'\n ))\n self.gitSubmoduleAddAct.setWhatsThis(self.tr(\n \"\"\"<b>Add</b>\"\"\"\n \"\"\"<p>This adds a submodule to the current project.</p>\"\"\"\n ))\n self.gitSubmoduleAddAct.triggered.connect(\n self.__gitSubmoduleAdd)\n self.actions.append(self.gitSubmoduleAddAct)\n \n self.gitSubmodulesListAct = E5Action(\n self.tr('List'),\n self.tr('List'),\n 0, 0, self, 'git_submodules_list')\n self.gitSubmodulesListAct.setStatusTip(self.tr(\n 'List the submodule of the current project'\n ))\n self.gitSubmodulesListAct.setWhatsThis(self.tr(\n \"\"\"<b>List</b>\"\"\"\n \"\"\"<p>This lists the submodules of the current project.</p>\"\"\"\n ))\n self.gitSubmodulesListAct.triggered.connect(\n self.__gitSubmodulesList)\n self.actions.append(self.gitSubmodulesListAct)\n \n self.gitSubmodulesInitAct = E5Action(\n self.tr('Initialize'),\n self.tr('Initialize'),\n 0, 0, self, 'git_submodules_init')\n self.gitSubmodulesInitAct.setStatusTip(self.tr(\n 'Initialize the submodules of the current project'\n ))\n self.gitSubmodulesInitAct.setWhatsThis(self.tr(\n \"\"\"<b>Initialize</b>\"\"\"\n \"\"\"<p>This initializes the submodules of the current\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitSubmodulesInitAct.triggered.connect(\n self.__gitSubmodulesInit)\n self.actions.append(self.gitSubmodulesInitAct)\n \n self.gitSubmodulesDeinitAct = E5Action(\n self.tr('Unregister'),\n self.tr('Unregister'),\n 0, 0, self, 'git_submodules_deinit')\n self.gitSubmodulesDeinitAct.setStatusTip(self.tr(\n 'Unregister 
submodules of the current project'\n ))\n self.gitSubmodulesDeinitAct.setWhatsThis(self.tr(\n \"\"\"<b>Unregister</b>\"\"\"\n \"\"\"<p>This unregisters submodules of the current project.</p>\"\"\"\n ))\n self.gitSubmodulesDeinitAct.triggered.connect(\n self.__gitSubmodulesDeinit)\n self.actions.append(self.gitSubmodulesDeinitAct)\n \n self.gitSubmodulesUpdateAct = E5Action(\n self.tr('Update'),\n self.tr('Update'),\n 0, 0, self, 'git_submodules_update')\n self.gitSubmodulesUpdateAct.setStatusTip(self.tr(\n 'Update submodules of the current project'\n ))\n self.gitSubmodulesUpdateAct.setWhatsThis(self.tr(\n \"\"\"<b>Update</b>\"\"\"\n \"\"\"<p>This updates submodules of the current project.</p>\"\"\"\n ))\n self.gitSubmodulesUpdateAct.triggered.connect(\n self.__gitSubmodulesUpdate)\n self.actions.append(self.gitSubmodulesUpdateAct)\n \n self.gitSubmodulesUpdateInitAct = E5Action(\n self.tr('Initialize and Update'),\n self.tr('Initialize and Update'),\n 0, 0, self, 'git_submodules_update_init')\n self.gitSubmodulesUpdateInitAct.setStatusTip(self.tr(\n 'Initialize and update submodules of the current project'\n ))\n self.gitSubmodulesUpdateInitAct.setWhatsThis(self.tr(\n \"\"\"<b>Initialize and Update</b>\"\"\"\n \"\"\"<p>This initializes and updates submodules of the current\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitSubmodulesUpdateInitAct.triggered.connect(\n self.__gitSubmodulesUpdateInit)\n self.actions.append(self.gitSubmodulesUpdateInitAct)\n \n self.gitSubmodulesUpdateRemoteAct = E5Action(\n self.tr('Fetch and Update'),\n self.tr('Fetch and Update'),\n 0, 0, self, 'git_submodules_update_remote')\n self.gitSubmodulesUpdateRemoteAct.setStatusTip(self.tr(\n 'Fetch and update submodules of the current project'\n ))\n self.gitSubmodulesUpdateRemoteAct.setWhatsThis(self.tr(\n \"\"\"<b>Fetch and Update</b>\"\"\"\n \"\"\"<p>This fetches and updates submodules of the current\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitSubmodulesUpdateRemoteAct.triggered.connect(\n self.__gitSubmodulesUpdateRemote)\n self.actions.append(self.gitSubmodulesUpdateRemoteAct)\n \n self.gitSubmodulesUpdateOptionsAct = E5Action(\n self.tr('Update with Options'),\n self.tr('Update with Options'),\n 0, 0, self, 'git_submodules_update_options')\n self.gitSubmodulesUpdateOptionsAct.setStatusTip(self.tr(\n 'Update submodules of the current project offering a dialog'\n ' to enter options'\n ))\n self.gitSubmodulesUpdateOptionsAct.setWhatsThis(self.tr(\n \"\"\"<b>Update with Options</b>\"\"\"\n \"\"\"<p>This updates submodules of the current project\"\"\"\n \"\"\" offering a dialog to enter update options.</p>\"\"\"\n ))\n self.gitSubmodulesUpdateOptionsAct.triggered.connect(\n self.__gitSubmodulesUpdateOptions)\n self.actions.append(self.gitSubmodulesUpdateOptionsAct)\n \n self.gitSubmodulesSyncAct = E5Action(\n self.tr('Synchronize URLs'),\n self.tr('Synchronize URLs'),\n 0, 0, self, 'git_submodules_sync')\n self.gitSubmodulesSyncAct.setStatusTip(self.tr(\n 'Synchronize URLs of submodules of the current project'\n ))\n self.gitSubmodulesSyncAct.setWhatsThis(self.tr(\n \"\"\"<b>Synchronize URLs</b>\"\"\"\n \"\"\"<p>This synchronizes URLs of submodules of the current\"\"\"\n \"\"\" project.</p>\"\"\"\n ))\n self.gitSubmodulesSyncAct.triggered.connect(\n self.__gitSubmodulesSync)\n self.actions.append(self.gitSubmodulesSyncAct)\n \n self.gitSubmodulesStatusAct = E5Action(\n self.tr('Show Status'),\n self.tr('Show Status'),\n 0, 0, self, 'git_submodules_status')\n self.gitSubmodulesStatusAct.setStatusTip(self.tr(\n 
'Show the status of submodules of the current project'\n ))\n self.gitSubmodulesStatusAct.setWhatsThis(self.tr(\n \"\"\"<b>Show Status</b>\"\"\"\n \"\"\"<p>This shows a dialog with the status of submodules of the\"\"\"\n \"\"\" current project.</p>\"\"\"\n ))\n self.gitSubmodulesStatusAct.triggered.connect(\n self.__gitSubmodulesStatus)\n self.actions.append(self.gitSubmodulesStatusAct)\n \n self.gitSubmodulesSummaryAct = E5Action(\n self.tr('Show Summary'),\n self.tr('Show Summary'),\n 0, 0, self, 'git_submodules_summary')\n self.gitSubmodulesSummaryAct.setStatusTip(self.tr(\n 'Show summary information for submodules of the current project'\n ))\n self.gitSubmodulesSummaryAct.setWhatsThis(self.tr(\n \"\"\"<b>Show Summary</b>\"\"\"\n \"\"\"<p>This shows some summary information for submodules of the\"\"\"\n \"\"\" current project.</p>\"\"\"\n ))\n self.gitSubmodulesSummaryAct.triggered.connect(\n self.__gitSubmodulesSummary)\n self.actions.append(self.gitSubmodulesSummaryAct)", "def __init__(self, record, modifications):\n self.modifications = modifications\n super(ModifyAction, self).__init__(record)", "def compareModels(request):\n diff = Diff()\n path = ''\n diff.deleteDir()\n\n if request.method == 'POST':\n diff.createDir()\n form = ModelCompareForm(request.POST, request.FILES, instance=diff)\n if form.is_valid():\n form.save()\n path = 'media/rmg/tools/compare/diff.html'\n # Generate the output HTML file\n diff.createOutput()\n return render_to_response('modelCompare.html', {'form': form, 'path':path}, context_instance=RequestContext(request))\n\n\n # Otherwise create the form\n else:\n form = ModelCompareForm(instance=diff)\n\n return render_to_response('modelCompare.html', {'form': form,'path':path}, context_instance=RequestContext(request))", "def diff(request):\n if _use_new_ui(request):\n return _serve_new_ui(request)\n\n if request.patch.no_base_file:\n # Can't show side-by-side diff since we don't have the base file. 
Show the\n # unified diff instead.\n return _patch_helper(request, 'diff')\n\n patchset = request.patchset\n patch = request.patch\n\n patchsets = list(request.issue.patchsets)\n\n context = _get_context_for_user(request)\n column_width = _get_column_width_for_user(request)\n tab_spaces = _get_tab_spaces_for_user(request)\n if patch.filename.startswith('webkit/api'):\n column_width = django_settings.MAX_COLUMN_WIDTH\n tab_spaces = django_settings.MAX_TAB_SPACES\n if patch.is_binary:\n rows = None\n else:\n try:\n rows = _get_diff_table_rows(request, patch, context, column_width,\n tab_spaces)\n except FetchError as err:\n return HttpTextResponse(str(err), status=404)\n\n _add_next_prev(patchset, patch)\n src_url = _map_base_url(request.issue.base)\n if src_url and not src_url.endswith('/'):\n src_url = src_url + '/'\n return respond(request, 'diff.html',\n {'issue': request.issue,\n 'patchset': patchset,\n 'patch': patch,\n 'view_style': 'diff',\n 'rows': rows,\n 'context': context,\n 'context_values': models.CONTEXT_CHOICES,\n 'column_width': column_width,\n 'tab_spaces': tab_spaces,\n 'patchsets': patchsets,\n 'src_url': src_url,\n })", "def _post(self, request_obj):\n return self._execute_action(request_obj, [CreateAction, EditAction], 'POST')", "def svn_diff_diff_2(*args):\n return _diff.svn_diff_diff_2(*args)", "def test_with_draft_diff(self):\n repository = self.create_repository(tool_name='Test')\n review_request = self.create_review_request(\n repository=repository,\n submitter=self.user,\n publish=True)\n diffset = self.create_diffset(review_request, draft=True)\n filediff = self.create_filediff(diffset)\n\n rsp = self.api_get(\n get_original_file_url(review_request, diffset, filediff),\n expected_status=404)\n self.assertEqual(rsp['stat'], 'fail')\n self.assertEqual(rsp['err']['code'], DOES_NOT_EXIST.code)", "def action(self):\n pass", "def action(self):\n pass", "def __gitExtendedDiff(self):\n self.vcs.gitExtendedDiff(self.project.getProjectPath())", "def UpdateDelta(self, request, context):\n pass", "def _diff(self) -> ModelDiff:\n try:\n description = self.connection.describe_activity_type(self.domain.name, self.name, self.version)\n except SWFResponseError as err:\n if err.error_code == \"UnknownResourceFault\":\n raise DoesNotExistError(\"Remote ActivityType does not exist\")\n\n raise ResponseError(err.body[\"message\"])\n\n info = description[\"typeInfo\"]\n config = description[\"configuration\"]\n\n return ModelDiff(\n (\"name\", self.name, info[\"activityType\"][\"name\"]),\n (\"version\", self.version, info[\"activityType\"][\"version\"]),\n (\"status\", self.status, info[\"status\"]),\n (\"description\", self.description, info[\"description\"]),\n (\"creation_date\", self.creation_date, info[\"creationDate\"]),\n (\"deprecation_date\", self.deprecation_date, info[\"deprecationDate\"]),\n (\"task_list\", self.task_list, config[\"defaultTaskList\"][\"name\"]),\n (\n \"task_heartbeat_timeout\",\n self.task_heartbeat_timeout,\n config[\"defaultTaskHeartbeatTimeout\"],\n ),\n (\n \"task_schedule_to_close_timeout\",\n self.task_schedule_to_close_timeout,\n config[\"defaultTaskScheduleToCloseTimeout\"],\n ),\n (\n \"task_schedule_to_start_timeout\",\n self.task_schedule_to_start_timeout,\n config[\"defaultTaskScheduleToStartTimeout\"],\n ),\n (\n \"task_start_to_close_timeout\",\n self.task_start_to_close_timeout,\n config[\"defaultTaskStartToCloseTimeout\"],\n ),\n )", "def diff(self, files=[], rev=None, change=None, text=False,\n git=False, nodates=False, 
show_function=False, reverse=False,\n ignore_all_space=False, ignore_space_change=False,\n ignore_blank_lines=False, unified=None,\n stat=False, subrepos=False, include=None, exclude=None):\n if change and rev:\n raise ValueError('cannot specify both change and rev')\n\n files = self._map_files(files)\n rev = self._map_revs(rev)\n change = self._map_one_rev(change)\n\n out = self._client.execute('diff', files, r=rev, c=change,\n a=text, g=git, nodates=nodates,\n p=show_function, reverse=reverse,\n w=ignore_all_space, b=ignore_space_change,\n B=ignore_blank_lines, U=unified, stat=stat,\n S=subrepos, I=include, X=exclude,\n binary=True)\n\n return out", "def diff(ctx, input_file):\n if input_file is None:\n click.echo(diff.get_help(ctx))\n return\n\n diff_color(input_file, ctx.obj)", "def test_get_label_with_diffs(self) -> None:\n review_request = self.create_review_request(create_repository=True)\n self.create_diffset(review_request)\n\n self.request.user = review_request.submitter\n\n self.assertEqual(\n self.action.get_label(context=Context({\n 'review_request': review_request,\n 'request': self.request,\n })),\n 'Update Diff')", "def diff2(request, ps_left_id, ps_right_id, patch_filename):\n context = _get_context_for_user(request)\n column_width = _get_column_width_for_user(request)\n tab_spaces = _get_tab_spaces_for_user(request)\n\n ps_right = models.PatchSet.get_by_id(\n int(ps_right_id), parent=request.issue.key)\n patch_right = None\n\n if ps_right:\n patch_right = models.Patch.query(\n models.Patch.filename == patch_filename,\n ancestor=ps_right.key).get()\n\n if patch_right:\n patch_id = patch_right.key.id()\n elif patch_filename.isdigit():\n # Perhaps it's an ID that's passed in, based on the old URL scheme.\n patch_id = int(patch_filename)\n else: # patch doesn't exist in this patchset\n patch_id = None\n\n data = _get_diff2_data(request, ps_left_id, ps_right_id, patch_id, context,\n column_width, tab_spaces, patch_filename)\n if isinstance(data, HttpResponse) and data.status_code != 302:\n return data\n\n patchsets = list(request.issue.patchsets)\n\n if data[\"patch_right\"]:\n _add_next_prev2(data[\"ps_left\"], data[\"ps_right\"], data[\"patch_right\"])\n return respond(request, 'diff2.html',\n {'issue': request.issue,\n 'ps_left': data[\"ps_left\"],\n 'patch_left': data[\"patch_left\"],\n 'ps_right': data[\"ps_right\"],\n 'patch_right': data[\"patch_right\"],\n 'rows': data[\"rows\"],\n 'patch_id': patch_id,\n 'context': context,\n 'context_values': models.CONTEXT_CHOICES,\n 'column_width': column_width,\n 'tab_spaces': tab_spaces,\n 'patchsets': patchsets,\n 'filename': patch_filename,\n })", "def act(self):\n pass", "def svn_diff_diff3(*args):\n return _diff.svn_diff_diff3(*args)", "def delta(self) -> None:", "def actions(self, state):\n raise NotImplementedError # Override this!", "def Diff(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def diff2(request, ps_left_id, ps_right_id, patch_filename):\n context = _get_context_for_user(request)\n column_width = _get_column_width_for_user(request)\n\n ps_right = models.PatchSet.get_by_id(\n int(ps_right_id), parent=request.issue.key)\n patch_right = None\n\n if ps_right:\n patch_right = models.Patch.query(\n models.Patch.patchset_key == ps_right.key,\n models.Patch.filename == patch_filename).get()\n\n if patch_right:\n patch_id = patch_right.key.id()\n elif patch_filename.isdigit():\n # Perhaps 
it's an ID that's passed in, based on the old URL scheme.\n patch_id = int(patch_filename)\n else: # patch doesn't exist in this patchset\n patch_id = None\n\n data = _get_diff2_data(request, ps_left_id, ps_right_id, patch_id, context,\n column_width, patch_filename)\n if isinstance(data, HttpResponse) and data.status_code != 302:\n return data\n\n patchsets = list(request.issue.patchsets)\n\n if data[\"patch_right\"]:\n _add_next_prev2(data[\"ps_left\"], data[\"ps_right\"], data[\"patch_right\"])\n return respond(request, 'diff2.html',\n {'issue': request.issue,\n 'ps_left': data[\"ps_left\"],\n 'patch_left': data[\"patch_left\"],\n 'ps_right': data[\"ps_right\"],\n 'patch_right': data[\"patch_right\"],\n 'rows': data[\"rows\"],\n 'patch_id': patch_id,\n 'context': context,\n 'context_values': models.CONTEXT_CHOICES,\n 'column_width': column_width,\n 'patchsets': patchsets,\n 'filename': patch_filename,\n })", "def apply_rl_actions(self, rl_actions):\n pass", "def test_should_render_on_interdiff(self) -> None:\n self.assertTrue(self.action.should_render(\n context=self._create_request_context(\n url_name='view-diff-revision')))", "def difference_update(self, *others):\r\n return self.sdiffstore(self.r_key, slef.r_key, *[o.r_key for o in others])", "def svn_diff_diff4(*args):\n return _diff.svn_diff_diff4(*args)", "def _execute(self, model_obj):", "def __initEditActions(self):\n self.editActGrp = createActionGroup(self)\n \n self.undoAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Undo'),\n UI.PixmapCache.getIcon(\"editUndo.png\"),\n QCoreApplication.translate('ViewManager', '&Undo'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Z\", \"Edit|Undo\")),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Alt+Backspace\", \"Edit|Undo\")),\n self.editActGrp, 'vm_edit_undo')\n self.undoAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Undo the last change'))\n self.undoAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Undo</b>\"\"\"\n \"\"\"<p>Undo the last change done in the current editor.</p>\"\"\"\n ))\n self.undoAct.triggered.connect(self.__editUndo)\n self.editActions.append(self.undoAct)\n \n self.redoAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Redo'),\n UI.PixmapCache.getIcon(\"editRedo.png\"),\n QCoreApplication.translate('ViewManager', '&Redo'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Shift+Z\", \"Edit|Redo\")),\n 0,\n self.editActGrp, 'vm_edit_redo')\n self.redoAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Redo the last change'))\n self.redoAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Redo</b>\"\"\"\n \"\"\"<p>Redo the last change done in the current editor.</p>\"\"\"\n ))\n self.redoAct.triggered.connect(self.__editRedo)\n self.editActions.append(self.redoAct)\n \n self.revertAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Revert to last saved state'),\n QCoreApplication.translate(\n 'ViewManager', 'Re&vert to last saved state'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Y\", \"Edit|Revert\")),\n 0,\n self.editActGrp, 'vm_edit_revert')\n self.revertAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Revert to last saved state'))\n self.revertAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Revert to last saved state</b>\"\"\"\n \"\"\"<p>Undo all changes up to the last saved state\"\"\"\n \"\"\" of the current editor.</p>\"\"\"\n ))\n 
self.revertAct.triggered.connect(self.__editRevert)\n self.editActions.append(self.revertAct)\n \n self.copyActGrp = createActionGroup(self.editActGrp)\n \n self.cutAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Cut'),\n UI.PixmapCache.getIcon(\"editCut.png\"),\n QCoreApplication.translate('ViewManager', 'Cu&t'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+X\", \"Edit|Cut\")),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Shift+Del\", \"Edit|Cut\")),\n self.copyActGrp, 'vm_edit_cut')\n self.cutAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Cut the selection'))\n self.cutAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Cut</b>\"\"\"\n \"\"\"<p>Cut the selected text of the current editor to the\"\"\"\n \"\"\" clipboard.</p>\"\"\"\n ))\n self.cutAct.triggered.connect(self.__editCut)\n self.editActions.append(self.cutAct)\n \n self.copyAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Copy'),\n UI.PixmapCache.getIcon(\"editCopy.png\"),\n QCoreApplication.translate('ViewManager', '&Copy'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+C\", \"Edit|Copy\")),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Ins\", \"Edit|Copy\")),\n self.copyActGrp, 'vm_edit_copy')\n self.copyAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Copy the selection'))\n self.copyAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Copy</b>\"\"\"\n \"\"\"<p>Copy the selected text of the current editor to the\"\"\"\n \"\"\" clipboard.</p>\"\"\"\n ))\n self.copyAct.triggered.connect(self.__editCopy)\n self.editActions.append(self.copyAct)\n \n self.pasteAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Paste'),\n UI.PixmapCache.getIcon(\"editPaste.png\"),\n QCoreApplication.translate('ViewManager', '&Paste'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+V\", \"Edit|Paste\")),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Shift+Ins\", \"Edit|Paste\")),\n self.copyActGrp, 'vm_edit_paste')\n self.pasteAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Paste the last cut/copied text'))\n self.pasteAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Paste</b>\"\"\"\n \"\"\"<p>Paste the last cut/copied text from the clipboard to\"\"\"\n \"\"\" the current editor.</p>\"\"\"\n ))\n self.pasteAct.triggered.connect(self.__editPaste)\n self.editActions.append(self.pasteAct)\n \n self.deleteAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Clear'),\n UI.PixmapCache.getIcon(\"editDelete.png\"),\n QCoreApplication.translate('ViewManager', 'Clear'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Alt+Shift+C\", \"Edit|Clear\")),\n 0,\n self.copyActGrp, 'vm_edit_clear')\n self.deleteAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Clear all text'))\n self.deleteAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Clear</b>\"\"\"\n \"\"\"<p>Delete all text of the current editor.</p>\"\"\"\n ))\n self.deleteAct.triggered.connect(self.__editDelete)\n self.editActions.append(self.deleteAct)\n \n self.joinAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Join Lines'),\n QCoreApplication.translate('ViewManager', 'Join Lines'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+J\", \"Edit|Join Lines\")),\n 0,\n self.editActGrp, 'vm_edit_join_lines')\n self.joinAct.setStatusTip(QCoreApplication.translate(\n 
'ViewManager', 'Join Lines'))\n self.joinAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Join Lines</b>\"\"\"\n \"\"\"<p>Join the current and the next lines.</p>\"\"\"\n ))\n self.joinAct.triggered.connect(self.__editJoin)\n self.editActions.append(self.joinAct)\n \n self.indentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Indent'),\n UI.PixmapCache.getIcon(\"editIndent.png\"),\n QCoreApplication.translate('ViewManager', '&Indent'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+I\", \"Edit|Indent\")),\n 0,\n self.editActGrp, 'vm_edit_indent')\n self.indentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Indent line'))\n self.indentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Indent</b>\"\"\"\n \"\"\"<p>Indents the current line or the lines of the\"\"\"\n \"\"\" selection by one level.</p>\"\"\"\n ))\n self.indentAct.triggered.connect(self.__editIndent)\n self.editActions.append(self.indentAct)\n \n self.unindentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Unindent'),\n UI.PixmapCache.getIcon(\"editUnindent.png\"),\n QCoreApplication.translate('ViewManager', 'U&nindent'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Shift+I\", \"Edit|Unindent\")),\n 0,\n self.editActGrp, 'vm_edit_unindent')\n self.unindentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Unindent line'))\n self.unindentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Unindent</b>\"\"\"\n \"\"\"<p>Unindents the current line or the lines of the\"\"\"\n \"\"\" selection by one level.</p>\"\"\"\n ))\n self.unindentAct.triggered.connect(self.__editUnindent)\n self.editActions.append(self.unindentAct)\n \n self.smartIndentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Smart indent'),\n UI.PixmapCache.getIcon(\"editSmartIndent.png\"),\n QCoreApplication.translate('ViewManager', 'Smart indent'),\n 0, 0,\n self.editActGrp, 'vm_edit_smart_indent')\n self.smartIndentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Smart indent Line or Selection'))\n self.smartIndentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Smart indent</b>\"\"\"\n \"\"\"<p>Indents the current line or the lines of the\"\"\"\n \"\"\" current selection smartly.</p>\"\"\"\n ))\n self.smartIndentAct.triggered.connect(self.__editSmartIndent)\n self.editActions.append(self.smartIndentAct)\n \n self.commentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Comment'),\n UI.PixmapCache.getIcon(\"editComment.png\"),\n QCoreApplication.translate('ViewManager', 'C&omment'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+M\", \"Edit|Comment\")),\n 0,\n self.editActGrp, 'vm_edit_comment')\n self.commentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Comment Line or Selection'))\n self.commentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Comment</b>\"\"\"\n \"\"\"<p>Comments the current line or the lines of the\"\"\"\n \"\"\" current selection.</p>\"\"\"\n ))\n self.commentAct.triggered.connect(self.__editComment)\n self.editActions.append(self.commentAct)\n \n self.uncommentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Uncomment'),\n UI.PixmapCache.getIcon(\"editUncomment.png\"),\n QCoreApplication.translate('ViewManager', 'Unco&mment'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Alt+Ctrl+M\", \"Edit|Uncomment\")),\n 0,\n self.editActGrp, 
'vm_edit_uncomment')\n self.uncommentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Uncomment Line or Selection'))\n self.uncommentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Uncomment</b>\"\"\"\n \"\"\"<p>Uncomments the current line or the lines of the\"\"\"\n \"\"\" current selection.</p>\"\"\"\n ))\n self.uncommentAct.triggered.connect(self.__editUncomment)\n self.editActions.append(self.uncommentAct)\n \n self.toggleCommentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Toggle Comment'),\n UI.PixmapCache.getIcon(\"editToggleComment.png\"),\n QCoreApplication.translate('ViewManager', 'Toggle Comment'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Shift+M\", \"Edit|Toggle Comment\")),\n 0,\n self.editActGrp, 'vm_edit_toggle_comment')\n self.toggleCommentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Toggle the comment of the current line, selection or'\n ' comment block'))\n self.toggleCommentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Toggle Comment</b>\"\"\"\n \"\"\"<p>If the current line does not start with a block comment,\"\"\"\n \"\"\" the current line or selection is commented. If it is already\"\"\"\n \"\"\" commented, this comment block is uncommented. </p>\"\"\"\n ))\n self.toggleCommentAct.triggered.connect(self.__editToggleComment)\n self.editActions.append(self.toggleCommentAct)\n \n self.streamCommentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Stream Comment'),\n QCoreApplication.translate('ViewManager', 'Stream Comment'),\n 0, 0,\n self.editActGrp, 'vm_edit_stream_comment')\n self.streamCommentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Stream Comment Line or Selection'))\n self.streamCommentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Stream Comment</b>\"\"\"\n \"\"\"<p>Stream comments the current line or the current\"\"\"\n \"\"\" selection.</p>\"\"\"\n ))\n self.streamCommentAct.triggered.connect(self.__editStreamComment)\n self.editActions.append(self.streamCommentAct)\n \n self.boxCommentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Box Comment'),\n QCoreApplication.translate('ViewManager', 'Box Comment'),\n 0, 0,\n self.editActGrp, 'vm_edit_box_comment')\n self.boxCommentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Box Comment Line or Selection'))\n self.boxCommentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Box Comment</b>\"\"\"\n \"\"\"<p>Box comments the current line or the lines of the\"\"\"\n \"\"\" current selection.</p>\"\"\"\n ))\n self.boxCommentAct.triggered.connect(self.__editBoxComment)\n self.editActions.append(self.boxCommentAct)\n \n self.selectBraceAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Select to brace'),\n QCoreApplication.translate('ViewManager', 'Select to &brace'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+E\", \"Edit|Select to brace\")),\n 0,\n self.editActGrp, 'vm_edit_select_to_brace')\n self.selectBraceAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Select text to the matching brace'))\n self.selectBraceAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Select to brace</b>\"\"\"\n \"\"\"<p>Select text of the current editor to the matching\"\"\"\n \"\"\" brace.</p>\"\"\"\n ))\n self.selectBraceAct.triggered.connect(self.__editSelectBrace)\n self.editActions.append(self.selectBraceAct)\n \n self.selectAllAct = 
E5Action(\n QCoreApplication.translate('ViewManager', 'Select all'),\n UI.PixmapCache.getIcon(\"editSelectAll.png\"),\n QCoreApplication.translate('ViewManager', '&Select all'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+A\", \"Edit|Select all\")),\n 0,\n self.editActGrp, 'vm_edit_select_all')\n self.selectAllAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Select all text'))\n self.selectAllAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Select All</b>\"\"\"\n \"\"\"<p>Select all text of the current editor.</p>\"\"\"\n ))\n self.selectAllAct.triggered.connect(self.__editSelectAll)\n self.editActions.append(self.selectAllAct)\n \n self.deselectAllAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Deselect all'),\n QCoreApplication.translate('ViewManager', '&Deselect all'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Alt+Ctrl+A\", \"Edit|Deselect all\")),\n 0,\n self.editActGrp, 'vm_edit_deselect_all')\n self.deselectAllAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Deselect all text'))\n self.deselectAllAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Deselect All</b>\"\"\"\n \"\"\"<p>Deselect all text of the current editor.</p>\"\"\"\n ))\n self.deselectAllAct.triggered.connect(self.__editDeselectAll)\n self.editActions.append(self.deselectAllAct)\n \n self.convertEOLAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Convert Line End Characters'),\n QCoreApplication.translate(\n 'ViewManager', 'Convert &Line End Characters'),\n 0, 0,\n self.editActGrp, 'vm_edit_convert_eol')\n self.convertEOLAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Convert Line End Characters'))\n self.convertEOLAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Convert Line End Characters</b>\"\"\"\n \"\"\"<p>Convert the line end characters to the currently set\"\"\"\n \"\"\" type.</p>\"\"\"\n ))\n self.convertEOLAct.triggered.connect(self.__convertEOL)\n self.editActions.append(self.convertEOLAct)\n \n self.shortenEmptyAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Shorten empty lines'),\n QCoreApplication.translate('ViewManager', 'Shorten empty lines'),\n 0, 0,\n self.editActGrp, 'vm_edit_shorten_empty_lines')\n self.shortenEmptyAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Shorten empty lines'))\n self.shortenEmptyAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Shorten empty lines</b>\"\"\"\n \"\"\"<p>Shorten lines consisting solely of whitespace\"\"\"\n \"\"\" characters.</p>\"\"\"\n ))\n self.shortenEmptyAct.triggered.connect(self.__shortenEmptyLines)\n self.editActions.append(self.shortenEmptyAct)\n \n self.autoCompleteAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Complete'),\n QCoreApplication.translate('ViewManager', '&Complete'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Space\", \"Edit|Complete\")),\n 0,\n self.editActGrp, 'vm_edit_autocomplete')\n self.autoCompleteAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Complete current word'))\n self.autoCompleteAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Complete</b>\"\"\"\n \"\"\"<p>Performs a completion of the word containing\"\"\"\n \"\"\" the cursor.</p>\"\"\"\n ))\n self.autoCompleteAct.triggered.connect(self.__editAutoComplete)\n self.editActions.append(self.autoCompleteAct)\n \n self.autoCompleteFromDocAct = E5Action(\n 
QCoreApplication.translate(\n 'ViewManager', 'Complete from Document'),\n QCoreApplication.translate(\n 'ViewManager', 'Complete from Document'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Shift+Space\",\n \"Edit|Complete from Document\")),\n 0,\n self.editActGrp, 'vm_edit_autocomplete_from_document')\n self.autoCompleteFromDocAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Complete current word from Document'))\n self.autoCompleteFromDocAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Complete from Document</b>\"\"\"\n \"\"\"<p>Performs a completion from document of the word\"\"\"\n \"\"\" containing the cursor.</p>\"\"\"\n ))\n self.autoCompleteFromDocAct.triggered.connect(\n self.__editAutoCompleteFromDoc)\n self.editActions.append(self.autoCompleteFromDocAct)\n \n self.autoCompleteFromAPIsAct = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Complete from APIs'),\n QCoreApplication.translate('ViewManager',\n 'Complete from APIs'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Alt+Space\",\n \"Edit|Complete from APIs\")),\n 0,\n self.editActGrp, 'vm_edit_autocomplete_from_api')\n self.autoCompleteFromAPIsAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Complete current word from APIs'))\n self.autoCompleteFromAPIsAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Complete from APIs</b>\"\"\"\n \"\"\"<p>Performs a completion from APIs of the word\"\"\"\n \"\"\" containing the cursor.</p>\"\"\"\n ))\n self.autoCompleteFromAPIsAct.triggered.connect(\n self.__editAutoCompleteFromAPIs)\n self.editActions.append(self.autoCompleteFromAPIsAct)\n \n self.autoCompleteFromAllAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Complete from Document and APIs'),\n QCoreApplication.translate(\n 'ViewManager', 'Complete from Document and APIs'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Alt+Shift+Space\",\n \"Edit|Complete from Document and APIs\")),\n 0,\n self.editActGrp, 'vm_edit_autocomplete_from_all')\n self.autoCompleteFromAllAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Complete current word from Document and APIs'))\n self.autoCompleteFromAllAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Complete from Document and APIs</b>\"\"\"\n \"\"\"<p>Performs a completion from document and APIs\"\"\"\n \"\"\" of the word containing the cursor.</p>\"\"\"\n ))\n self.autoCompleteFromAllAct.triggered.connect(\n self.__editAutoCompleteFromAll)\n self.editActions.append(self.autoCompleteFromAllAct)\n \n self.calltipsAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Calltip'),\n QCoreApplication.translate('ViewManager', '&Calltip'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Meta+Alt+Space\", \"Edit|Calltip\")),\n 0,\n self.editActGrp, 'vm_edit_calltip')\n self.calltipsAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Show Calltips'))\n self.calltipsAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Calltip</b>\"\"\"\n \"\"\"<p>Show calltips based on the characters immediately to the\"\"\"\n \"\"\" left of the cursor.</p>\"\"\"\n ))\n self.calltipsAct.triggered.connect(self.__editShowCallTips)\n self.editActions.append(self.calltipsAct)\n \n self.codeInfoAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Code Info'),\n UI.PixmapCache.getIcon(\"codeDocuViewer.png\"),\n QCoreApplication.translate('ViewManager', 'Code Info'),\n 
QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Alt+I\", \"Edit|Code Info\")),\n 0,\n self.editActGrp, 'vm_edit_codeinfo')\n self.codeInfoAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Show Code Info'))\n self.codeInfoAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Code Info</b>\"\"\"\n \"\"\"<p>Show code information based on the cursor position.</p>\"\"\"\n ))\n self.codeInfoAct.triggered.connect(self.__editShowCodeInfo)\n self.editActions.append(self.codeInfoAct)\n \n self.sortAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Sort'),\n QCoreApplication.translate('ViewManager', 'Sort'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Alt+S\", \"Edit|Sort\")),\n 0,\n self.editActGrp, 'vm_edit_sort')\n self.sortAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Sort the lines containing the rectangular selection'))\n self.sortAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Sort</b>\"\"\"\n \"\"\"<p>Sort the lines spanned by a rectangular selection based on\"\"\"\n \"\"\" the selection ignoring leading and trailing whitespace.</p>\"\"\"\n ))\n self.sortAct.triggered.connect(self.__editSortSelectedLines)\n self.editActions.append(self.sortAct)\n \n self.editActGrp.setEnabled(False)\n self.copyActGrp.setEnabled(False)\n \n ####################################################################\n ## Below follow the actions for QScintilla standard commands.\n ####################################################################\n \n self.esm = QSignalMapper(self)\n self.esm.mapped[int].connect(self.__editorCommand)\n \n self.editorActGrp = createActionGroup(self.editActGrp)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Move left one character'),\n QCoreApplication.translate('ViewManager',\n 'Move left one character'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Left')), 0,\n self.editorActGrp, 'vm_edit_move_left_char')\n self.esm.setMapping(act, QsciScintilla.SCI_CHARLEFT)\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+B')))\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Move right one character'),\n QCoreApplication.translate('ViewManager',\n 'Move right one character'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Right')),\n 0, self.editorActGrp, 'vm_edit_move_right_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+F')))\n self.esm.setMapping(act, QsciScintilla.SCI_CHARRIGHT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move up one line'),\n QCoreApplication.translate('ViewManager', 'Move up one line'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Up')), 0,\n self.editorActGrp, 'vm_edit_move_up_line')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+P')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEUP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move down one line'),\n QCoreApplication.translate('ViewManager', 'Move down one line'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Down')), 0,\n self.editorActGrp, 
'vm_edit_move_down_line')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+N')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEDOWN)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Move left one word part'),\n QCoreApplication.translate('ViewManager',\n 'Move left one word part'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_left_word_part')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Left')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDPARTLEFT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Move right one word part'),\n QCoreApplication.translate('ViewManager',\n 'Move right one word part'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_right_word_part')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDPARTRIGHT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move left one word'),\n QCoreApplication.translate('ViewManager', 'Move left one word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_left_word')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Left')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Left')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDLEFT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move right one word'),\n QCoreApplication.translate('ViewManager', 'Move right one word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_right_word')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDRIGHT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to first visible character in document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to first visible character in document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_first_visible_char')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_VCHOME)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move to start of display line'),\n QCoreApplication.translate(\n 'ViewManager', 'Move to start of display line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_start_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Left')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_HOMEDISPLAY)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of document line'),\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of document line'),\n 0, 0,\n self.editorActGrp, 
'vm_edit_move_end_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+E')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'End')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Scroll view down one line'),\n QCoreApplication.translate('ViewManager',\n 'Scroll view down one line'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Ctrl+Down')),\n 0, self.editorActGrp, 'vm_edit_scroll_down_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINESCROLLDOWN)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Scroll view up one line'),\n QCoreApplication.translate('ViewManager',\n 'Scroll view up one line'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Ctrl+Up')),\n 0, self.editorActGrp, 'vm_edit_scroll_up_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINESCROLLUP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move up one paragraph'),\n QCoreApplication.translate('ViewManager', 'Move up one paragraph'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Alt+Up')),\n 0, self.editorActGrp, 'vm_edit_move_up_para')\n self.esm.setMapping(act, QsciScintilla.SCI_PARAUP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Move down one paragraph'),\n QCoreApplication.translate('ViewManager',\n 'Move down one paragraph'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Alt+Down')),\n 0, self.editorActGrp, 'vm_edit_move_down_para')\n self.esm.setMapping(act, QsciScintilla.SCI_PARADOWN)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move up one page'),\n QCoreApplication.translate('ViewManager', 'Move up one page'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'PgUp')), 0,\n self.editorActGrp, 'vm_edit_move_up_page')\n self.esm.setMapping(act, QsciScintilla.SCI_PAGEUP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Move down one page'),\n QCoreApplication.translate('ViewManager', 'Move down one page'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'PgDown')),\n 0, self.editorActGrp, 'vm_edit_move_down_page')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+V')))\n self.esm.setMapping(act, QsciScintilla.SCI_PAGEDOWN)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Move to start of document'),\n QCoreApplication.translate('ViewManager',\n 'Move to start of document'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_start_text')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Up')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_DOCUMENTSTART)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 
'Move to end of document'),\n QCoreApplication.translate('ViewManager',\n 'Move to end of document'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_end_text')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Down')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+End')))\n self.esm.setMapping(act, QsciScintilla.SCI_DOCUMENTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Indent one level'),\n QCoreApplication.translate('ViewManager', 'Indent one level'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Tab')), 0,\n self.editorActGrp, 'vm_edit_indent_one_level')\n self.esm.setMapping(act, QsciScintilla.SCI_TAB)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Unindent one level'),\n QCoreApplication.translate('ViewManager', 'Unindent one level'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+Tab')),\n 0, self.editorActGrp, 'vm_edit_unindent_one_level')\n self.esm.setMapping(act, QsciScintilla.SCI_BACKTAB)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection left one character'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection left one character'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+Left')),\n 0, self.editorActGrp, 'vm_edit_extend_selection_left_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+B')))\n self.esm.setMapping(act, QsciScintilla.SCI_CHARLEFTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection right one character'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection right one character'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+Right')),\n 0, self.editorActGrp, 'vm_edit_extend_selection_right_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+F')))\n self.esm.setMapping(act, QsciScintilla.SCI_CHARRIGHTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection up one line'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection up one line'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+Up')),\n 0, self.editorActGrp, 'vm_edit_extend_selection_up_line')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+P')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEUPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection down one line'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection down one line'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+Down')),\n 0, self.editorActGrp, 'vm_edit_extend_selection_down_line')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+N')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEDOWNEXTEND)\n 
act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection left one word part'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection left one word part'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_left_word_part')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Shift+Left')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDPARTLEFTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection right one word part'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection right one word part'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_right_word_part')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Shift+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDPARTRIGHTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection left one word'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection left one word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_left_word')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Shift+Left')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Left')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDLEFTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection right one word'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection right one word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_right_word')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Shift+Right')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDRIGHTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to first visible character in document'\n ' line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to first visible character in document'\n ' line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_first_visible_char')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Shift+Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_VCHOMEEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of document line'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_end_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+E')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Shift+End')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEENDEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n 
QCoreApplication.translate(\n 'ViewManager', 'Extend selection up one paragraph'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection up one paragraph'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Shift+Up')),\n 0,\n self.editorActGrp, 'vm_edit_extend_selection_up_para')\n self.esm.setMapping(act, QsciScintilla.SCI_PARAUPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection down one paragraph'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection down one paragraph'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Shift+Down')),\n 0,\n self.editorActGrp, 'vm_edit_extend_selection_down_para')\n self.esm.setMapping(act, QsciScintilla.SCI_PARADOWNEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection up one page'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection up one page'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+PgUp')),\n 0, self.editorActGrp, 'vm_edit_extend_selection_up_page')\n self.esm.setMapping(act, QsciScintilla.SCI_PAGEUPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection down one page'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection down one page'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Shift+PgDown')),\n 0,\n self.editorActGrp, 'vm_edit_extend_selection_down_page')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+V')))\n self.esm.setMapping(act, QsciScintilla.SCI_PAGEDOWNEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to start of document'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to start of document'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_start_text')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Up')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_DOCUMENTSTARTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of document'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of document'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_end_text')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Down')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+End')))\n self.esm.setMapping(act, QsciScintilla.SCI_DOCUMENTENDEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Delete previous character'),\n QCoreApplication.translate('ViewManager',\n 'Delete previous character'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Backspace')),\n 0, self.editorActGrp, 'vm_edit_delete_previous_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n 
QCoreApplication.translate('ViewManager', 'Meta+H')))\n else:\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Shift+Backspace')))\n self.esm.setMapping(act, QsciScintilla.SCI_DELETEBACK)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Delete previous character if not at start of line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Delete previous character if not at start of line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_delet_previous_char_not_line_start')\n self.esm.setMapping(act, QsciScintilla.SCI_DELETEBACKNOTLINE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Delete current character'),\n QCoreApplication.translate('ViewManager',\n 'Delete current character'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Del')),\n 0, self.editorActGrp, 'vm_edit_delete_current_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+D')))\n self.esm.setMapping(act, QsciScintilla.SCI_CLEAR)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Delete word to left'),\n QCoreApplication.translate('ViewManager', 'Delete word to left'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Ctrl+Backspace')),\n 0,\n self.editorActGrp, 'vm_edit_delete_word_left')\n self.esm.setMapping(act, QsciScintilla.SCI_DELWORDLEFT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Delete word to right'),\n QCoreApplication.translate('ViewManager', 'Delete word to right'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Ctrl+Del')),\n 0, self.editorActGrp, 'vm_edit_delete_word_right')\n self.esm.setMapping(act, QsciScintilla.SCI_DELWORDRIGHT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Delete line to left'),\n QCoreApplication.translate('ViewManager', 'Delete line to left'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Ctrl+Shift+Backspace')),\n 0,\n self.editorActGrp, 'vm_edit_delete_line_left')\n self.esm.setMapping(act, QsciScintilla.SCI_DELLINELEFT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Delete line to right'),\n QCoreApplication.translate('ViewManager', 'Delete line to right'),\n 0, 0,\n self.editorActGrp, 'vm_edit_delete_line_right')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+K')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Del')))\n self.esm.setMapping(act, QsciScintilla.SCI_DELLINERIGHT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Insert new line'),\n QCoreApplication.translate('ViewManager', 'Insert new line'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Return')),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Enter')),\n self.editorActGrp, 'vm_edit_insert_line')\n self.esm.setMapping(act, QsciScintilla.SCI_NEWLINE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n 
QCoreApplication.translate(\n 'ViewManager', 'Insert new line below current line'),\n QCoreApplication.translate(\n 'ViewManager', 'Insert new line below current line'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Shift+Return')),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Shift+Enter')),\n self.editorActGrp, 'vm_edit_insert_line_below')\n act.triggered.connect(self.__newLineBelow)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Delete current line'),\n QCoreApplication.translate('ViewManager', 'Delete current line'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Ctrl+Shift+L')),\n 0,\n self.editorActGrp, 'vm_edit_delete_current_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINEDELETE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Duplicate current line'),\n QCoreApplication.translate(\n 'ViewManager', 'Duplicate current line'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Ctrl+D')),\n 0, self.editorActGrp, 'vm_edit_duplicate_current_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINEDUPLICATE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Swap current and previous lines'),\n QCoreApplication.translate(\n 'ViewManager', 'Swap current and previous lines'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Ctrl+T')),\n 0, self.editorActGrp, 'vm_edit_swap_current_previous_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINETRANSPOSE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if QSCINTILLA_VERSION() >= 0x020B00:\n act = E5Action(\n QCoreApplication.translate('ViewManager',\n 'Reverse selected lines'),\n QCoreApplication.translate('ViewManager',\n 'Reverse selected lines'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Meta+Alt+R')),\n 0, self.editorActGrp, 'vm_edit_reverse selected_lines')\n self.esm.setMapping(act, QsciScintilla.SCI_LINEREVERSE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Cut current line'),\n QCoreApplication.translate('ViewManager', 'Cut current line'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Alt+Shift+L')),\n 0, self.editorActGrp, 'vm_edit_cut_current_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINECUT)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Copy current line'),\n QCoreApplication.translate('ViewManager', 'Copy current line'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Ctrl+Shift+T')),\n 0,\n self.editorActGrp, 'vm_edit_copy_current_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINECOPY)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Toggle insert/overtype'),\n QCoreApplication.translate(\n 'ViewManager', 'Toggle insert/overtype'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Ins')),\n 0, self.editorActGrp, 'vm_edit_toggle_insert_overtype')\n self.esm.setMapping(act, QsciScintilla.SCI_EDITTOGGLEOVERTYPE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of display line'),\n 
QCoreApplication.translate(\n 'ViewManager', 'Move to end of display line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_end_displayed_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Right')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+End')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEENDDISPLAY)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of display line'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of display line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_selection_end_displayed_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Ctrl+Shift+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEENDDISPLAYEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Formfeed'),\n QCoreApplication.translate('ViewManager', 'Formfeed'),\n 0, 0,\n self.editorActGrp, 'vm_edit_formfeed')\n self.esm.setMapping(act, QsciScintilla.SCI_FORMFEED)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate('ViewManager', 'Escape'),\n QCoreApplication.translate('ViewManager', 'Escape'),\n QKeySequence(QCoreApplication.translate('ViewManager', 'Esc')), 0,\n self.editorActGrp, 'vm_edit_escape')\n self.esm.setMapping(act, QsciScintilla.SCI_CANCEL)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend rectangular selection down one line'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend rectangular selection down one line'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Ctrl+Down')),\n 0,\n self.editorActGrp, 'vm_edit_extend_rect_selection_down_line')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Alt+Shift+N')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEDOWNRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend rectangular selection up one line'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend rectangular selection up one line'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Alt+Ctrl+Up')),\n 0, self.editorActGrp, 'vm_edit_extend_rect_selection_up_line')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Alt+Shift+P')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEUPRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection left one character'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection left one character'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Ctrl+Left')),\n 0,\n self.editorActGrp, 'vm_edit_extend_rect_selection_left_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Alt+Shift+B')))\n self.esm.setMapping(act, QsciScintilla.SCI_CHARLEFTRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n 
act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection right one character'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection right one character'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Ctrl+Right')),\n 0,\n self.editorActGrp, 'vm_edit_extend_rect_selection_right_char')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Alt+Shift+F')))\n self.esm.setMapping(act, QsciScintilla.SCI_CHARRIGHTRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection to first visible character in'\n ' document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection to first visible character in'\n ' document line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_extend_rect_selection_first_visible_char')\n if not isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Shift+Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_VCHOMERECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection to end of document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection to end of document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_extend_rect_selection_end_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Alt+Shift+E')))\n else:\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Shift+End')))\n self.esm.setMapping(act, QsciScintilla.SCI_LINEENDRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection up one page'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection up one page'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Shift+PgUp')),\n 0,\n self.editorActGrp, 'vm_edit_extend_rect_selection_up_page')\n self.esm.setMapping(act, QsciScintilla.SCI_PAGEUPRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection down one page'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection down one page'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Alt+Shift+PgDown')),\n 0,\n self.editorActGrp, 'vm_edit_extend_rect_selection_down_page')\n if isMacPlatform():\n act.setAlternateShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Alt+Shift+V')))\n self.esm.setMapping(act, QsciScintilla.SCI_PAGEDOWNRECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Duplicate current selection'),\n QCoreApplication.translate(\n 'ViewManager', 'Duplicate current selection'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Ctrl+Shift+D')),\n 0,\n self.editorActGrp, 'vm_edit_duplicate_current_selection')\n self.esm.setMapping(act, QsciScintilla.SCI_SELECTIONDUPLICATE)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_SCROLLTOSTART\"):\n act = 
E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Scroll to start of document'),\n QCoreApplication.translate(\n 'ViewManager', 'Scroll to start of document'),\n 0, 0,\n self.editorActGrp, 'vm_edit_scroll_start_text')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Home')))\n self.esm.setMapping(act, QsciScintilla.SCI_SCROLLTOSTART)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_SCROLLTOEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Scroll to end of document'),\n QCoreApplication.translate(\n 'ViewManager', 'Scroll to end of document'),\n 0, 0,\n self.editorActGrp, 'vm_edit_scroll_end_text')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'End')))\n self.esm.setMapping(act, QsciScintilla.SCI_SCROLLTOEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_VERTICALCENTRECARET\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Scroll vertically to center current line'),\n QCoreApplication.translate(\n 'ViewManager', 'Scroll vertically to center current line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_scroll_vertically_center')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+L')))\n self.esm.setMapping(act, QsciScintilla.SCI_VERTICALCENTRECARET)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_WORDRIGHTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of next word'),\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of next word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_end_next_word')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDRIGHTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_WORDRIGHTENDEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of next word'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of next word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_select_end_next_word')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager',\n 'Alt+Shift+Right')))\n self.esm.setMapping(act, QsciScintilla.SCI_WORDRIGHTENDEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_WORDLEFTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of previous word'),\n QCoreApplication.translate(\n 'ViewManager', 'Move to end of previous word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_end_previous_word')\n self.esm.setMapping(act, QsciScintilla.SCI_WORDLEFTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_WORDLEFTENDEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of previous word'),\n QCoreApplication.translate(\n 'ViewManager', 'Extend selection to end of previous word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_select_end_previous_word')\n self.esm.setMapping(act, QsciScintilla.SCI_WORDLEFTENDEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if 
hasattr(QsciScintilla, \"SCI_HOME\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move to start of document line'),\n QCoreApplication.translate(\n 'ViewManager', 'Move to start of document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_start_document_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+A')))\n self.esm.setMapping(act, QsciScintilla.SCI_HOME)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_HOMEEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to start of document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to start of document line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_extend_selection_start_document_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Meta+Shift+A')))\n self.esm.setMapping(act, QsciScintilla.SCI_HOMEEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_HOMERECTEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection to start of document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend rectangular selection to start of document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_select_rect_start_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager',\n 'Meta+Alt+Shift+A')))\n self.esm.setMapping(act, QsciScintilla.SCI_HOMERECTEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_HOMEDISPLAYEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to start of display line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to start of display line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_extend_selection_start_display_line')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager',\n 'Ctrl+Shift+Left')))\n self.esm.setMapping(act, QsciScintilla.SCI_HOMEDISPLAYEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_HOMEWRAP\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to start of display or document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to start of display or document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_start_display_document_line')\n self.esm.setMapping(act, QsciScintilla.SCI_HOMEWRAP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_HOMEWRAPEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to start of display or document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to start of display or document line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_extend_selection_start_display_document_line')\n self.esm.setMapping(act, QsciScintilla.SCI_HOMEWRAPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_VCHOMEWRAP\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to first visible character in display or document'\n ' line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to first visible character in display or 
document'\n ' line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_move_first_visible_char_document_line')\n self.esm.setMapping(act, QsciScintilla.SCI_VCHOMEWRAP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_VCHOMEWRAPEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to first visible character in'\n ' display or document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to first visible character in'\n ' display or document line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_extend_selection_first_visible_char_document_line')\n self.esm.setMapping(act, QsciScintilla.SCI_VCHOMEWRAPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_LINEENDWRAP\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to end of display or document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Move to end of display or document line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_end_start_display_document_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINEENDWRAP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_LINEENDWRAPEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to end of display or document line'),\n QCoreApplication.translate(\n 'ViewManager',\n 'Extend selection to end of display or document line'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_extend_selection_end_display_document_line')\n self.esm.setMapping(act, QsciScintilla.SCI_LINEENDWRAPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_STUTTEREDPAGEUP\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered move up one page'),\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered move up one page'),\n 0, 0,\n self.editorActGrp, 'vm_edit_stuttered_move_up_page')\n self.esm.setMapping(act, QsciScintilla.SCI_STUTTEREDPAGEUP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_STUTTEREDPAGEUPEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered extend selection up one page'),\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered extend selection up one page'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_stuttered_extend_selection_up_page')\n self.esm.setMapping(act, QsciScintilla.SCI_STUTTEREDPAGEUPEXTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_STUTTEREDPAGEDOWN\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered move down one page'),\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered move down one page'),\n 0, 0,\n self.editorActGrp, 'vm_edit_stuttered_move_down_page')\n self.esm.setMapping(act, QsciScintilla.SCI_STUTTEREDPAGEDOWN)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_STUTTEREDPAGEDOWNEXTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered extend selection down one page'),\n QCoreApplication.translate(\n 'ViewManager', 'Stuttered extend selection down one page'),\n 0, 0,\n self.editorActGrp,\n 'vm_edit_stuttered_extend_selection_down_page')\n self.esm.setMapping(act, QsciScintilla.SCI_STUTTEREDPAGEDOWNEXTEND)\n act.triggered.connect(self.esm.map)\n 
self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_DELWORDRIGHTEND\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Delete right to end of next word'),\n QCoreApplication.translate(\n 'ViewManager', 'Delete right to end of next word'),\n 0, 0,\n self.editorActGrp, 'vm_edit_delete_right_end_next_word')\n if isMacPlatform():\n act.setShortcut(QKeySequence(\n QCoreApplication.translate('ViewManager', 'Alt+Del')))\n self.esm.setMapping(act, QsciScintilla.SCI_DELWORDRIGHTEND)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_MOVESELECTEDLINESUP\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move selected lines up one line'),\n QCoreApplication.translate(\n 'ViewManager', 'Move selected lines up one line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_selection_up_one_line')\n self.esm.setMapping(act, QsciScintilla.SCI_MOVESELECTEDLINESUP)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n if hasattr(QsciScintilla, \"SCI_MOVESELECTEDLINESDOWN\"):\n act = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Move selected lines down one line'),\n QCoreApplication.translate(\n 'ViewManager', 'Move selected lines down one line'),\n 0, 0,\n self.editorActGrp, 'vm_edit_move_selection_down_one_line')\n self.esm.setMapping(act, QsciScintilla.SCI_MOVESELECTEDLINESDOWN)\n act.triggered.connect(self.esm.map)\n self.editActions.append(act)\n \n self.editorActGrp.setEnabled(False)\n \n self.editLowerCaseAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Convert selection to lower case'),\n QCoreApplication.translate(\n 'ViewManager', 'Convert selection to lower case'),\n QKeySequence(QCoreApplication.translate('ViewManager',\n 'Alt+Shift+U')),\n 0, self.editActGrp, 'vm_edit_convert_selection_lower')\n self.esm.setMapping(self.editLowerCaseAct, QsciScintilla.SCI_LOWERCASE)\n self.editLowerCaseAct.triggered.connect(self.esm.map)\n self.editActions.append(self.editLowerCaseAct)\n \n self.editUpperCaseAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Convert selection to upper case'),\n QCoreApplication.translate(\n 'ViewManager', 'Convert selection to upper case'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', 'Ctrl+Shift+U')),\n 0,\n self.editActGrp, 'vm_edit_convert_selection_upper')\n self.esm.setMapping(self.editUpperCaseAct, QsciScintilla.SCI_UPPERCASE)\n self.editUpperCaseAct.triggered.connect(self.esm.map)\n self.editActions.append(self.editUpperCaseAct)", "def action_run(self):\n pass", "def changes(self, **kwargs):\n\n path = self._get_movie_id_path('changes')\n resp = self._get_method(path, kwargs)\n return resp", "def initActions(self):\n self.hgFetchAct = E5Action(\n self.tr('Fetch changes'),\n UI.PixmapCache.getIcon(\"vcsUpdate.png\"),\n self.tr('Fetch changes'),\n 0, 0, self, 'mercurial_fetch')\n self.hgFetchAct.setStatusTip(self.tr(\n 'Fetch changes from a remote repository'\n ))\n self.hgFetchAct.setWhatsThis(self.tr(\n \"\"\"<b>Fetch changes</b>\"\"\"\n \"\"\"<p>This pulls changes from a remote repository into the \"\"\"\n \"\"\"local repository. If the pulled changes add a new branch\"\"\"\n \"\"\" head, the head is automatically merged, and the result of\"\"\"\n \"\"\" the merge is committed. 
Otherwise, the working directory\"\"\"\n \"\"\" is updated to include the new changes.</p>\"\"\"\n ))\n self.hgFetchAct.triggered.connect(self.__hgFetch)\n self.actions.append(self.hgFetchAct)", "def view(self):", "def diff(rev_1, rev_2):\n assert isinstance(rev_1, AbstractRevision)\n assert isinstance(rev_2, AbstractRevision)\n if rev_1.created_at == rev_2.created_at:\n print 'should not compare object to itself.'\n return None\n\n set_1 = Set((\n (k, v) for k, v in zip(\n rev_1.get_values().keys(),\n rev_1.get_values().values()\n )\n if k != u'id' and k != u'created_at' and k != 'tracked_model_id'\n ))\n set_2 = Set((\n (k, v) for k, v in zip(\n rev_2.get_values().keys(),\n rev_2.get_values().values()\n )\n if k != u'id' and k != u'created_at' and k != 'tracked_model_id'\n ))\n\n # new values\n diff = set_1 - set_2 # elements in s but not in t\n # common values\n intersection = set_1 & set_2 # elements common to s and t\n # pairs of changed values\n sym_diff = set_1 ^ set_2 # elements in s and t but not in both\n # changed values - set for consistency\n mod = sym_diff - diff\n return diff, intersection, sym_diff, mod", "def update_controller(self):", "def act(self):\n raise NotImplementedError", "def _iterativediff(t1, t2, subdir):\n if t1 is None:\n t1 = {}\n if t2 is None:\n t2 = {}\n\n for e1 in t1:\n realname = subdir + pycompat.fsencode(e1.name)\n\n if e1.type == pygit2.GIT_OBJ_TREE:\n try:\n e2 = t2[e1.name]\n if e2.type != pygit2.GIT_OBJ_TREE:\n e2 = None\n except KeyError:\n e2 = None\n\n stack.append((realname + b'/', e1, e2))\n else:\n n1, fl1 = self.find(realname)\n\n try:\n e2 = t2[e1.name]\n n2, fl2 = other.find(realname)\n except KeyError:\n e2 = None\n n2, fl2 = (None, b'')\n\n if e2 is not None and e2.type == pygit2.GIT_OBJ_TREE:\n stack.append((realname + b'/', None, e2))\n\n if not match(realname):\n continue\n\n if n1 != n2 or fl1 != fl2:\n result[realname] = ((n1, fl1), (n2, fl2))\n elif clean:\n result[realname] = None\n\n for e2 in t2:\n if e2.name in t1:\n continue\n\n realname = subdir + pycompat.fsencode(e2.name)\n\n if e2.type == pygit2.GIT_OBJ_TREE:\n stack.append((realname + b'/', None, e2))\n elif match(realname):\n n2, fl2 = other.find(realname)\n result[realname] = ((None, b''), (n2, fl2))", "def get_raw_diff(self, review):\r\n return self.http_request('/r/%s/diff/raw/' % review, {})", "def diff(self):\n return [node.diff for node in self]", "def get_actions(node, actions):\n # checkout\n if node.is_revision() and hasattr(node.data, \"update\") \\\n and node.revision.resource_type == tank.constants.ResourceType.SINGLE_FILE:\n # we can only do a checkout if the delegate supports updates\n # don't try to check out sequences\n actions[\"checkout\"] = Action(\"Check-out\", func=checkout, args=[node])\n\n # import\n if hasattr(node.data, \"import_\"):\n # we can only do an import if the delegate supports imports\n actions[\"import\"] = Action(\"Import\", func=node.import_)\n\n if hasattr(node.data, \"update\"):\n # we can only do a replace if the delegate supports updates\n actions[\"replace\"] = Action(\"Replace...\", func=replace, args=[node],\n params=[Param(\"revision\", Param.Revision,\n label=\"Revision\",\n default=(node.revision if node is not None else None))])\n\n if hasattr(node.data, \"update\") and node.is_revision() and node.container.latest_revisions.has_key(node.revision.system.type.system.name) and node.revision != node.container.latest_revisions[node.revision.system.type.system.name]:\n actions[\"latest\"] = Action(\"Update to Latest\",\n 
func=replace_with_latest, args=[node])\n# actions[\"recommended\"] = Action(\"Update to Recommended\",\n# func=replace_with_recommended, args=[node])\n\n if hasattr(node.data, \"update\") and node.container and node.container.system.type.properties.use_name:\n actions[\"container_name_to_filename\"] = Action(\"Set Container Name to Filename\",\n func=set_container_name_to_filename, args=[node])\n\n if node.is_working():\n params = [Param(\"description\", Param.Text, label=\"Description\", default=\"\"),\n Param(\"subset\", Param.NodeList, label=\"Nodes\", default=(), node=node)]\n try:\n if node.revision_type is not None:\n for field_name, field in node.revision_type.fields.items():\n try:\n if field.properties.hints['set_at_publish']:\n if field.properties.type == \"boolean\":\n p = Param(field_name, Param.Boolean, label=field.properties.nice_name, default=False)\n params.append(p)\n except (TypeError, KeyError):\n pass\n except:\n import traceback\n print traceback.print_exc()\n\n# if os.getenv('TOOLSET') not in ('beta', 'dev'):\n if os.getenv('TOOLSET') != 'dev':\n # Disable publish when a dev or beta toolset is used.\n actions[\"publish\"] = Action(\"Publish...\", func=publish, args=[node], params=params)\n\n return actions", "def _action_to_perform(self, ids, operationParams , default={}):\n full_ids=[]\n status=operationParams['status'] \n action=operationParams['action']\n docaction=operationParams['docaction']\n excludeStatuses=operationParams['excludeStatuses']\n includeStatuses=operationParams['includeStatuses']\n \n stopFlag,allIDs=self._get_recursive_parts(ids, excludeStatuses, includeStatuses)\n self._action_ondocuments(allIDs,docaction, status)\n if action:\n idMoves=move_workflow(self, allIDs, action, status)\n self.logging_workflow(idMoves, action, status)\n objId=self.browse(allIDs).with_context({'internal_writing':True}).write(default)\n if objId:\n wf_message_post(self, allIDs, body='Status moved to: {status}.'.format(status=status))\n return objId", "def get_actions(self: object, *args, parameters: dict = None, **kwargs) -> dict:\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/recon/GetActionsV1\n return process_service_request(\n calling_object=self,\n endpoints=Endpoints,\n operation_id=\"GetActionsV1\",\n keywords=kwargs,\n params=handle_single_argument(args, parameters, \"ids\")\n )", "def svn_diff_diff3_2(*args):\n return _diff.svn_diff_diff3_2(*args)", "def test_should_render_on_diff_viewer(self) -> None:\n self.assertTrue(self.action.should_render(\n context=self._create_request_context(\n url_name='view-diff')))", "def diff(self, other):\n if not isinstance(other, Article):\n raise TypeError(\"Can only diff two Articles.\")\n\n ndiffs = 0\n # Check for equality first (don't need to print anything in this case)\n if self == other: # defined via __eq__\n return 0\n\n # Compare all attributes except for time added and opened\n attribs = sorted(set(vars(self)) - {\"time_added\", \"time_opened\"})\n # Get field width (for pretty printing)\n maxlen = max(len(attrib) for attrib in attribs)\n # Check individual keys\n for attrib in attribs:\n # We need to convert authors to a string\n if attrib == \"authors\":\n if self.authors is not None:\n old_value = \", \".join(self.format_authors(\"full\"))\n else:\n old_value = None\n if other.authors is not None:\n new_value = \", \".join(other.format_authors(\"full\"))\n else:\n new_value = None\n # Other attributes can be accessed via the dict\n else:\n old_value = attrgetter(attrib)(self)\n new_value 
= attrgetter(attrib)(other)\n # Compare them\n if old_value is not None and old_value == new_value:\n print(f\"{attrib:>{maxlen}}: {old_value}\")\n else:\n ndiffs += 1\n if old_value is not None:\n print(f\"{attrib:>{maxlen}}: \"\n f\"{_g.ansiDiffRed}- {old_value}{_g.ansiReset}\")\n attrib = \"\" # avoid printing the attribute name twice\n if new_value is not None:\n print(f\"{attrib:>{maxlen}}: \"\n f\"{_g.ansiDiffGreen}+ {new_value}{_g.ansiReset}\")\n return ndiffs", "def run(self):\n try:\n diff = self.get_diff(self.diff_id)\n revision = self.get_revision(diff.revisionID)\n url = f\"https://reviews.llvm.org/D{revision['id']}?id={diff['id']}\"\n annotate(f\"Patching changes [{url}]({url})\", style='info', context='patch_diff')\n self.reset_repository()\n self.revision_id = revision['id']\n dependencies = self.get_dependencies(revision)\n dependencies.reverse() # Now revisions will be from oldest to newest.\n if len(dependencies) > 0:\n logging.info('This diff depends on: {}'.format(revision_list_to_str(dependencies)))\n plan = []\n for r in dependencies:\n if r['statusName'] == 'Closed':\n logging.info(f'skipping revision {r[\"id\"]} - it is closed, assuming it has landed')\n continue\n d = self.get_diff(r['diffs'][0])\n plan.append((r, d))\n plan.append((revision, diff))\n logging.info('Planning to apply in order:')\n for (r, d) in plan:\n logging.info(f\"https://reviews.llvm.org/D{r['id']}?id={d['id']}\")\n # Pick the newest known commit as a base for patches.\n base_commit = None\n for (r, d) in plan:\n c = self.find_commit(d['sourceControlBaseRevision'])\n if c is None:\n logging.warning(f\"D{r['id']}#{d['id']} commit {d['sourceControlBaseRevision']} does not exist\")\n continue\n if base_commit is None:\n logging.info(f\"D{r['id']}#{d['id']} commit {c.hexsha} exists\")\n base_commit = c\n elif c.committed_datetime > base_commit.committed_datetime:\n logging.info(f\"D{r['id']}#{d['id']} commit {c.hexsha} has a later commit date then\"\n f\"{base_commit.hexsha}\")\n base_commit = c\n if self.base_revision != 'auto':\n logging.info(f'Base revision \"{self.base_revision}\" is set by command argument. Will use '\n f'instead of resolved \"{base_commit}\"')\n base_commit = self.find_commit(self.base_revision)\n if base_commit is None:\n base_commit = self.repo.heads['main'].commit\n annotate(f\"Cannot find a base git revision. Will use current HEAD.\",\n style='warning', context='patch_diff')\n self.create_branch(base_commit)\n for (r, d) in plan:\n if not self.apply_diff(d, r):\n return 1\n if self.push_branch:\n self.repo.git.push('--force', 'origin', self.branch_name)\n annotate(f\"Created branch [{self.branch_name}]\"\n f\"(https://github.com/llvm-premerge-tests/llvm-project/tree/{self.branch_name}).\\n\\n\"\n f\"To checkout locally, run in your copy of llvm-project directory:\\n\\n\"\n \"```shell\\n\"\n \"git remote add premerge git@github.com:llvm-premerge-tests/llvm-project.git #first time\\n\"\n f\"git fetch premerge {self.branch_name}\\n\"\n f\"git checkout -b {self.branch_name} --track premerge/{self.branch_name}\\n\"\n \"```\",\n style='success',\n context='patch_diff')\n logging.info('Branch {} has been pushed'.format(self.branch_name))\n return 0\n except Exception as e:\n annotate(f\":bk-status-failed: Unexpected error. 
Consider [creating a bug]({feedback_url()}).\",\n style='error', context='patch_diff')\n logging.error(f'exception: {e}')\n return 1", "def diffRsp(self, n=2): \n # /dsm/saplxglast/home/aloh/ALAN/CygX-3_FermiAnalysis/MODEL/Model_GTDIFFRSP.xml \n\n # --------------------------------------------------------------------------------------------- #\n # Create a directory to store the little FT1 pieces\n self.workpath = os.path.join(self.datapath, 'GTDIFFRSP', 'Runs')\n if self.diffModel is None:\n print('\\t=== A diffuse model file needs to be provided --> self.diffModel ===')\n return\n tmpMETstart = self.metstart\n tmpMETstop = self.metstop\n\n # --------------------------------------------------------------------------------------------- #\n # Split the calculation\n tWidth = 2e6\n self.metstart = tmpMETstart\n self.metstop = tmpMETstart + tWidth\n\n count = 0\n while self.metstop < tmpMETstop + tWidth:\n filesRunning = glob.glob( os.path.join(self.workpath, 'tmp_*.py') )\n while len(filesRunning) == n: \n # Limit the number of parallel runs\n time.sleep(60)\n filesRunning = glob.glob( os.path.join(self.workpath, 'tmp_*.py') )\n\n fil = os.path.join( self.workpath, 'tmp_'+str(count)+'.py' )\n tmp = open(fil, 'w')\n tmp.write(\"import algamma; import os; a=algamma.algamma(); a.ft1='{}';\\\n a.ft2='{}'; a.diffModel='{}'; a.metstart={}; a.metstop={}; a.suffix='_Chk_{}';\\\n a.workpath='{}'; a._initNames(); a._gtSelect(); a._gtMktime();\\\n a._gtDiffrsp(); os.remove(a.outselect); os.remove('{}')\"\n .format(os.path.join(self.datapath, self.ft1),\n os.path.join(self.datapath, self.ft2), self.diffModel,\n self.metstart, self.metstop, count, self.workpath, fil))\n # Launch the file\n os.popen(\"nohup python {} &\".format(fil))\n tmp.close()\n \n count += 1\n self.metstart += tWidth\n self.metstop += tWidth\n\n self._mergeDiffrsp()\n return", "def _do_action(self):\n pass", "def _do_action(self):\n pass", "def edit(self):\n\n pass", "def svn_client_invoke_diff_summarize_func(svn_client_diff_summarize_func_t__obj, svn_client_diff_summarize_t_diff, void_baton, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass", "def response_change(self, request, obj):\n if request.POST.has_key(\"_viewnext\"):\n msg = (_('The %(name)s \"%(obj)s\" was changed successfully.') %\n {'name': force_unicode(obj._meta.verbose_name),\n 'obj': force_unicode(obj)})\n next = obj.__class__.objects.filter(id_xt_lab__gt=obj.id_xt_lab).order_by('id_xt_lab')[:1]\n if next:\n self.message_user(request, msg)\n return HttpResponseRedirect(\"../%s/\" % next[0].pk)\n return super(xtlabAdmin, self).response_change(request, obj)", "def _get_objects(\n self, model: Model, old_objects: List[Viewable], doc: Document,\n root: Model, comm: Optional[Comm] = None\n ):\n from ..pane.base import RerenderError, panel\n new_models, old_models = [], []\n for i, pane in enumerate(self.objects):\n pane = panel(pane)\n self.objects[i] = pane\n\n for obj in old_objects:\n if obj not in self.objects:\n obj._cleanup(root)\n\n current_objects = list(self.objects)\n ref = root.ref['id']\n for i, pane in enumerate(self.objects):\n if pane in old_objects and ref in pane._models:\n child, _ = pane._models[root.ref['id']]\n old_models.append(child)\n else:\n try:\n child = pane._get_model(doc, root, model, comm)\n except RerenderError as e:\n if e.layout is not None and e.layout is not self:\n raise e\n e.layout = None\n return self._get_objects(model, current_objects[:i], doc, root, comm)\n new_models.append(child)\n return new_models, 
old_models", "def handle(self, *args, **options):\n # Mitglied\n self.stdout.write('Deleting entries from Mitglied Historie...')\n mitglied_history = Mitglied.history.filter(history_date__lte=timezone.now()-timedelta(days=365))\n mitglied_counter = 0\n for entry in mitglied_history:\n if not Mitglied.objects.get(id=entry.mitglied.id):\n entry.delete()\n mitglied_counter += 1\n self.stdout.write('Deleted ' + str(mitglied_counter) + ' entries from Mitglied Historie older than 1 year')\n (mitglied_counter, _) = Mitglied.history.filter(history_date__lte=timezone.now()-timedelta(days=1825)).delete()\n self.stdout.write('Deleted ' + str(mitglied_counter) + ' entries from Mitglied Historie older than 5 years')\n\n # MitgliedAmt\n self.stdout.write('Deleting entries from MitgliedAmt Historie...')\n mitglied_amt_history = MitgliedAmt.history.filter(history_date__lte=timezone.now()-timedelta(days=365))\n mitglied_amt_counter = 0\n for entry in mitglied_amt_history:\n if not Mitglied.objects.get(id=entry.mitglied_id):\n entry.delete()\n mitglied_amt_counter += 1\n self.stdout.write('Deleted ' + str(mitglied_amt_counter) + ' entries from MitgliedAmt Historie older than 1 year')\n (mitglied_amt_counter, _) = MitgliedAmt.history.filter(history_date__lte=timezone.now()-timedelta(days=1825)).delete()\n self.stdout.write('Deleted ' + str(mitglied_amt_counter) + ' entries from MitgliedAmt Historie older than 5 years')\n\n # MitgliedMail\n self.stdout.write('Deleting entries from MitgliedMail Historie...')\n mitglied_mail_history = MitgliedMail.history.filter(history_date__lte=timezone.now()-timedelta(days=365))\n mitglied_mail_counter = 0\n for entry in mitglied_mail_history:\n if not Mitglied.objects.get(id=entry.mitglied_id):\n entry.delete()\n mitglied_mail_counter += 1\n self.stdout.write('Deleted ' + str(mitglied_mail_counter) + ' entries from MitgliedMail Historie older than 1 year')\n (mitglied_mail_counter, _) = MitgliedMail.history.filter(history_date__lte=timezone.now()-timedelta(days=1825)).delete()\n self.stdout.write('Deleted ' + str(mitglied_mail_counter) + ' entries from MitgliedMail Historie older than 5 years')", "def do(self, obj1: Entity, obj2: Entity, random_state_obj: RandomState) -> Entity:\n pass", "def perform(self):\n pass", "def _run_diff(oldfile, newfile):\n # TODO: It may be nicer to use the internal diff engine for this.\n # For one, this would use the correct colors set up for hg\n # diff rather than the colors set up for colordiff. 
It's not\n # clear to me how this can be done though, and if it is\n # worth the bother.\n _call_subprocesses(\"diff or colordiff\",\n [\"colordiff\", \"-u\", oldfile, newfile],\n [\"diff\", \"-u\", oldfile, newfile])", "def svn_diff_file_diff3(*args):\n return _diff.svn_diff_file_diff3(*args)", "def action_index(biv_obj):\n return pnf.Nomination().execute(biv_obj)", "def gitDiff(self, troveSource, diffBinaries=True):\n jobs = list(self.getJobSet())\n oldTroves = troveSource.getTroves(\n [ (x[0], x[1][0], x[1][1]) for x in jobs if x[1][0] is not None ])\n\n # get the old file objects we need\n filesNeeded = []\n for job in jobs:\n if job[1][0] is not None:\n oldTrv = oldTroves.pop(0)\n else:\n oldTrv = None\n\n if self.hasNewTrove(job[0], job[2][0], job[2][1]):\n trvCs = self.getNewTroveVersion(job[0], job[2][0], job[2][1])\n\n # look at the changed files and get a list of file objects\n # we need to have available\n for (pathId, path, fileId, fileVersion) in \\\n trvCs.getChangedFileList():\n oldPath = oldTrv.getFile(pathId)[0]\n if fileVersion:\n filesNeeded.append(\n (pathId, ) + oldTrv.getFile(pathId)[1:3] + (oldPath, ))\n\n for pathId in trvCs.getOldFileList():\n oldPath = oldTrv.getFile(pathId)[0]\n filesNeeded.append((pathId, ) +\n oldTrv.getFile(pathId)[1:3] +\n (oldPath, ))\n else:\n filesNeeded.extend((pathId, fileId, version, path)\n for pathId, path, fileId, version in oldTrv.iterFileList())\n\n fileObjects = troveSource.getFileVersions(\n [ x[0:3] for x in filesNeeded ])\n\n # now look at all of the files, new and old, to order the diff right\n # so we don't have to go seeking all over the changeset\n configList = []\n normalList = []\n removeList = []\n encapsulatedList = []\n for job in jobs:\n if self.hasNewTrove(job[0], job[2][0], job[2][1]):\n trvCs = self.getNewTroveVersion(job[0], job[2][0], job[2][1])\n for (pathId, path, fileId, fileVersion) in \\\n trvCs.getNewFileList():\n fileStream = self.getFileChange(None, fileId)\n if trvCs.hasCapsule():\n encapsulatedList.append((pathId, fileId,\n (None, None, None, None),\n (path, fileId, fileStream)))\n elif files.frozenFileFlags(fileStream).isConfig():\n configList.append((pathId, fileId,\n (None, None, None, None),\n (path, fileId, fileStream)))\n else:\n normalList.append((pathId, fileId,\n (None, None, None, None),\n (path, fileId, fileStream)))\n\n for (pathId, path, fileId, fileVersion) in \\\n trvCs.getChangedFileList():\n oldFileObj = fileObjects.pop(0)\n fileObj = oldFileObj.copy()\n oldFileId, oldFileVersion, oldPath = filesNeeded.pop(0)[1:4]\n diff = self.getFileChange(oldFileId, fileId)\n # check if new and old files are of the same type\n if fileObj.lsTag == diff[1]:\n fileObj.twm(diff, fileObj)\n else:\n fileObj = troveSource.getFileVersion(\n pathId, fileId, fileVersion)\n\n if path is None:\n path = oldPath\n\n if trvCs.hasCapsule():\n encapsulatedList.append((pathId, fileId,\n (oldPath, oldFileId, oldFileVersion, oldFileObj),\n (path, fileId, fileObj.freeze())))\n elif fileObj.flags.isConfig():\n configList.append((pathId, fileId,\n (oldPath, oldFileId, oldFileVersion,\n oldFileObj),\n (path, fileId, fileObj.freeze())))\n else:\n normalList.append((pathId, fileId,\n (oldPath, oldFileId, oldFileVersion,\n oldFileObj),\n (path, fileId, fileObj.freeze())))\n\n for pathId in trvCs.getOldFileList():\n oldFileObj = fileObjects.pop(0)\n oldFileId, oldFileVersion, oldPath = filesNeeded.pop(0)[1:4]\n removeList.append((oldPath, oldFileObj))\n else:\n for (pathId, fileId, version, path), fileObj in \\\n 
itertools.izip(filesNeeded, fileObjects):\n removeList.append((path, fileObj))\n\n for path, fileObj in removeList:\n yield \"diff --git a%s b%s\\n\" % (path, path)\n yield \"deleted file mode %o\\n\" % (fileObj.statType |\n fileObj.inode.perms())\n yield \"Binary files %s and /dev/null differ\\n\" % path\n\n configList.sort()\n normalList.sort()\n encapsulatedList.sort()\n\n for (pathId, fileId, oldInfo, newInfo) in \\\n itertools.chain(configList, normalList):\n newInfo = newInfo[0:2] + (files.ThawFile(newInfo[2], pathId),)\n for x in self._makeFileGitDiff(troveSource, pathId,\n oldInfo, newInfo, diffBinaries):\n yield x\n\n for (pathId, fileId, oldInfo, newInfo) in encapsulatedList:\n newInfo = newInfo[0:2] + (files.ThawFile(newInfo[2], pathId),)\n for x in self._makeFileGitDiffCapsule(troveSource, pathId,\n oldInfo, newInfo, diffBinaries):\n yield x", "def display_diff(self):\n # well, it *will* be the old content\n old = copy.copy(self.content_object)\n\n # newer non-reverted revisions of this content_object,\n # starting from this\n if not self.delta:\n return \"\"\n newer_changesets = Revision.objects.get_for_object(\n self.content_object\n ).filter(revision__gte=self.revision)\n\n model = self.content_object.__class__\n fields = _registry[model]\n # apply all patches to get the content of this revision\n for i, changeset in enumerate(newer_changesets):\n diffs = diff_split_by_fields(changeset.delta)\n if len(newer_changesets) == i + 1:\n # we need to compare with the next revision\n # after the change\n next_rev = copy.copy(old)\n for key, diff in diffs.items():\n model_name, field_name = key.split('.')\n if model_name != model.__name__ or field_name not in fields:\n continue\n patches = dmp.patch_fromText(diff)\n setattr(\n old,\n field_name,\n dmp.patch_apply(\n patches,\n get_field_data(old, field_name)\n )[0]\n )\n\n result = []\n for field_name in fields:\n result.append(\"<b>{0}</b>\".format(field_name))\n diffs = dmp.diff_main(\n get_field_data(old, field_name),\n get_field_data(next_rev, field_name)\n )\n result.append(dmp.diff_prettyHtml(diffs))\n return \"<br />\\n\".join(result)", "def sort(self, *args, **kwargs) -> \"Actions\":\n self.actions.sort(*args, **kwargs)\n return self", "def svn_diff_file_diff_2(*args):\n return _diff.svn_diff_file_diff_2(*args)", "def actions() -> None:\n pass", "def update_view(self):\n for row in self.view.obj_list:\n for obj in row:\n obj._update(self.model)", "def revert(self, *args, **kwargs):", "def _action(self):\n pass", "def diff(self):\n return differential(self)", "def svn_diff_output_fns_invoke_output_diff_modified(_obj, output_baton, original_start, original_length, modified_start, modified_length, latest_start, latest_length):\n return _diff.svn_diff_output_fns_invoke_output_diff_modified(_obj, output_baton, original_start, original_length, modified_start, modified_length, latest_start, latest_length)", "def execute(self):\r\n self.changeAttr(\"changeType\", \"delete\")\r\n self.changeAttr(\"changeMark\", \"1\")", "def _gen_diff_patch(self, git_object_a, git_object_b, src_alias, dst_alias,\n cwd, deps_rev):\n # The prefixes used in the command below are used to find and replace the\n # tree-ish git object id's on the diff output more easily.\n cmd = 'diff %s %s --src-prefix=IAMSRC: --dst-prefix=IAMDST:'\n cmd %= (git_object_a, git_object_b)\n cmd = cmd.split(' ')\n stdout = self.api.m.raw_io.output()\n step_name = 'Generating patch for %s to %s' % (git_object_a, deps_rev)\n step_result = self.api.m.git(*cmd, cwd=cwd, 
stdout=stdout, name=step_name)\n patch_text = step_result.stdout\n src_string = 'IAMSRC:' + git_object_a\n dst_string = 'IAMDST:' + git_object_b\n patch_text = patch_text.replace(src_string, src_alias)\n patch_text = patch_text.replace(dst_string, dst_alias)\n return patch_text" ]
[ "0.5404981", "0.53203255", "0.52869326", "0.5260477", "0.52597624", "0.5186789", "0.518508", "0.5163919", "0.514824", "0.50897974", "0.50714517", "0.50684255", "0.50684255", "0.5065854", "0.5063287", "0.50580245", "0.5037629", "0.50206566", "0.50169116", "0.5014264", "0.5000727", "0.4991861", "0.49900994", "0.4989343", "0.49879155", "0.49770758", "0.4972681", "0.49633923", "0.4942804", "0.49408433", "0.49251106", "0.4923192", "0.49060196", "0.49021477", "0.489084", "0.48861527", "0.48783988", "0.48777208", "0.48777208", "0.48728606", "0.4868089", "0.4861921", "0.48584372", "0.4855922", "0.4855508", "0.484963", "0.48480478", "0.48446143", "0.484432", "0.4828477", "0.48217273", "0.48181567", "0.47994977", "0.4783587", "0.4769153", "0.47535142", "0.47520345", "0.47486472", "0.47484797", "0.47378367", "0.47365654", "0.47251773", "0.4719759", "0.47169566", "0.47128966", "0.47007817", "0.46991932", "0.4697173", "0.46926147", "0.46876264", "0.4681261", "0.46785355", "0.46738803", "0.46735883", "0.46670976", "0.46646816", "0.46642938", "0.46642938", "0.4663555", "0.4660289", "0.46594545", "0.46525168", "0.46448475", "0.4636006", "0.46329373", "0.46321037", "0.46252996", "0.46215102", "0.46211958", "0.46194032", "0.46154535", "0.46139947", "0.4610693", "0.4606535", "0.45944765", "0.45939282", "0.4592859", "0.45704103", "0.45696288", "0.4560231" ]
0.5864252
0
Method to scan a product. Adds the product order to the list of orders.
def scan(self, product_code):
    self.order.add_product(product_code)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add(self, product):\n pass", "def orderWatch(self, order):\r\n\t\tself.orders.append(order)", "def add_product(self):\n self.owner.new_product(self.barcode, self.description, self.price, self._add_product_callback)", "def orderWatch(self, order):\r\n\t\tself.pair.orders.append(order)", "def process(self, order):\r\n self._elements.append(order)", "def ProcessOrder(product_id):\n product = Product.query.filter_by(product_id = product_id)\n \n if (product):\n product.qty = product \n db.session.commit()", "def product(self, product):\n self._product = product", "def insert(self, product):\n pass", "def payload_add_products(self, payload: dict, order: Order, language: str):\n order_lines: [OrderLine] = OrderLine.objects.filter(order=order.id)\n items: [dict] = []\n\n area = resolve_area(order)\n\n # Additional product orders doesn't have berth product\n if hasattr(order, \"product\") and order.product:\n product = order.product\n int_tax = int(order.tax_percentage)\n assert (\n int_tax == product.tax_percentage\n ) # make sure the tax is a whole number\n with override(language):\n lease = order.lease\n place = (\n lease.berth\n if hasattr(lease, \"berth\")\n else lease.place\n if hasattr(lease, \"place\") and lease.place\n else lease.section\n if hasattr(lease, \"section\") and lease.section\n else area\n )\n product_name = f\"{product.name}: {place}\"\n items.append(\n {\n \"id\": get_talpa_product_id(product.id, area),\n \"title\": product_name,\n \"price\": price_as_fractional_int(order.price),\n \"pretax_price\": price_as_fractional_int(order.pretax_price),\n \"tax\": int_tax,\n \"count\": 1,\n \"type\": 1,\n }\n )\n\n for order_line in order_lines:\n product: AdditionalProduct = order_line.product\n int_tax = int(product.tax_percentage)\n assert (\n int_tax == product.tax_percentage\n ) # make sure the tax is a whole number\n with override(language):\n product_name = product.name\n items.append(\n {\n \"id\": get_talpa_product_id(\n product.id,\n area,\n is_storage_on_ice=product.service\n == ProductServiceType.STORAGE_ON_ICE,\n ),\n \"title\": product_name,\n \"price\": price_as_fractional_int(order_line.price),\n \"pretax_price\": price_as_fractional_int(order_line.pretax_price),\n \"tax\": int_tax,\n \"count\": order_line.quantity,\n \"type\": 1,\n }\n )\n payload[\"amount\"] = price_as_fractional_int(order.total_price)\n payload[\"products\"] = items", "def agregar_producto(self, producto):\n\n self.productos.append(producto)", "def product(self, product):\n\n self._product = product", "def product(self, product):\n\n self._product = product", "def add_products():\n result = order_obj.add_products(request.forms) \n return result", "def btn_create_order_pro(self):\n\t\tprint()\n\t\tprint('treatment - btn_create_order_pro')\n\n\t\t# Search Partner\n\t\tpartner = tre_funcs.get_partner(self, self.patient.name)\n\n\t\t# Search pricelist\n\t\tpricelist = tre_funcs.get_pricelist(self)\n\n\t\t# Search product\n\t\t# Create Product tuple\n\t\tproduct_tup = []\n\t\t#for service in self.service_all_ids:\n\t\tfor service in self.service_ids:\n\t\t\t#print()\n\t\t\t#print('* Create Product tuple')\n\t\t\t#print(service)\n\t\t\t#print(service.service)\n\t\t\t#print(service.service.name)\n\t\t\t#print(service.qty)\n\t\t\t#print(service.service.list_price)\n\t\t\t\n\t\t\t# Init\n\t\t\tproduct_template = service.service\n\t\t\tname = service.service.name\n\t\t\tqty = service.qty\n\t\t\tprice = service.service.list_price\n\t\t\t\n\t\t\t# Check Exceptions\n\t\t\ttry:\n\t\t\t\tprice_list = 
'2019'\n\t\t\t\tproduct = tre_funcs.get_product_product(self, name, price_list)\n\t\t\t\tproduct_tup.append((product, qty, price))\n\n\t\t\texcept Exception:\n\t\t\t\tprint('ERROR - Treatment - Product not in 2019 price_list !')\n\t\t\t\tprint('Search in other price_lists')\n\n\t\t\t\ttry:\n\t\t\t\t\tprice_list = False\n\t\t\t\t\tproduct = tre_funcs.get_product(self, name, price_list)\n\t\t\t\t\tprint(product)\n\t\t\t\t\tproduct_tup.append((product, qty, price))\n\n\t\t\t\texcept Exception:\n\t\t\t\t\tprint('ERROR - Treatment - Product Not Available at all !!!!!')\n\n\t\t\t#else:\n\t\t\t#\tprint('jx - Else !')\n\t\t\t\t#pass\n\n\n\t\t\t# Check \n\t\t\ttre_funcs.check_product(self, '2019', product, product_template)\n\t\t\n\t\t# Create order \n\t\torder = pl_creates.create_order(self, partner.id, pricelist.id, product_tup)\n\t\tprint(order)\n\n\t\t# Open Order\n\t\treturn action_funcs.open_order(order)", "def test_add_product_to_cart(self, driver):\n logging.info(\"Start test case: Continue Shop\")\n data = self.test_data[\"Continue Shop\"][\"Products\"][0]\n logging.info(f\"Test data: [{data}]\")\n product_name = data[\"Product Name\"]\n\n select_product(driver, data[\"Page\"], product_name)\n add_product_to_cart(driver, data[\"Size\"], data[\"Color\"], data[\"Quantity\"])\n assert is_product_in_cart(driver, product_name)\n continue_shopping_from_order_summary(driver)\n assert verify_current_page_is_home(driver)", "def place_order(self, **kwargs):\r\n create_options = self._generate_create_dict(**kwargs)\r\n return self.client['Product_Order'].placeOrder(create_options)", "def on_scan(self, product):\n self.new_product = product\n if self.active:\n self.sm.on_state_event(self.events.SCAN)", "def process_order(self, order_event : event.EventOrder) :\n pass", "def products(self, products):\n\n self._products = products", "def products(self, products):\n\n self._products = products", "async def on_order_updated(self, order: MetatraderOrder):\n for i in range(len(self._orders)):\n if self._orders[i]['id'] == order['id']:\n self._orders[i] = order\n break\n else:\n self._orders.append(order)", "def addProduct(self, product):\n self._checkDeleted()\n product._checkDeleted()\n\n productPath = self.productSearch.productClient.product_path(\n project=self.productSearch.projectId, location=self.productSearch.location, product=product.productId)\n\n self.productSearch.productClient.add_product_to_product_set(name=self.productSetPath, product=productPath)", "def _serialize_order_and_product_data(order_data:dict):\n\n placed_orders = []\n ordered_products = []\n\n for order in order_data:\n if order[\"financial_status\"] not in COMPLETE_ORDER_STATUSES:\n continue\n \n items = []\n products = []\n for item in order[\"line_items\"]:\n items.append(\n {\n \"ProductID\": item[\"id\"],\n \"SKU\": item[\"sku\"],\n \"ProductName\": item[\"title\"],\n \"Quantity\": item[\"quantity\"],\n \"ItemPrice\": item[\"name\"]\n }\n )\n\n products.append(\n {\n \"token\": PUBLIC_KEY,\n \"event\": \"Ordered Product\",\n \"customer_properties\": {\n \"$email\": order[\"customer\"][\"email\"],\n \"$first_name\": order[\"customer\"][\"first_name\"],\n \"$last_name\": order[\"customer\"][\"last_name\"]\n },\n \"properties\": {\n \"$event_id\": item[\"id\"],\n \"$value\": item[\"price\"],\n \"ProductID\": item[\"product_id\"],\n \"SKU\": item[\"sku\"],\n \"ProductName\": item[\"title\"],\n \"Quantity\": item[\"quantity\"]\n }\n }\n )\n \n ordered_products.append({\"order_id\":order[\"id\"], \"body\": products})\n\n 
placed_orders.append(\n {\n \"token\": PUBLIC_KEY,\n \"event\": \"Placed Order\",\n \"customer_properties\": {\n \"$email\": order[\"customer\"][\"email\"],\n \"$first_name\": order[\"customer\"][\"first_name\"],\n \"$last_name\": order[\"customer\"][\"last_name\"],\n \"$phone_number\": order[\"customer\"][\"phone\"],\n \"$address1\": order[\"customer\"][\"default_address\"][\"address1\"] if \"default_address\" in order[\"customer\"].keys() else None,\n \"$address2\": order[\"customer\"][\"default_address\"][\"address2\"] if \"default_address\" in order[\"customer\"].keys() else None,\n \"$city\": order[\"customer\"][\"default_address\"][\"city\"] if \"default_address\" in order[\"customer\"].keys() else None,\n \"$zip\": order[\"customer\"][\"default_address\"][\"zip\"] if \"default_address\" in order[\"customer\"].keys() else None,\n \"$region\": order[\"customer\"][\"default_address\"][\"province_code\"] if \"default_address\" in order[\"customer\"].keys() else None,\n \"$country\": order[\"customer\"][\"default_address\"][\"country_name\"] if \"default_address\" in order[\"customer\"].keys() else None,\n },\n \"properties\": {\n \"$event_id\": order[\"id\"],\n \"$value\": order[\"total_price\"],\n \"ItemNames\": [item[\"name\"] for item in order[\"line_items\"]],\n \"DiscountCode\": order[\"discount_codes\"],\n \"DiscountValue\": order[\"total_discounts\"],\n \"Items\": items,\n \"BillingAddress\": None if \"billing_address\" not in order.keys() else\n {\n \"FirstName\": order[\"billing_address\"][\"first_name\"],\n \"LastName\": order[\"billing_address\"][\"last_name\"],\n \"Company\": order[\"billing_address\"][\"company\"],\n \"Addaress1\": order[\"billing_address\"][\"address1\"],\n \"Address2\": order[\"billing_address\"][\"address2\"],\n \"City\": order[\"billing_address\"][\"city\"],\n \"Region\": order[\"billing_address\"][\"province\"],\n \"RegionCode\": order[\"billing_address\"][\"province_code\"],\n \"Country\": order[\"billing_address\"][\"country\"],\n \"CountryCode\": order[\"billing_address\"][\"country_code\"],\n \"Zip\": order[\"billing_address\"][\"zip\"],\n \"Phone\": order[\"billing_address\"][\"phone\"]\n },\n \"ShippingAddress\": None if \"shipping_address\" not in order.keys() else\n {\n \"FirstName\": order[\"shipping_address\"][\"first_name\"],\n \"LastName\": order[\"shipping_address\"][\"last_name\"],\n \"Company\": order[\"shipping_address\"][\"company\"],\n \"Addaress1\": order[\"shipping_address\"][\"address1\"],\n \"Address2\": order[\"shipping_address\"][\"address2\"],\n \"City\": order[\"shipping_address\"][\"city\"],\n \"Region\": order[\"shipping_address\"][\"province\"],\n \"RegionCode\": order[\"shipping_address\"][\"province_code\"],\n \"Country\": order[\"shipping_address\"][\"country\"],\n \"CountryCode\": order[\"shipping_address\"][\"country_code\"],\n \"Zip\": order[\"shipping_address\"][\"zip\"],\n \"Phone\": order[\"shipping_address\"][\"phone\"]\n }\n },\n \"time\": int(time.time())\n }\n )\n \n return placed_orders, ordered_products", "def on_order(self, order: OrderData):\n pass", "def on_order(self, order: OrderData):\n pass", "def on_order(self, order: OrderData):\n pass", "def test_get_order_by_product(self):\n test_order = self._create_orders(1)[0]\n resp = self.app.get('/orders/products/{}'.format(test_order.product_id),\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = resp.get_json()[0]\n self.assertEqual(data['uuid'], test_order.uuid)", "def order(self, typ, price, volume):\r\n 
self.count_submitted += 1\r\n self.client.send_order_add(typ, price, volume)", "def on_order(self, order: OrderData):\n # print(\"on_order\")\n # print(order)\n pass", "def add_product(self, product):\n return self._make_post_request(self._urls['products'],\n data=dict(name=product))", "def place_order(self, order_event):\n self._check_day_data(order_event.order_time)\n if order_event.order_type == 'MARKET':\n self._fill_market_order(order_event)\n elif order_event.order_type == 'LIMIT':\n if self._check_limit_order(order_event, order_event.order_time):\n pass\n self.resting_orders.append(order_event)", "def add_product(self, store, product):\n self.db.query(\"\"\"\n INSERT IGNORE INTO product_store(product_id, store_id)\n VALUES (:product_id, :store_id)\n \"\"\", product_id=product.id, store_id=store.id)", "def register_product(p: Product) -> ExecRet:\n market = get_market()\n pid = p.pid\n if pid in market.products.keys():\n return ExecRet.err(message='pid %d already exists' % pid)\n market.add_product(p)\n LOGGER.info('added product %s' % p.json())\n return ExecRet.ok()", "def orders(self, orders):\n\n self._orders = orders", "def orders(self, orders):\n\n self._orders = orders", "def add_imported(products):\n \n for product in products:\n add_product(product[\"product_name\"], product[\"product_quantity\"], product[\"product_price\"], product[\"date_updated\"])", "def update_order(self, orderId, order_item):\n \n for order in self.order_lst:\n if int(order.get_orderId()) == int(orderId):\n order.add_item(order_item)\n return None\n \n new_order = Order(orderId)\n new_order.add_item(order_item)\n self.order_lst.append(new_order)", "def add(self, product):\n product_id = str(product.id)\n self.wishlist[product_id] = {'price': str(product.price)}\n self.save()", "def insert_products(self):\n logic = ProductLogic()\n \n try:\n # We create the list of product objects\n products = self.objects_factory.create_product_object_list()\n products = set(products)\n\n for product in products:\n logic.insert(product)\n except:\n print('Il y a eu un problème lors de la récupération des données, veuillez rééssayer')", "def add_products(self, products):\n return [self.add_product(product) for product in set(products)]", "def add_product_ids(self, driver, productlist):\n try:\n frame = driver.find_element_by_class_name(\"divBorder\")\n products = frame.find_elements_by_class_name(\"divProduct\")\n for i in products:\n productlist.append(i.find_element_by_class_name('divItemNumber').text)\n except NoSuchElementException:\n # No product_ids were found.\n pass", "def order(self, order):\n\n #print(\"Evaluating order: \", order)\n if self.security != order.secid:\n raise (\"Cannot place order for security \"\n \"%s on book[%s]\" % (order.security, self.security))\n\n levels = self.bid\n if order.side == Side.SELL:\n levels = self.offer\n\n new_level = OrderBookLevel(price=order.price, qty=order.qty, order_count=1)\n start_index = levels.bisect_right(new_level)\n levels.insert(start_index, new_level)\n OrderBookUtils.compact(levels, start_index)\n\n # Trim list\n if order.side == Side.SELL:\n # Delete from end of list - highest offers\n size = len(self.offer)\n if size > MAX_DEPTH:\n for _ in itertools.repeat(None, size - MAX_DEPTH):\n del self.offer[-1]\n else:\n # Delete from start of list - lowest bids\n size = len(self.bid)\n if size > MAX_DEPTH:\n for _ in itertools.repeat(None, size - MAX_DEPTH):\n del self.bid[0]\n\n return self.match(order.side)", "def update(self, request, pk=None):\n 
ordered_products = set()\n order = Order.objects.get(pk=pk)\n payment = Payment.objects.get(pk=request.data[\"payment_type\"])\n order.payment_type = payment\n order.save()\n if order.payment_type is not \"NULL\":\n ordered_items = order.invoiceline.all()\n\n for oi in ordered_items:\n ordered_products.add(oi.product)\n\n products = list(ordered_products)\n\n for p in products:\n num_sold = p.item.filter(order=order).count()\n p.quantity = p.new_inventory(num_sold)\n p.save()\n\n return Response({}, status=status.HTTP_204_NO_CONTENT)", "def post(self):\n args = parser.parse_args()\n return Products().add_product(\n args['name'],\n args['quantity'],\n args['price'],\n args['reorder'])", "def add_product(self, name, cost, stock, location):\n\n cur.execute(\"\"\"INSERT INTO catalogue(vendorname, productname, unitcost, stock, location) \n VALUES (?, ?, ?, ?, ?)\"\"\", (self.vendorname, name, cost, stock, location))", "def add_product(self, product: Product):\n log.debug(\"Adding a new product\")\n product_parameters = product.to_db()\n try:\n with DBCursor(self.host) as cursor:\n cursor.execute(\"INSERT INTO items VALUES (?, ?, ?, ?, ?)\", (product_parameters['name'].lower(), product_parameters['units'], product_parameters['last_buy'], product_parameters['cost'], product_parameters['price']))\n except sqlite3.IntegrityError:\n log.critical(\"An integrity error was raised. Maybe a matching name or id.\")\n raise DatabaseIntegrityError(\"There's a matching name or id already stored.\")\n else:\n log.info(f\"{product.__repr__} was added successfully.\")", "def fill_data_product(self):\n self.product.fill_data_product(self.list_products, self.mycursor, self.my_database)", "def add_to_cart(self, cart_id, product):\n # Go through each producer\n for i in range(len(self.buff)):\n with self.locks[i]:\n # Go through his queue\n for prod in self.buff[i]:\n # If the product is found take it\n # Add it to the cart\n # Also keep the id of the producer in case we want to return it\n if product.__eq__(prod):\n self.carts[cart_id].append((prod, i))\n self.buff[i].remove(prod)\n return True\n return False", "def add_product(cls, product_name, price, quantity):\n Product.insert(product_name=product_name,\n product_price=price,\n product_quantity=quantity,\n date_updated=date.today()).on_conflict(\n conflict_target=[Product.product_name],\n preserve=[Product.product_price,\n Product.product_quantity,\n Product.date_updated]).execute()\n print(f'\\nProduct added successfully!')\n print(f'Product: {product_name} ' +\n f'Price: ${int(price) / 100:.2f} ' +\n f'Quantity: {quantity}\\n')", "def on_order(self, order: OrderData):\n pass\n\n # self.write_log(f\"on_order: status:{order.status}, orderid: {order.vt_orderid}, offset:{order.offset}, price:{order.price}, volume:{order.volume}, traded: {order.traded}\")\n # self.put_event()", "def add_item(self, order_item):\n self.order_items.append(order_item)", "def update_orders(self, processed_orders_export=None):\n existing_order_ids = set(Order.objects.values_list(\"order_id\", flat=True))\n processed_orders_export = processed_orders_export or ProcessedOrdersExport()\n processed_orders = processed_orders_export.orders\n for order_id, order_rows in processed_orders.items():\n if order_id in existing_order_ids:\n continue\n row = order_rows[0]\n order = self.create_order(order_id, row)\n order.save()\n order_skus = []\n for product_row in order_rows:\n if product_row[ProcessedOrdersExport.COMPOSITE_PARENT_SKU]:\n continue\n if product_row[ProcessedOrdersExport.SKU] in 
order_skus:\n product_sale = ProductSale.objects.get(\n sku=product_row[ProcessedOrdersExport.SKU],\n order__order_id=order_id,\n )\n product_sale.quantity += int(\n product_row[ProcessedOrdersExport.QUANTITY]\n )\n product_sale.save()\n else:\n product_sale = self.create_product_sale(order, product_row)\n product_sale.save()\n order_skus.append(product_sale.sku)\n try:\n order._set_calculated_shipping_price()\n except Exception:\n pass", "def test_order_product(self):\n self.client.force_authenticate(self.user)\n resp = self.client.post(ORDER_URL, data={\n \"product\": self.product.id,\n \"count\": 1,\n \"option_value\": self.option_value.id\n })\n self.assertEqual(resp.status_code, status.HTTP_201_CREATED)", "def get_order(self):\n #store the orders for the current cycle inside the class\n self.orders = self.firebase.get_data(\"orders\")", "def order_book(self, order_details):\n order_date = datetime.date.today()\n self.cursor.execute(\"INSERT INTO orderlog (loginID, orderDate) VALUES (%s, %s)\",\n (order_details['loginID'], order_date))\n order_id = self.cursor.lastrowid\n for i in range(len(order_details['ISBN'])):\n self.cursor.execute(\"INSERT INTO productof Values (%s, %s, %s)\",\n (order_details['ISBN'][i], order_id, order_details['quantity'][i]))\n self.cursor.execute(\"UPDATE book SET stock=stock-%s WHERE ISBN=%s\",\n (order_details['quantity'][i], order_details['ISBN'][i]))\n self.db.commit()\n return order_id", "def process_new_order(self, order_event):\n self._check_day_data(order_event.order_time)\n self.place_order(order_event)", "def test_product_search(self):\n\n flag = \"user\"\n api = \"product.product.add\"\n current_page = 1\n search_info = json.dumps({\n 'name': \"可爱的小蓝牙呀\"\n })\n print('start------------------------>add')\n result = self.access_api(flag = flag, api = api, current_page = current_page, product_info = search_info)", "def add_orders(self, response_data):\n orders = response_data[self.DATA][self.DATA]\n for order in orders:\n self.orders.append(self.process_order_data(order))", "def on_order(self, order: OrderData):\n\n if order.vt_orderid not in (self.short_orders + self.long_orders):\n return\n\n self.pos_calculator.update_position(order)\n\n self.current_pos = self.pos_calculator.pos\n self.avg_price = self.pos_calculator.avg_price\n\n if order.status == Status.ALLTRADED:\n\n if order.vt_orderid in self.long_orders:\n self.long_orders.remove(order.vt_orderid)\n self.trade_count += 1\n\n short_price = order.price + self.step_price\n if short_price <= self.high_price:\n orders = self.short(short_price, self.order_volume)\n self.short_orders.extend(orders)\n\n if len(self.long_orders) < self.max_open_orders:\n long_price = order.price - self.step_price * self.max_open_orders\n if long_price >= self.low_price:\n orders = self.buy(long_price, self.order_volume)\n self.long_orders.extend(orders)\n\n if order.vt_orderid in self.short_orders:\n self.short_orders.remove(order.vt_orderid)\n self.trade_count += 1\n long_price = order.price - self.step_price\n if long_price >= self.low_price:\n orders = self.buy(long_price, self.order_volume)\n self.long_orders.extend(orders)\n\n if len(self.short_orders) < self.max_open_orders:\n short_price = order.price + self.step_price * self.max_open_orders\n if short_price <= self.high_price:\n orders = self.short(short_price, self.order_volume)\n self.short_orders.extend(orders)\n\n if not order.is_active():\n if order.vt_orderid in self.long_orders:\n self.long_orders.remove(order.vt_orderid)\n\n elif order.vt_orderid in 
self.short_orders:\n self.short_orders.remove(order.vt_orderid)\n\n self.put_event()", "def search_orders(product_id='', customer_id='', quantity='', pay='', send='', location=''):\n with MY_CONNECTION as connection:\n cursor = connection.cursor()\n cursor.execute(\n \"\"\"\n SELECT id_order, id_customer, id_product, quantity, total_price, payment_status,\n send_status, order_date, location\n FROM Orders\n WHERE id_customer=? OR id_product=? OR quantity=? OR payment_status=? OR\n send_status=? OR location=?\n \"\"\",\n (product_id, customer_id, quantity, pay, send, location))\n return cursor.fetchall()", "def test_purchase_products(self, driver):\n logging.info(\"Start test case: checkout product successfully\")\n products = self.test_data[\"Purchase Products\"][\"Products\"]\n address = self.test_data[\"Purchase Products\"][\"Address\"]\n payment_info = self.test_data[\"Purchase Products\"][\"Payment Info\"]\n logging.info(f\"Test Data: {self.test_data['Purchase Products']}\")\n\n select_product(driver, products[0][\"Page\"], products[0][\"Product Name\"])\n add_product_to_cart(driver, products[0][\"Size\"], products[0][\"Color\"], products[0][\"Quantity\"])\n checkout_from_order_summary(driver)\n set_address(driver, address[\"Billing Address\"], address[\"Country\"], address[\"City\"], address[\"Zip\"])\n checkout_order_to_pay(driver, payment_info[\"Payment Type\"])\n pay_order(driver, payment_info[\"Card ID\"], payment_info[\"Expired Date\"], payment_info[\"CVC\"])\n verify_message(driver, \"Order was successful\")", "def __init__(self, product):\n\n self.codes = list(\n Products.objects.all().values_list(\n 'code',\n flat=True\n )\n )\n\n self.product = product\n self._check_product()\n if self.importable:\n self.product_object = self.import_in_db()\n self.categories = self.create_categories()\n self.brands = self.import_brands()\n self.stores = self.import_stores()", "def default_get(self, cr, uid, fields, context=None):\n if context is None:\n context = {}\n\n exchang_obj = self.pool.get('exchange.order')\n res ={}\n exchang_ids = context.get('active_ids', [])\n if not exchang_ids:\n return res\n\n result = []\n for req in exchang_obj.browse(cr, uid, exchang_ids, context=context):\n for product in req.order_line:\n result.append(self.__create_products(product))\n res.update({'products_ids': result})\n if 'current_date' in fields:\n res.update({'current_date': time.strftime('%Y-%m-%d %H:%M:%S')})\n return res", "def create_product_sale(self, order, product_row):\n cols = ProcessedOrdersExport\n sku = product_row[cols.SKU]\n product = BaseProduct.objects.get(sku=sku)\n product_sale = ProductSale(\n order=order,\n sku=product_row[cols.SKU],\n name=product_row[cols.ITEM_TITLE],\n weight=product.weight_grams,\n quantity=product_row[cols.QUANTITY],\n supplier=product.supplier,\n purchase_price=self.convert_integer_price(product.purchase_price),\n tax=self.convert_integer_price(product_row[cols.LINE_TAX]),\n unit_price=self.convert_integer_price(product_row[cols.UNIT_COST]),\n item_price=self.convert_integer_price(product_row[cols.LINE_TOTAL]),\n item_total_before_tax=self.convert_integer_price(\n product_row[cols.LINE_TOTAL_EXCLUDING_TAX]\n ),\n )\n return product_sale", "def on_order(self, order: OrderData):\n # self.on_event(EVENT_ORDER, order)\n # self.on_event(EVENT_ORDER + order.vt_orderid, order)\n pass", "def execute_order(self, event):\n if event.type == 'ORDER':\n fill_event = FillEvent(datetime.datetime.utcnow(), event.symbol,\n 'ARCA', event.quantity, event.direction, None)\n 
self.events.put(fill_event)", "def add(self, product, product_qty):\n product_id = str(product.id)\n if product_id in self.cart:\n self.cart[product_id][\"qty\"] = product_qty\n else:\n self.cart[product_id] = {'price': str(product.price), 'qty':int(product_qty)}\n self.save()", "def place_order(self, order: Order) -> None:\n\n if order.id in [order.id for order in self.orders]:\n raise OrderAlreadyCreatedError(order)\n\n if not order.symbol.is_enabled:\n raise SymbolIsNotEnabledError(order.symbol)\n\n t = Thread(target=self.__place_order, args=(order,))\n t.start()\n\n self.__sort_orders_by_price()", "def _enqueue_order(self, customer_order):\n def enqueue_node(head, node):\n tail = head\n while tail.next is not None:\n tail = tail.next\n tail.next = node\n node.prev = tail\n node.next = None\n assert customer_order.order_id not in self.order_lookup, 'one order can only be added once'\n for origin_id in customer_order.fulfillment_origin_ids:\n self.origin_due_quantity_counter[origin_id] += customer_order.origin_average_quantity()\n self.order_lookup[customer_order.order_id] = []\n self.queued_orders[customer_order.order_id] = customer_order\n for origin_id in customer_order.fulfillment_origin_ids:\n node = FulfillmentQueueNode(origin_id, customer_order.order_id)\n if self.origin_queue_lookup[origin_id] is None:\n self.origin_queue_lookup[origin_id] = node\n else:\n enqueue_node(self.origin_queue_lookup[origin_id], node)\n self.order_lookup[customer_order.order_id].append(node)", "def order(self, order_id, symbol, **kwargs):\n pass", "def order_products(self, obj):\n table = \"\"\"<table id=\"result_list\">\n <thead>\n <tr>\n <th scope=\"col\">\n <div class=\"text\"><span>Product ID</span></div>\n <div class=\"clear\"></div>\n </th>\n <th scope=\"col\">\n <div class=\"text\"><span>Product Name</span></div>\n <div class=\"clear\"></div>\n </th>\n <th scope=\"col\">\n <div class=\"text\"><span>Quantity</span></div>\n <div class=\"clear\"></div>\n </th>\n <th scope=\"col\">\n <div class=\"text\"><span>Price</span></div>\n <div class=\"clear\"></div>\n </th>\n </tr>\n </thead>\n <tbody>\"\"\"\n for order_item in obj.order_items.all():\n table += f\"\"\"<tr>\n <td class=\"field-id\">{order_item.product.id}</td>\n <td class=\"field-name\">{order_item.product.name}</td>\n <td class=\"field-quantity\">{order_item.quantity}</td>\n <td class=\"field-price\">{order_item.price}</td>\n </tr>\"\"\"\n table += \"</tbody></table>\"\n return format_html(table)", "def add_product(self, name, energy_points):\n now = datetime.datetime.now()\n date = \"{}-{}-{}\".format(now.year, now.month, now.day)\n Product(productName=name, energyPoints=energy_points, date=date)", "def order(request):\n if request.method == 'GET':\n try:\n order = Order.objects.filter()\n serializer = OrderSerializer(order, many=True)\n except Order.DoesNotExist:\n message = 'An order does not exist in this ID({})!'.format(order)\n data = {'error': message}\n return Response(data, status=status.HTTP_403_FORBIDDEN)\n\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n if request.method == 'POST':\n user = request.data['user']\n products = request.data['products']\n\n try:\n user_qry = User.objects.get(username=user)\n except User.DoesNotExist:\n message = 'An user does not exist in this name({})!'.format(user)\n data = {'error': message}\n return Response(data, status=status.HTTP_403_FORBIDDEN)\n\n try:\n total_amount = 0\n for prd in products:\n prd_qry = Product.objects.get(id=prd)\n total_amount += prd_qry.price\n except 
Product.DoesNotExist:\n message = 'An product does not exist in this ID({})!'.format(prd)\n data = {'error': message}\n return Response(data, status=status.HTTP_403_FORBIDDEN)\n\n ordr = Order()\n ordr.user = user_qry\n ordr.total_amount = total_amount\n ordr.save()\n ordr.product.set(products)\n\n data = {'Success': 'Success'}\n return Response(data, status=status.HTTP_201_CREATED)", "def add(self, product, qty):\n product_id = str(product.id)\n\n if product_id in self.basket:\n self.basket[product_id]['qty'] = qty\n else:\n self.basket[product_id] = {'price': str(product.price), 'qty': qty}\n\n self.save()", "def get_customer_order(self, order_id):\n resp = self._request_json(\"/api/orders/fetch-detail\",\n # Not sure what this getPayments does\n params={\"order_id\": order_id,\n \"getPayments\": \"true\"})\n order = _fix_order_fields(resp[\"order\"])\n products_infos = {}\n for product_info in order.pop(\"producerproducts\"):\n product_info = _strip_mongodb_id(product_info)\n pid = product_info[\"id\"]\n del product_info[\"id\"]\n products_infos[pid] = product_info\n\n for product in order[\"products\"]:\n product = _strip_mongodb_id(product)\n product.update(products_infos[product[\"id\"]])\n\n return order", "def add_order(customer_id, product_id, quantity, location, payment_status=0, send_status=0):\n in_stock = return_product(product_id)[3]\n if in_stock - float(quantity) < 0:\n return False\n\n with MY_CONNECTION as connection:\n # decreasing number of products in stock\n connection.execute(\"UPDATE Products SET in_stock=? WHERE id_product=?\",\n (in_stock - float(quantity), product_id))\n\n # adding new Order\n total_price = float(return_product(product_id)[2]) * float(quantity)\n connection.execute(\n \"\"\"\n INSERT INTO Orders\n (id_customer, id_product, quantity, total_price, payment_status, send_status, location)\n VALUES(?,?,?,?,?,?,?)\n \"\"\",\n (customer_id, product_id, quantity, total_price, payment_status, send_status, location))\n return True", "def createOrders(self):\n self.ordersDict = {}\n for pstep in self.processingSteps:\n if pstep.orderid not in self.ordersDict:\n self.ordersDict[pstep.orderid] = Order()\n self.ordersDict[pstep.orderid].addProcessingStep(pstep)", "def place(self, order_params):\n\n # Prevent multiple invocations with the same OID.\n if self.oid() is not None:\n return self.oid()\n\n # Common params across all orders\n # https://docs.gdax.com/?python#orders\n data = {\n 'side': self.__side,\n 'type': self.__order_type,\n 'product_id': self.__product,\n }\n data.update(order_params)\n\n log.info('placing ORDER')\n self.__resp = httpapi.post(\n common.api_url + 'orders',\n data=json.dumps(data),\n auth=common.auth,\n )\n\n return self.oid(), self.__resp", "def list(self, request):\n order_products = Order_Products.objects.all()\n\n order = self.request.query_params.get('order', None)\n product = self.request.query_params.get('product', None)\n payment = self.request.query_params.get('payment', None)\n\n if product is not None:\n orderproducts = orderproducts.filter(product__id=product)\n if order is not None:\n orderproducts = orderproducts.filter(order_payment=None)\n\n\n serializer = Order_Products_Serializer(\n order_products, many=True, context={'request': request}\n )\n return Response(serializer.data)", "async def on_orders_replaced(self, orders: List[MetatraderOrder]):\n self._orders = orders", "def add_product(block, product):\n block.append(Product(product['Номер'], product['Продукт'],\n product['ккал'].replace(',', '.'),\n 
product['білок'].replace(',', '.'),\n product['жири'].replace(',', '.'),\n product['вуглеводи'].replace(',', '.'),\n product['холестерин'].replace(',', '.'),\n product['1 ХO є в граммах продукта (Старыи розрах)'].replace(',', '.'),\n product['1 ХO є в граммах продукта (новий розрах)'].replace(',', '.')))", "def _create_customer_package(order):\n \n next_date = datetime.datetime.now()\n \n # RUN THE ITEMS THROUGH THE OFFERS FILTER, JUST TO CHECK\n items = _apply_deals(order.items.filter(monthly_order=False))\n print \"woohoo\"\n\n \n # GO THROUGH EACH SINGLE ITEM, GET/CREATE A WAREHOUSE ITEM, AND ADD IT TO A PACKAGE\n for x in items[0]:\n loop = x.quantity\n while loop >= 1:\n \n try:\n # GET A WAREHOUSE ITEM THAT MATCHES THE CURRENCY PARENT_PRODUCT, WEIGHT AND CURRENCY\n wh_item = WarehouseItem.objects.filter(\n unique_product__parent_product=x.item.parent_product, \n unique_product__weight=x.item.weight,\n unique_product__currency__code='GBP',\n sold__isnull=True,\n )[0]\n \n wh_item.sold = datetime.datetime.now()\n wh_item.reason = WarehouseItem.SOLD\n preorder = False \n \n except: \n # IF THERE'S NONE IN STOCK, CREATE A NEW ITEM AND MARK THE PACKAGE AS A PREORDER \n up = UniqueProduct.objects.filter( \n currency__code='GBP', \n parent_product=x.item.parent_product,\n weight=x.item.weight,\n is_active=True, \n )[0]\n \n wh_item = WarehouseItem.objects.create(\n unique_product=up,\n hashkey=uuid.uuid1().hex,\n created=datetime.datetime.now(),\n batch='TEMP',\n )\n preorder = True\n \n try:\n package = CustomerPackage.objects.get(\n order=order, \n is_preorder=preorder,\n )\n except:\n package = CustomerPackage.objects.create(\n order=order,\n is_preorder=preorder\n )\n\n wh_item.package = package \n \n # UPDATE THE FINAL FIGURES FOR POSTERITY\n wh_item.sale_currency = x.item.currency\n wh_item.list_price = x.item.price\n wh_item.sale_price = x.item.get_price()\n wh_item.save()\n \n loop -= 1\n\n\n # APPLY THE DISCOUNT/POSTAGE COSTS TO ONLY 1 PACKAGE\n try:\n package = CustomerPackage.objects.filter(order=order)[0]\n package.discount_amount = order.get_discount()\n \n amount = 0\n for x in order.items.filter(monthly_order=False):\n amount += x.item.price\n \n if amount > order.get_currency().postage_discount_threshold:\n postage_amount = 0\n else:\n postage_amount = order.get_currency().postage_cost\n \n package.postage_paid = postage_amount\n package.save()\n except:\n pass\n\n\n # NOTE: WE AREN'T SELLING MONTHLY ITEMS NOW!\n # NOW DEAL WITH THE MONTHLY ITEMS\n for x in order.items.filter(monthly_order=True):\n \n months = x.months \n while months >= 1:\n \n # THIS WILL CREATE THE FIRST PACKAGE TO BE SENT (ie. 
THE FIRST MONTH)\n if months == x.months:\n \n \n # CREATE A MONTHLY PACKAGE IF ONE DOESN\"T ALREADY EXIST\n if not monthly_package:\n monthly_package = CustomerPackage.objects.create(\n order=order,\n created=datetime.datetime.now(),\n )\n \n \n # TAKE THESE ITEMS OUT OF CURRENT AVAILABLE STOCK\n quantity = x.quantity\n while quantity >= 1:\n try:\n wh_item = WarehouseItem.objects.filter(\n unique_product__parent_product=x.item.parent_product, \n unique_product__weight=x.item.weight,\n unique_product__currency__code='GBP',\n sold__isnull=True,\n )[0]\n except:\n wh_item = WarehouseItem.objects.create(\n unique_product=x.item,\n hashkey=uuid.uuid1().hex,\n created=datetime.datetime.now(),\n batch='TEMP',\n )\n \n wh_item.sold = datetime.datetime.now()\n wh_item.reason = WarehouseItem.SOLD\n wh_item.package = package\n wh_item.save() \n \n # FOR ALL OTHER MONTHS\n else:\n monthly_package = CustomerPackage.objects.create(\n order=order,\n created=datetime.datetime.now(),\n shipping_due_date=next_date\n )\n \n # ADD ITEMS AS PREORDER ITEMS, NOT FROM CURRENT STOCK\n quantity = x.quantity\n while quantity >= 1:\n wh_item = WarehouseItem.objects.create(\n unique_product=x.item,\n hashkey=uuid.uuid1().hex,\n created=datetime.datetime.now(),\n batch='TEMP',\n ) \n \n wh_item.sold = datetime.datetime.now()\n wh_item.reason = WarehouseItem.SOLD\n wh_item.package = monthly_package\n wh_item.save()\n \n quantity -= 1\n \n next_date = add_months(next_date, 1)\n months -= 1\n\n return", "def printOrders(self, event):\n \n pass", "def place_order(env, inventory_stock):\n yield env.timeout(LEAD_TIME)\n #amount = inventory_stock.capacity - inventory_stock.level\n amount = EOQ\n print('Inventory refilled by {1} products at {0} '.format(env.now, amount))\n print('Inventory Level = {}'.format(inventory_stock.capacity))\n order_arrival_time.append(env.now)\n order_amount.append(amount)\n yield inventory_stock.put(amount)", "def received_order(self, order):\n\t\tif order.direction == ORDERDIR.IN:\n\t\t\tself.set_button_light(order.floor, OUTPUT.IN_LIGHTS, 1)\n\t\telse:\n\t\t\tself.startedOrderQueue.put(order)\n\t\tself.orderQueue.add_order(order)\n\t\tself.update_and_send_elevator_info()\n\t\tself.should_drive()", "def add_product(body): # noqa: E501\n if connexion.request.is_json:\n body = Product.from_dict(connexion.request.get_json()) # noqa: E501\n return 'do some magic!'", "def add_to_basket(self, item):\n self._products.append(item)", "def action_consume_custom(self, cr, uid, ids, product_qty, location_id=False, restrict_lot_id=False, restrict_partner_id=False,\r\n consumed_for=False, context=None):\r\n if context is None:\r\n context = {}\r\n res = []\r\n production_obj = self.pool.get('mrp.production.custom')\r\n\r\n if product_qty <= 0:\r\n raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))\r\n #because of the action_confirm that can create extra moves in case of phantom bom, we need to make 2 loops\r\n ids2 = []\r\n for move in self.browse(cr, uid, ids, context=context):\r\n if move.state == 'draft':\r\n ids2.extend(self.action_confirm(cr, uid, [move.id], context=context))\r\n else:\r\n ids2.append(move.id)\r\n\r\n prod_orders = set()\r\n for move in self.browse(cr, uid, ids2, context=context):\r\n prod_orders.add(move.custom_production_id.id)\r\n print\"Total Qty>>>\",product_qty\r\n move_qty = product_qty\r\n if move_qty <= 0.00:\r\n raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))\r\n \r\n quantity_rest = move_qty - product_qty\r\n 
print\"Rest Qty>>>\",quantity_rest\r\n # Compare with numbers of move uom as we want to avoid a split with 0 qty\r\n quantity_rest_uom = move.product_uom_qty - self.pool.get(\"product.uom\")._compute_qty_obj(cr, uid, move.product_id.uom_id, product_qty, move.product_uom)\r\n if float_compare(quantity_rest_uom, 0, precision_rounding=move.product_uom.rounding) != 0:\r\n new_mov = self.split(cr, uid, move, quantity_rest, context=context)\r\n print\"New Move>>>\",new_mov\r\n res.append(new_mov)\r\n vals = {'restrict_lot_id': restrict_lot_id,\r\n 'restrict_partner_id': restrict_partner_id,\r\n 'consumed_for': consumed_for}\r\n if location_id:\r\n vals.update({'location_id': location_id})\r\n self.write(cr, uid, [move.id], vals, context=context)\r\n # Original moves will be the quantities consumed, so they need to be done\r\n self.action_done(cr, uid, ids2, context=context)\r\n if res:\r\n self.action_assign(cr, uid, res, context=context)\r\n if prod_orders:\r\n \r\n production_obj.action_in_production(cr, uid, list(prod_orders), context=None)\r\n #production_obj.signal_workflow(cr, uid, list(prod_orders), 'button_produce')\r\n return res", "def m_ts_OrderAdded(self, sender, e):\r\n print(\"Order was added with price of {0}.\".format(e.Order.LimitPrice))", "def __orderAddItem(self, order, item):\n cursor = self.__db.cursor()\n iID = self.__id.getID(\"orderitem\")\n cursor.execute(\"INSERT INTO `orderItems` (`insertionID`, `orderID`, `itemID`) VALUES (%s, %s, %s);\",\n (iID, order, item))\n return iID", "def add_order(self, orders):\n if isinstance(orders, list):\n for order in orders:\n self._add_order(order)\n else:\n self._add_order(orders)", "def get_all_products(self):\n\t\tpass", "def order(self, stock, amount):\n self.orders[stock] = amount", "def create_pol(self, order, product):\n order.write({\n 'order_line': [(0, 0, {\n 'product_id': product.id,\n 'product_qty': 10.0,\n 'product_uom': product.uom_id.id,\n 'price_unit': product.price,\n 'name': product.name_template,\n 'sequence': len(order.order_line) + 1,\n 'date_planned': time.strftime('%Y-%m-%d')\n })]})", "def order(self, order):\n\n self._order = order", "def order(self, order):\n\n self._order = order", "def order(self, order):\n\n self._order = order", "def request_orders(self):\r\n if self.use_http():\r\n self.enqueue_http_request(\"money/orders\", {}, \"orders\")\r\n else:\r\n self.send_signed_call(\"private/orders\", {}, \"orders\")", "def addProduct(self, *args):\n return _libsbml.Reaction_addProduct(self, *args)", "def agregarProducto(self):\n itemActual=self.tableFactura.currentItem()\n producto = int(self.tableFactura.item(itemActual.row(),0).text())\n descuento = DescuentoModel.buscar(DescuentoModel.obra_social,self.sesion,self.obraSocial).\\\n filter(DescuentoModel.producto==producto)[0].descuento\n cantidad = int(self.tableFactura.item(itemActual.row(), 1).text())\n importe = float(self.tableFactura.item(itemActual.row(), 2).text()) * descuento\n row = self.tableNC.rowCount()\n self.tableNC.insertRow(row)\n self.tableNC.setItem(row, 0, QtGui.QTableWidgetItem(str(producto)))\n self.tableNC.setItem(row, 1, QtGui.QTableWidgetItem(str(cantidad)))\n self.tableNC.setItem(row, 2, QtGui.QTableWidgetItem(str(importe)))\n self.detallesReintegrables.append([int(self.numeroFacturaActual),itemActual.row()+1,descuento,importe])\n self.detallesImprimibles.append([producto,cantidad,descuento,importe])\n self.tableFactura.hideRow(itemActual.row())" ]
[ "0.6195393", "0.6153183", "0.6137681", "0.60344553", "0.60218424", "0.5938747", "0.582828", "0.57936937", "0.576499", "0.5742286", "0.57390934", "0.57390934", "0.56984586", "0.56835234", "0.5683081", "0.567723", "0.5611014", "0.55724466", "0.5555511", "0.5555511", "0.5548241", "0.55362535", "0.5534266", "0.55217916", "0.55217916", "0.55217916", "0.5518086", "0.5509723", "0.54920816", "0.5484648", "0.54751664", "0.54667145", "0.54452014", "0.5444309", "0.5444309", "0.54441935", "0.5429542", "0.54150826", "0.5414802", "0.54099995", "0.5406088", "0.5388", "0.5383833", "0.5373937", "0.5367791", "0.53648233", "0.5359846", "0.53597796", "0.5336883", "0.53139853", "0.5311802", "0.530321", "0.5264735", "0.52436554", "0.5241447", "0.5239376", "0.5227661", "0.52232915", "0.52202886", "0.5214082", "0.52115947", "0.5200303", "0.5191101", "0.5189296", "0.518615", "0.5185113", "0.5170689", "0.51685697", "0.5160228", "0.51538485", "0.5150698", "0.5146215", "0.5145052", "0.5139832", "0.5136747", "0.51315606", "0.51241034", "0.5112854", "0.51043487", "0.50937575", "0.50902563", "0.50889164", "0.50755805", "0.5074265", "0.50653833", "0.5056853", "0.50530326", "0.5046726", "0.5041014", "0.5038784", "0.50289", "0.50286907", "0.5022331", "0.501779", "0.5013041", "0.5013041", "0.5013041", "0.5009207", "0.5006834", "0.5006697" ]
0.7857374
0
Attribute which calculates the total amount on the order after deducting discounts.
def total(self):
    total_price = self.get_total_amount()
    discounts = self.get_total_discount()
    return total_price - discounts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def discount_amount(self):\r\n customer = self.records.find_customers(str(self.__customer).strip())\r\n order_value = self.order_value\r\n discount = customer.get_discount(order_value)\r\n return discount", "def total_amount(self):\n full_price = sum(item.price for item in self._products) if self._products else 0.0\n return full_price - self._get_discount()", "def basket_total_before_discounts_excl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_excl_tax\"))\n return result[\"total\"]", "def total_discount_incl_tax(self):\n discount = D(\"0.00\")\n for line in self.lines.all():\n discount += line.discount_incl_tax\n return discount", "def calculate_total(self):\n if self.total_price == 0:\n for discount in self.discounts:\n for item in self.items:\n item.add_discount(discount)\n\n for item in self.items:\n self.total_price += item.final_price()\n\n return self.total_price", "def discount_amount(self):\n return self._discount_amount", "def amount(self):\n return self.subtotal + self.tax_subtotal + self.shipping", "def get_debt(self):\n sum_import = self.invoice_set.filter(\n expiration_date__lte=date.today(),\n paid=False,\n debited=False,\n canceled=False,\n uncollectible=False,\n ).aggregate(Sum(\"amount\"))\n return sum_import.get(\"amount__sum\", None)", "def total_donated(self):\n if not hasattr(self, 'dynamic_total'):\n agg = self.donations.aggregate(Sum('amount'))\n self.dynamic_total = agg['amount__sum']\n return self.current + (self.dynamic_total or 0)", "def get_total_cost(self):\n total_cost = sum([item.quantity * item.product.price for item in self.orderitem_set.all()])\n return total_cost - total_cost * (self.discount / Decimal('100'))", "def update_on_delete(sender, instance, **kwargs):\n instance.order.update_grand_total()", "def basket_total_before_discounts_incl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_incl_tax\"))\n return result[\"total\"]", "def amount_due(self):\n queryset = self.supplyorderitem_set.filter(delivery_date__isnull=False).aggregate(\n amount_due=Sum(F('unit_price')*F('quantity_ordered'))\n )\n return queryset['amount_due'] or 0", "def _total_d(self):\n debit = 0.0\n for l in self.data:\n debit += l['debit']\n self.t_credit += l['credit']\n self.t_balance += l['balance']\n return debit", "def _compute_amount(self):\n for line in self:\n price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)\n taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': taxes['total_excluded'],\n })\n if(line.is_discount_allow and line.price_subtotal > 100):\n line.price_subtotal = line.price_subtotal - 100", "def get_total_discount(self):\n total_discount = 0.00\n\n for promotion in self.pricing_rules:\n discount = promotion.get_discount(self.order)\n total_discount += discount\n\n return total_discount", "def bulk_item(order: Order) -> Decimal:\n discount = Decimal(0)\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * Decimal('0.1')\n return discount", "def amount(self):\n return(self.order_master.amount)", "def action_update_total(self):\n for order in self:\n amount_untaxed = 0.0\n for line in order.order_line_ids:\n amount_untaxed += line.price_subtotal\n order.price_subtotal = amount_untaxed", "def 
discount(self, cart):", "def delete_on_save(sender, instance, **kwargs):\n instance.order.update_total()", "def discount_tax_compensation_amount(self):\n return self._discount_tax_compensation_amount", "def basket_total_excl_tax(self):\n return self.total_excl_tax - self.shipping_excl_tax - self.surcharge_excl_tax", "def total_donation(self):\n return self._total_donation", "def _amount_all(self):\n for order in self:\n amount_untaxed = amount_tax = 0.0\n order_amount_total = 0.0\n for line in order.order_line:\n amount_untaxed += line.price_subtotal\n amount_tax += line.price_tax\n self_amount_total = amount_untaxed + amount_tax\n if not order.discount_fixed_percent:\n order_amount_total = self_amount_total\n if order.discount_fixed_percent == 'Percent':\n order_amount_total = self_amount_total * (1 - (order.discount or 0.0) / 100.0)\n if order.discount_fixed_percent == 'Fixed':\n order_amount_total = self_amount_total - order.discount_value\n order.update({\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\n 'amount_before_disc': amount_untaxed + amount_tax,\n 'amount_total': order_amount_total,\n })", "def _compute_amount_qty_delivered(self):\n for line in self:\n # if line.product_id.invoice_policy == 'delivery':\n # qty = line.qty_delivered\n # else:\n # qty = line.product_uom_qty\n # line.price_total_without_discount = qty * line.price_unit\n # line.price_discount = (line.price_total_without_discount * line.discount) / 100\n line.update({\n # 'price_discount': line.price_discount,\n # 'price_total_without_discount': line.price_total_without_discount,\n 'sea_price_total_qty_delivered': line.untaxed_amount_to_invoice + line.untaxed_amount_invoiced,\n })", "def get_quote_discount(self):\n return self.quoteitem_set.all().annotate(\n total_quote_price=F('price') * F('quantity')).annotate(\n calculate_discount=(F('total_quote_price') * F('discount') / 100)).aggregate(\n Sum('calculate_discount'))['calculate_discount__sum']", "def _amount_all(self):\n for order in self:\n amount_untaxed = 0.0\n for line in order.order_items_ids:\n amount_untaxed += line.price_subtotal\n order.update({\n 'amount_untaxed': amount_untaxed,\n })", "def discharge(self):\n return self._discharge", "def _compute_amount(self):\n for line in self:\n price = line.price_unit\n taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty,\n product=line.product_id, partner=line.order_id.partner_shipping_id)\n self_price_subtotal = taxes['total_excluded']\n if not line.discount_fixed_percent:\n self_price_subtotal = self_price_subtotal\n if line.discount_fixed_percent == 'Percent':\n self_price_subtotal = self_price_subtotal * (1 - (line.discount or 0.0) / 100.0)\n if line.discount_fixed_percent == 'Fixed':\n self_price_subtotal = self_price_subtotal - line.discount_value\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': self_price_subtotal,\n })", "def total_purchase(self):\n\n total_amount = 0\n #grab all the item\n items = self.item_set.all()\n for item in items:\n total_amount += item.price\n return total_amount", "def base_discount_amount(self):\n return self._base_discount_amount", "def get_total(self) -> float:\n if self.__open:\n raise RuntimeError(\"Cash drawer must be closed to count.\")\n total: float = 0.0\n for denom in CashDenomination:\n total += self.__contents[denom] * denom.amount\n 
return total", "def get_discount(self, price):\r\n pass", "def _calculate_total_order_price(self, actual_order_price: int):\n actual_order_price = actual_order_price if actual_order_price else 0\n total_additional_charges = self.total_additional_charges\n self.total_order_price = actual_order_price + total_additional_charges", "def get_total_amount(self):\n total_price = 0.00\n\n for k, v in self.order.product_orders.items():\n total_price += v.quantity * v.product.price\n\n return total_price", "def bulk_item(order):\n discount = 0\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * .1\n return discount", "def _amount_all(self):\r\n for order in self:\r\n amount_untaxed = amount_tax = amount_discount = timbre = 0.0\r\n for line in order.order_line:\r\n amount_untaxed += line.price_subtotal\r\n if line.product_id.timbre_fiscal:\r\n amount_tax += line.price_tax - 0.60\r\n timbre = 0.60\r\n else :\r\n amount_tax += line.price_tax\r\n amount_discount += (line.product_uom_qty * line.price_unit * line.discount)/100\r\n order.update({\r\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\r\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\r\n 'amount_discount': order.pricelist_id.currency_id.round(amount_discount),\r\n 'price_total_no_discount': amount_untaxed + amount_discount,\r\n 'timbre': timbre,\r\n 'amount_total': amount_untaxed + amount_tax + timbre,\r\n })", "def discounted(self):\n return self._discounted", "def amount(self):\n return self.__amount", "def amount(self):\n return self.__amount", "def amount(self) -> float:\n return self._amount", "def amount(self) -> float:\n return self._amount", "def total_price(self) -> Decimal:\n return self.unit_price * self.quantity", "def getTotDonation(self):\n return sum(self.donationList)", "def total_cost(self):\r\n return sum(i.line_cost for i in self.orderitem_set.filter(status=self.status)) # pylint: disable=E1101\r", "def get_total_paid(self):\n return sum(self.paid)", "def total_clearance(self):\n total_clearances = 0\n debit = 0 #variable to track the remaining debit\n clearances = self.clearance_set.all() #grab all the previous clerances\n for clearance in clearances:\n total_clearances += clearance.paid_value\n return total_clearances", "def cost(self):\n assert(self._calculated)\n settings = config_get_group('shipping.modules.ups')\n if settings.HANDLING_FEE and Decimal(str(settings.HANDLING_FEE)) > Decimal(0):\n self.charges = Decimal(self.charges) + Decimal(str(settings.HANDLING_FEE))\n\n return(Decimal(self.charges))", "def update_on_save(sender, instance, created, **kwargs):\n instance.order.update_grand_total()", "def update_total(self):\n # the 'or 0' sets order_total as 0 instead of None,\n # preventing an error when calculating delivery_costs\n self.order_total = self.lineitems.aggregate(\n Sum('lineitem_total'))['lineitem_total__sum'] or 0\n if self.order_total < settings.FREE_DELIVERY_THRESHOLD:\n sdp = settings.STANDARD_DELIVERY_PERCENTAGE\n self.delivery_cost = self.order_total * sdp / 100\n else:\n self.delivery_cost = 0\n self.grand_total = self.order_total + self.delivery_cost\n self.save()", "def get_total(self):\n # method on the class DomesticMelonOrder\n base_price = 5\n\n if self.species == \"Christmas melons\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def calculate(self, order):\n pass", "def debit(self):\n debit = 0 #variable to track the remaining debit\n debit = self.total_purchase() - 
self.total_clearance()\n return debit", "def dec_total(self, dif):\n if not (is_number_correct(dif)):\n raise ValueError(\"Incorrect total value!\")\n self.total -= int(dif)\n self.budget_holder[datetime.datetime.now()] = self.total", "def get_order_total(self):\n order_total = 0\n for item in self.cart_items:\n order_total += item['price']\n return order_total", "def total_price(self):\n return self.owner.total_price()", "def bogof_discount(self):\n bogof_discount = 0\n for item in self.cart.items:\n if item.quantity > 1:\n bogof_discount += (math.floor(item.quantity / 2) * item.product.price)\n\n self.cart._total -= bogof_discount", "def get_total(self):\n total = 0.00\n\n for _drink in self.drinks:\n total = total + _drink.get_price()\n\n for _food in self.food:\n total = total + _food.get_price()\n\n return total", "def total_damage_dealt(self, total_damage_dealt):\n\n self._total_damage_dealt = total_damage_dealt", "def loyalty_discount(self):\n if self.cart.user.is_loyal:\n self.cart._total *= 0.98", "def total_paid(self) -> Decimal:\n return self.total_principal + self.total_interest", "def _compute_amount(self):\n for line in self:\n line.update({\n 'price_subtotal': line.price_unit * line.quantity,\n })", "def _amount_line(self, cr, uid, ids, prop, unknow_none, unknow_dict):\n res = {}\n tax_obj = self.pool.get('account.tax')\n cur_obj = self.pool.get('res.currency')\n for line in self.browse(cr, uid, ids):\n price = line.price_unit * (1-(line.discount or 0.0)/100.0)\n taxes = tax_obj.compute_all(cr, uid, line.invoice_line_tax_id, price, line.quantity, product=line.product_id, partner=line.invoice_id.partner_id)\n res[line.id] = taxes['total'] + line.variation_amount\n if line.invoice_id:\n cur = line.invoice_id.currency_id\n res[line.id] = cur_obj.round(cr, uid, cur, res[line.id])\n return res", "def compute_total_customs_duty(self):\n for rec in self:\n total = 0.0\n extra_duty = 0.0\n price_total = rec.quantity * rec.unit_price\n# total = (price_total * duty_percentage)/100\n rec.price_total = price_total\n# for hts in rec.hts_ids:\n# if hts.extra_duty_applicable:\n# extra_duty += ((rec.quantity/hts.quantity) * hts.extra_duty)\n# rec.total = total + extra_duty\n\n return True", "def update_on_delete(sender, instance, **kwargs):\n print('delete signal received!')\n instance.order.update_total()", "def total_price(self) -> Decimal:\n total_price: Decimal = ZERO_AMOUNT\n\n # Calculate the total price\n order_item: OrderItem\n for order_item in self.orderitem_set.all():\n total_price += order_item.total_price\n\n return total_price", "def update_order_undiscounted_price(apps, schema_editor):\n\n def on_migrations_complete(sender=None, **kwargs):\n order_ids = list(kwargs.get(\"updated_orders_pks\"))\n send_order_updated.delay(order_ids)\n\n Order = apps.get_model(\"order\", \"Order\")\n OrderLine = apps.get_model(\"order\", \"OrderLine\")\n\n # Take orders that has applied lines voucher discounts, but the discount is\n # not visible in undiscounted price.\n orders_to_update = Order.objects.filter(\n Exists(\n OrderLine.objects.filter(\n order_id=OuterRef(\"id\"), voucher_code__isnull=False\n )\n ),\n total_gross_amount=F(\"undiscounted_total_gross_amount\"),\n ).order_by(\"id\")\n\n updated_orders_pks = []\n for batch_pks in queryset_in_batches(orders_to_update):\n orders = Order.objects.filter(pk__in=batch_pks)\n lines = OrderLine.objects.filter(order_id__in=orders.values(\"id\")).values(\n \"order_id\",\n \"undiscounted_total_price_gross_amount\",\n 
\"total_price_gross_amount\",\n \"undiscounted_total_price_net_amount\",\n \"total_price_net_amount\",\n )\n lines_discount_data = defaultdict(lambda: (0, 0))\n for data in lines:\n discount_amount_gross = (\n data[\"undiscounted_total_price_gross_amount\"]\n - data[\"total_price_gross_amount\"]\n )\n discount_amount_net = (\n data[\"undiscounted_total_price_net_amount\"]\n - data[\"total_price_net_amount\"]\n )\n current_discount_gross, current_discount_net = lines_discount_data[\n data[\"order_id\"]\n ]\n lines_discount_data[data[\"order_id\"]] = (\n current_discount_gross + discount_amount_gross,\n current_discount_net + discount_amount_net,\n )\n\n for order in orders:\n discount_amount_gross, discount_amount_net = lines_discount_data.get(\n order.id\n )\n if discount_amount_gross > 0 or discount_amount_net > 0:\n order.undiscounted_total_gross_amount += discount_amount_gross\n order.undiscounted_total_net_amount += discount_amount_net\n\n updated_orders_pks.append(order.id)\n\n Order.objects.bulk_update(\n orders,\n [\n \"undiscounted_total_gross_amount\",\n \"undiscounted_total_net_amount\",\n ],\n )\n\n # If we updated any order we should trigger `order_updated` after migrations\n if updated_orders_pks:\n updated_orders_pks = set(updated_orders_pks)\n sender = registry.get_app_config(\"order\")\n post_migrate.connect(\n partial(on_migrations_complete, updated_orders_pks=updated_orders_pks),\n weak=False,\n dispatch_uid=\"send_order_updated\",\n sender=sender,\n )", "def update_on_save(sender, instance, created, **kwargs):\n instance.order.update_total()", "def update_on_save(sender, instance, created, **kwargs):\n instance.order.update_total()", "def save(self, *args, **kwargs):\n self.total = self.quantity * self.price\n super(DeliveryDetail, self).save(*args, **kwargs)", "def apply_discount(self, product):\n pass", "def amount(self):\n return self._amount", "def getamount(self):\n return self.__amount", "def get_total(self):\n\n total = super(InternationalMelonOrder, self).get_total()\n if self.qty < 10:\n total = total + 3\n return total", "def apply_discounts(order_obj):\n all_dedits = order_obj.debits\n other_debit = filter(lambda x: x[\"coll_name\"] != discounts.Discount.coll_name(), all_dedits)\n all_discounts = discounts.get_all()\n valid_discounts = []\n for item_dic in order_obj.items:\n for d in all_discounts:\n item_obj = items.get(coerce_bson_id(item_dic[\"obj_id\"]))\n if item_obj is None: continue\n if discounts.valid_on_item(d, item_obj):\n valid_discounts += [{\n \"obj_id\": d._id,\n \"coll_name\": discounts.Discount.coll_name(),\n \"amount\": discounts.discounted_value(item_obj.price, d),\n }]\n break\n order_obj.debits = other_debit + valid_discounts\n return valid_discounts", "def bulk_item_promo(order: Order):\n discount = 0\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * 0.1\n return discount", "def get_total(self):\n\n total = super().get_total()\n if self.qty < 10:\n total += 3.00\n return total", "def _grand_total(self):\n count = 0\n for product in self.products:\n count += product.price\n return count", "def _compute_amount_subtotal(self):\n for lines in self:\n lines.price_subtotal = lines.price_unit * lines.order_qty", "def getAmount(self):\n return self.amount", "def returnDcto(self):\n if self.newTotal != self.total and self.newTotal >= 0:\n dcto = [None, None, None, None]\n dcto[0] = round(1 - (self.newTotal / self.total), 5)\n dcto[1] = self.percentage\n dcto[2] = self.amount\n dcto[3] = self.code\n else:\n dcto = [0, 
None, None, None]\n self.parent.orderTotal.updateDcto(dcto)\n self.accept()", "def total_donations(self):\n return sum(self.donations)", "def total_donations(self):\n return sum(self.donations)", "def compute_amount_discounted(promotion, amount):\n if promotion.promo_type == '1': # % off\n amount_discounted = promotion.promo_amount * amount / Decimal(100)\n amount_discounted = Decimal(str(round(amount_discounted, 2)))\n elif promotion.promo_type == '2': # $ off\n if promotion.promo_amount < amount:\n amount_discounted = promotion.promo_amount\n else:\n amount_discounted = amount\n elif promotion.promo_type == '3': # fixed $ cost\n if promotion.promo_amount < amount:\n amount_discounted = amount - promotion.promo_amount\n else:\n # If you have a fixed cost promo of $20, but your items \n # only cost $10, you don't save.\n amount_discounted = 0\n LOG.debug('compute discount: amount_discounted = %s' % amount_discounted)\n return amount_discounted", "def get(self):\n\n bill = {\n 'product': {\n 'name': self.order.product.name,\n 'price': self.order.product.price\n },\n 'order_date_of_creation': self.order.date_of_creation,\n 'bill_date_of_creation': timezone.now(),\n 'discounts': [],\n 'total': self.order.product.price\n }\n\n return self.add_discount(bill)", "def amount(self, day):\n return self.deposits[\"amount\"].get(day, 0)", "def total_quantity(self) -> int:\n total = 0\n for i in self.order_items:\n total += i.quantity\n return total", "def base_discount_tax_compensation_amount(self):\n return self._base_discount_tax_compensation_amount", "def amount(self) -> pulumi.Output['outputs.BudgetAmount']:\n return pulumi.get(self, \"amount\")", "def amount(self) -> pulumi.Output['outputs.BudgetAmount']:\n return pulumi.get(self, \"amount\")", "def get_total_price(self):\n i = self.get_copy_with_resolved_dependencies()\n total_price = Decimal(0)\n for product in i['products']:\n billed_price = Decimal(str(product.get('price', 0))) * Decimal(str(product.get('quantity')))\n total_price += billed_price\n return total_price", "def get_final_quote_price(self):\n total, discount, taxation = self.get_total_quote_price(), self.get_quote_discount(), self.get_quote_taxation()\n return (total - discount) + taxation", "def _compute_amount(self):\n raise NotImplementedError()", "def get_fee(self):\n fee = round(self.order_payment.amount * Decimal(0.015), 2)\n return fee", "def convenience_fee_amount(self):\n return self._convenience_fee_amount", "def get_total(self):\n\n base_price = 5\n total = (1 + int(self.tax)) * int(self.qty) * base_price\n\n return total", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def get_total(self):\n\n subtotal = super(InternationalMelonOrder, self).get_total()\n if self.qty < 10:\n total = subtotal + 3\n\n return total" ]
[ "0.67122465", "0.6551784", "0.6532095", "0.6519561", "0.651557", "0.6478766", "0.6468313", "0.6403922", "0.63672656", "0.63636285", "0.63349086", "0.63340664", "0.6284223", "0.62303615", "0.62296844", "0.6176185", "0.61685467", "0.61363816", "0.61200655", "0.61124754", "0.60852194", "0.60741585", "0.6065453", "0.6024393", "0.6001514", "0.599198", "0.59908557", "0.59903145", "0.5985894", "0.59717315", "0.5969246", "0.59600383", "0.5923193", "0.58947235", "0.5839893", "0.5815247", "0.5814478", "0.581301", "0.5806115", "0.5793695", "0.5793695", "0.57856977", "0.57856977", "0.5775404", "0.5771156", "0.5769213", "0.5752917", "0.57496643", "0.57432294", "0.5721963", "0.570775", "0.5707354", "0.57052577", "0.56924194", "0.56818235", "0.5680274", "0.5666848", "0.56613964", "0.5653229", "0.5651936", "0.5644731", "0.5643941", "0.56400836", "0.5638484", "0.56340814", "0.56238383", "0.562163", "0.5602669", "0.5593017", "0.5593017", "0.55840886", "0.5571592", "0.5570867", "0.55707574", "0.5567359", "0.5563597", "0.55634844", "0.5554609", "0.55482876", "0.55481654", "0.5540149", "0.5539359", "0.55363715", "0.55363715", "0.5531498", "0.55296975", "0.55287796", "0.55211926", "0.55112094", "0.5507248", "0.5507248", "0.54936194", "0.549087", "0.5480226", "0.547294", "0.54724544", "0.545601", "0.5453216", "0.5453216", "0.54531974" ]
0.66989225
1
Returns the total amount of the order without discounts.
def get_total_amount(self):
    total_price = 0.00

    for k, v in self.order.product_orders.items():
        total_price += v.quantity * v.product.price

    return total_price
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def basket_total_before_discounts_excl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_excl_tax\"))\n return result[\"total\"]", "def total_amount(self):\n full_price = sum(item.price for item in self._products) if self._products else 0.0\n return full_price - self._get_discount()", "def total(self):\n total_price = self.get_total_amount()\n discounts = self.get_total_discount()\n\n return total_price - discounts", "def basket_total_before_discounts_incl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_incl_tax\"))\n return result[\"total\"]", "def discount_amount(self):\r\n customer = self.records.find_customers(str(self.__customer).strip())\r\n order_value = self.order_value\r\n discount = customer.get_discount(order_value)\r\n return discount", "def get_total_discount(self):\n total_discount = 0.00\n\n for promotion in self.pricing_rules:\n discount = promotion.get_discount(self.order)\n total_discount += discount\n\n return total_discount", "def amount(self):\n return(self.order_master.amount)", "def total_discount_incl_tax(self):\n discount = D(\"0.00\")\n for line in self.lines.all():\n discount += line.discount_incl_tax\n return discount", "def get_order_total(self):\n order_total = 0\n for item in self.cart_items:\n order_total += item['price']\n return order_total", "def basket_total_excl_tax(self):\n return self.total_excl_tax - self.shipping_excl_tax - self.surcharge_excl_tax", "def calculate_total(self):\n if self.total_price == 0:\n for discount in self.discounts:\n for item in self.items:\n item.add_discount(discount)\n\n for item in self.items:\n self.total_price += item.final_price()\n\n return self.total_price", "def amount(self):\n return self.subtotal + self.tax_subtotal + self.shipping", "def discount_amount(self):\n return self._discount_amount", "def orders_total(self):\n return(len(self._d_orders['trades']))", "def get_total(self) -> float:\n if self.__open:\n raise RuntimeError(\"Cash drawer must be closed to count.\")\n total: float = 0.0\n for denom in CashDenomination:\n total += self.__contents[denom] * denom.amount\n return total", "def _calculate_total_order_price(self, actual_order_price: int):\n actual_order_price = actual_order_price if actual_order_price else 0\n total_additional_charges = self.total_additional_charges\n self.total_order_price = actual_order_price + total_additional_charges", "def get_debt(self):\n sum_import = self.invoice_set.filter(\n expiration_date__lte=date.today(),\n paid=False,\n debited=False,\n canceled=False,\n uncollectible=False,\n ).aggregate(Sum(\"amount\"))\n return sum_import.get(\"amount__sum\", None)", "def total_quantity(self) -> int:\n total = 0\n for i in self.order_items:\n total += i.quantity\n return total", "def total_purchase(self):\n\n total_amount = 0\n #grab all the item\n items = self.item_set.all()\n for item in items:\n total_amount += item.price\n return total_amount", "def total_donation(self):\n return self._total_donation", "def total_amount(self):\n total_amount = 0\n for cart_item in self.get_cart_items():\n total_amount += cart_item.total_price\n return total_amount", "def get_amount_total(self, status_in=None):\n\n if self.amount_asked == 0:\n # No money asked, return 0\n return 0\n\n donations = self.donation_set.all()\n\n if status_in:\n donations = donations.filter(order__status__in=status_in)\n\n total = donations.aggregate(sum=Sum('amount'))\n\n if not total['sum']:\n # No donations, manually set amount to 0\n return 0\n\n 
return total['sum']", "def total_price(self) -> Decimal:\n total_price: Decimal = ZERO_AMOUNT\n\n # Calculate the total price\n order_item: OrderItem\n for order_item in self.orderitem_set.all():\n total_price += order_item.total_price\n\n return total_price", "def base_discount_amount(self):\n return self._base_discount_amount", "def total_price(self):\n return self.owner.total_price()", "def total_donated(self):\n if not hasattr(self, 'dynamic_total'):\n agg = self.donations.aggregate(Sum('amount'))\n self.dynamic_total = agg['amount__sum']\n return self.current + (self.dynamic_total or 0)", "def _amount_all(self):\n for order in self:\n amount_untaxed = amount_tax = 0.0\n order_amount_total = 0.0\n for line in order.order_line:\n amount_untaxed += line.price_subtotal\n amount_tax += line.price_tax\n self_amount_total = amount_untaxed + amount_tax\n if not order.discount_fixed_percent:\n order_amount_total = self_amount_total\n if order.discount_fixed_percent == 'Percent':\n order_amount_total = self_amount_total * (1 - (order.discount or 0.0) / 100.0)\n if order.discount_fixed_percent == 'Fixed':\n order_amount_total = self_amount_total - order.discount_value\n order.update({\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\n 'amount_before_disc': amount_untaxed + amount_tax,\n 'amount_total': order_amount_total,\n })", "def get_total_paid(self):\n return sum(self.paid)", "def get_total(self):\n # method on the class DomesticMelonOrder\n base_price = 5\n\n if self.species == \"Christmas melons\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def get_total_cost(self):\n total_cost = sum([item.quantity * item.product.price for item in self.orderitem_set.all()])\n return total_cost - total_cost * (self.discount / Decimal('100'))", "def amount_due(self):\n queryset = self.supplyorderitem_set.filter(delivery_date__isnull=False).aggregate(\n amount_due=Sum(F('unit_price')*F('quantity_ordered'))\n )\n return queryset['amount_due'] or 0", "def total_additional_charges(self) -> int:\n total = 0\n additional_charges = self.additional_charges\n if additional_charges:\n for charge in additional_charges:\n total += charge['amount']\n return total", "def amount(self):\n return self.__amount", "def amount(self):\n return self.__amount", "def amount(self) -> float:\n return self._amount", "def amount(self) -> float:\n return self._amount", "def basket_total_incl_tax(self):\n return self.total_incl_tax - self.shipping_incl_tax - self.surcharge_incl_tax", "def total_cost(self):\r\n return sum(i.line_cost for i in self.orderitem_set.filter(status=self.status)) # pylint: disable=E1101\r", "def fidelity(order: Order) -> Decimal:\n if order.customer.fidelity >= 1000:\n return order.total() * Decimal('0.05')\n return Decimal(0)", "def bulk_item(order: Order) -> Decimal:\n discount = Decimal(0)\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * Decimal('0.1')\n return discount", "def _grand_total(self):\n count = 0\n for product in self.products:\n count += product.price\n return count", "def amount(self):\n return self._amount", "def total_spent(self):\n total_sum = Order.objects.filter(\n email=self.email).aggregate(\n Sum('total_price')\n ).get('total_price__sum')\n return round(total_sum, 4) if total_sum else 0", "def total_sales():\n data = []\n orders = Order.objects.all()\n for order in orders:\n 
data.append(order.get_total_cost())\n return sum(data)", "def karma(self):\n total = (sum(oc.amount for oc in self.order_contributions if oc.is_external)\n - sum(o.external_contribution for o in self.own_orders))\n return total or Decimal('0.00')", "def getTotDonation(self):\n return sum(self.donationList)", "def GetTotal(self):\n return(self.total)", "def total_donations(self):\n return sum(self.donations)", "def total_donations(self):\n return sum(self.donations)", "def _amount_all(self):\n for order in self:\n amount_untaxed = 0.0\n for line in order.order_items_ids:\n amount_untaxed += line.price_subtotal\n order.update({\n 'amount_untaxed': amount_untaxed,\n })", "def get_total(self):\n\n total = super(InternationalMelonOrder, self).get_total()\n if self.qty < 10:\n total = total + 3\n return total", "def total_qty(self):\n return sum(self.quantities)", "def getAmount(self):\n return self.amount", "def _total_d(self):\n debit = 0.0\n for l in self.data:\n debit += l['debit']\n self.t_credit += l['credit']\n self.t_balance += l['balance']\n return debit", "def total(self) -> float:\n return self._total", "def total_clearance(self):\n total_clearances = 0\n debit = 0 #variable to track the remaining debit\n clearances = self.clearance_set.all() #grab all the previous clerances\n for clearance in clearances:\n total_clearances += clearance.paid_value\n return total_clearances", "def get_total_price(self):\n i = self.get_copy_with_resolved_dependencies()\n total_price = Decimal(0)\n for product in i['products']:\n billed_price = Decimal(str(product.get('price', 0))) * Decimal(str(product.get('quantity')))\n total_price += billed_price\n return total_price", "def discount_tax_compensation_amount(self):\n return self._discount_tax_compensation_amount", "def incomes_from_outside(self) -> Decimal:\n return Decimal(\n sum(\n [\n t.amount\n for t in self.transactions_all\n if t.amount > 0 and not t.other_party.is_user_owner\n ]\n )\n )", "def get_inbound_statement_details_total_amount(self):\n return self.get_text_from_element(self.inbound_statements_details_total_amount_locator, True)", "def _amount_all(self):\r\n for order in self:\r\n amount_untaxed = amount_tax = amount_discount = timbre = 0.0\r\n for line in order.order_line:\r\n amount_untaxed += line.price_subtotal\r\n if line.product_id.timbre_fiscal:\r\n amount_tax += line.price_tax - 0.60\r\n timbre = 0.60\r\n else :\r\n amount_tax += line.price_tax\r\n amount_discount += (line.product_uom_qty * line.price_unit * line.discount)/100\r\n order.update({\r\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\r\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\r\n 'amount_discount': order.pricelist_id.currency_id.round(amount_discount),\r\n 'price_total_no_discount': amount_untaxed + amount_discount,\r\n 'timbre': timbre,\r\n 'amount_total': amount_untaxed + amount_tax + timbre,\r\n })", "def total(self):\n\t\treturn self._total", "def get_total(self):\n\n base_price = 5\n \n if self.species == \"Christmas melon\":\n base_price = base_price * 1.5 \n\n total = (1 + self.tax) * self.qty * base_price \n\n if self.order_type == \"international\" and self.qty>10:\n total += 3\n\n\n return total", "def get_inbound_statement_details_total_amount_with_taxes(self):\n return self.get_text_from_element(self.inbound_statements_details_total_amount_with_taxes_locator, True)", "def amount(self) -> int:\n return self._amount", "def get_total(self):\n\n subtotal = super(InternationalMelonOrder, self).get_total()\n if 
self.qty < 10:\n total = subtotal + 3\n\n return total", "def large_order(order: Order) -> Decimal:\n distinct_items = {item.product for item in order.cart}\n if len(distinct_items) >= 10:\n return order.total() * Decimal('0.07')\n return Decimal(0)", "def bulk_item_promo(order: Order):\n discount = 0\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * 0.1\n return discount", "def sum_total_amount(self):\n\n my_sumtotal = 0\n \n total_amount = self.data['Total Amount Due']\n \n for i in total_amount:\n my_sumtotal = my_sumtotal + float(i)\n\n return my_sumtotal", "def get_total(self):\n\n base_price=5\n if self.species == \"Christmas\":\n base_price=1.5*base_price\n \n total = (1 + self.tax) * self.qty * base_price\n\n if self.order_type==\"international\" and self.qty<10:\n total+=3\n\n return total", "def get_total_payment(self, product, amount):\n pass", "def bogof_discount(self):\n bogof_discount = 0\n for item in self.cart.items:\n if item.quantity > 1:\n bogof_discount += (math.floor(item.quantity / 2) * item.product.price)\n\n self.cart._total -= bogof_discount", "def get_amount_exempt_document(self, txt_line):\n tax = 0\n amount_doc = 0\n for tax_line in txt_line.invoice_id.tax_line:\n if 'SDCF' in tax_line.name or \\\n (tax_line.base and not tax_line.amount):\n tax = tax_line.base + tax\n else:\n amount_doc = tax_line.base + amount_doc\n return (tax, amount_doc)", "def get_total(self):\n\n base_price = 5\n total = (1 + int(self.tax)) * int(self.qty) * base_price\n\n return total", "def getAmount(self):\n return self.base.get(\"amount\", [])", "def get_total(self):\n total = 0.00\n\n for _drink in self.drinks:\n total = total + _drink.get_price()\n\n for _food in self.food:\n total = total + _food.get_price()\n\n return total", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def amount(self):\r\n return self._data['amount']", "def get_quote_discount(self):\n return self.quoteitem_set.all().annotate(\n total_quote_price=F('price') * F('quantity')).annotate(\n calculate_discount=(F('total_quote_price') * F('discount') / 100)).aggregate(\n Sum('calculate_discount'))['calculate_discount__sum']", "def total_price(self) -> Decimal:\n return self.unit_price * self.quantity", "def get_total_price(self):\n return sum(Decimal(item[\"price\"]) * item[\"qty\"] for item in self.cart.values())", "def total_paid(self) -> Decimal:\n return self.total_principal + self.total_interest", "def total(self) -> int:\n return self._total", "def bulk_item(order):\n discount = 0\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * .1\n return discount", "def get_valor_total_no_ciclo(self):\n valor = Decimal(self.coagricultor.coagricultor.identificador * 0.01) \\\n .quantize(TWOPLACES) # Centavos do identificador - decimal\n # com 2 casas\n for item in self.itens.all():\n if(item.cesta.coagricultor.ciclo.ativo == True):\n valor = valor + item.produto.valor\n\n return valor", "def get_total(self):\n\n base_price = self.get_base_price()\n if self.species == \"christmas melon\":\n base_price = base_price * 1.5\n\n total = ((1 + self.tax) * self.qty * base_price)\n\n return total", "def get_total_price(self):\n subtotal 
= sum(Decimal(item['price']) * item['qty'] for item in self.basket.values())\n\n if subtotal == 0:\n shipping = Decimal(0.00)\n else:\n shipping = Decimal(11.50)\n\n total = subtotal + Decimal(shipping)\n return total", "def get_total_redeem(self):\n total = 0\n for redeem in self.get_redeems():\n total += redeem.get_total()\n return total", "def orders_summary(self):\n return self._orders_summary", "def getamount(self):\n return self.__amount", "def total_volume(self) -> int:\n total = 0\n for i in self.order_items:\n total += i.total_volume\n return total", "def get_total(self):\n\n total = super().get_total()\n if self.qty < 10:\n total += 3.00\n return total", "def total(self) -> int:\n if self._total is None:\n self._total = self.counts.sum()\n return self._total", "def get_total_to_pay(self):\n self.__total_to_pay = Order.get_price_subtotals(self) + \\\n Order.get_qst_subtotals(self) + \\\n Order.get_gst_subtotals(self)\n return self.__total_to_pay", "def get_total(self):\n\n base_price = self.get_base_price()\n\n if self.species == \"Christmas\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def _get_amount_total(self):\n res = {}\n for txt in self:\n res[txt.id] = 0.0\n for txt_line in txt.txt_ids:\n if txt_line.invoice_id.type in ['out_refund', 'in_refund']:\n res[txt.id] -= txt_line.amount_withheld\n else:\n res[txt.id] += txt_line.amount_withheld\n return res", "def discounted(self):\n return self._discounted", "def base_checkout_undiscounted_delivery_price(\n checkout_info: \"CheckoutInfo\",\n lines: Iterable[\"CheckoutLineInfo\"] = None,\n) -> Money:\n from .fetch import ShippingMethodInfo\n\n delivery_method_info = checkout_info.delivery_method_info\n currency = checkout_info.checkout.currency\n\n if not isinstance(delivery_method_info, ShippingMethodInfo):\n return zero_money(currency)\n\n return calculate_base_price_for_shipping_method(\n checkout_info, delivery_method_info, lines\n )", "def discharge(self):\n return self._discharge" ]
[ "0.7163056", "0.69267505", "0.69222707", "0.6920065", "0.66498226", "0.6615668", "0.660972", "0.65836567", "0.6532857", "0.6476015", "0.6449595", "0.6415554", "0.6324086", "0.63018864", "0.62091404", "0.61972386", "0.61502963", "0.61472183", "0.6100125", "0.60200626", "0.6015279", "0.60015374", "0.5995001", "0.5987658", "0.5976651", "0.59709615", "0.5967102", "0.59606487", "0.59580004", "0.595767", "0.58795434", "0.58668643", "0.5857569", "0.5857569", "0.5851983", "0.5851983", "0.58450764", "0.581894", "0.58054084", "0.5804647", "0.58016443", "0.57988364", "0.5764816", "0.57463163", "0.5746105", "0.5746077", "0.57355267", "0.5727142", "0.5727142", "0.57186353", "0.5710823", "0.57079095", "0.5700924", "0.56966627", "0.5684259", "0.5674907", "0.5654221", "0.5651784", "0.56479543", "0.564402", "0.5634078", "0.5620265", "0.56123275", "0.5589125", "0.5581686", "0.55755264", "0.55701375", "0.5562405", "0.555667", "0.5544527", "0.553129", "0.5515031", "0.55079067", "0.55059206", "0.5505524", "0.5505438", "0.5505221", "0.5505221", "0.5500854", "0.5497703", "0.54921836", "0.54839575", "0.5473084", "0.5464528", "0.5462249", "0.5452681", "0.54512763", "0.54440325", "0.5431184", "0.54294217", "0.5427542", "0.5426997", "0.54164505", "0.54140633", "0.54052633", "0.5396294", "0.5376151", "0.5375902", "0.5371943", "0.5365573" ]
0.64590997
10
Calculates the total discount applicable to this order.
def get_total_discount(self):
    total_discount = 0.00

    for promotion in self.pricing_rules:
        discount = promotion.get_discount(self.order)
        total_discount += discount

    return total_discount
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def discount_amount(self):\r\n customer = self.records.find_customers(str(self.__customer).strip())\r\n order_value = self.order_value\r\n discount = customer.get_discount(order_value)\r\n return discount", "def calculate_total(self):\n if self.total_price == 0:\n for discount in self.discounts:\n for item in self.items:\n item.add_discount(discount)\n\n for item in self.items:\n self.total_price += item.final_price()\n\n return self.total_price", "def total_discount_incl_tax(self):\n discount = D(\"0.00\")\n for line in self.lines.all():\n discount += line.discount_incl_tax\n return discount", "def get_total_cost(self):\n total_cost = sum([item.quantity * item.product.price for item in self.orderitem_set.all()])\n return total_cost - total_cost * (self.discount / Decimal('100'))", "def total(self):\n total_price = self.get_total_amount()\n discounts = self.get_total_discount()\n\n return total_price - discounts", "def discount_amount(self):\n return self._discount_amount", "def total_amount(self):\n full_price = sum(item.price for item in self._products) if self._products else 0.0\n return full_price - self._get_discount()", "def _compute_amount(self):\n for line in self:\n price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)\n taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': taxes['total_excluded'],\n })\n if(line.is_discount_allow and line.price_subtotal > 100):\n line.price_subtotal = line.price_subtotal - 100", "def compute_amount_discounted(promotion, amount):\n if promotion.promo_type == '1': # % off\n amount_discounted = promotion.promo_amount * amount / Decimal(100)\n amount_discounted = Decimal(str(round(amount_discounted, 2)))\n elif promotion.promo_type == '2': # $ off\n if promotion.promo_amount < amount:\n amount_discounted = promotion.promo_amount\n else:\n amount_discounted = amount\n elif promotion.promo_type == '3': # fixed $ cost\n if promotion.promo_amount < amount:\n amount_discounted = amount - promotion.promo_amount\n else:\n # If you have a fixed cost promo of $20, but your items \n # only cost $10, you don't save.\n amount_discounted = 0\n LOG.debug('compute discount: amount_discounted = %s' % amount_discounted)\n return amount_discounted", "def _get_discount(self):\n\n # For every 2 PENS, one free discount\n number_of_pens = len([x for x in self._products if x.code == 'PEN'])\n discount = 5.0 * int(number_of_pens / 2)\n\n # If there are more than 3 T-Shirts in the basket, 5 EUR of discount in every of them (25%)\n number_of_tshirts = len([x for x in self._products if x.code == 'TSHIRT'])\n if number_of_tshirts >= 3:\n discount += 5.0 * number_of_tshirts\n\n return discount", "def bulk_item(order: Order) -> Decimal:\n discount = Decimal(0)\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * Decimal('0.1')\n return discount", "def compute_quotation_price(self):\n result = decimal.Decimal('0')\n if self.vehiculePrice:\n result = self.vehiculePrice * 2 / 100\n if self.covWind:\n result += get_coverage_price_by_name(\"WIND\")\n if self.covPass:\n result += get_coverage_price_by_name(\"PASS\")\n if self.covFlood:\n result += get_coverage_price_by_name(\"FLOOD\")\n return result", "def discount(self, period):\n\t\treturn 1.0/compound(period)", "def _compute_amount(self):\n 
for line in self:\n price = line.price_unit\n taxes = line.tax_id.compute_all(price, line.order_id.currency_id, line.product_uom_qty,\n product=line.product_id, partner=line.order_id.partner_shipping_id)\n self_price_subtotal = taxes['total_excluded']\n if not line.discount_fixed_percent:\n self_price_subtotal = self_price_subtotal\n if line.discount_fixed_percent == 'Percent':\n self_price_subtotal = self_price_subtotal * (1 - (line.discount or 0.0) / 100.0)\n if line.discount_fixed_percent == 'Fixed':\n self_price_subtotal = self_price_subtotal - line.discount_value\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': self_price_subtotal,\n })", "def discount(self, cart):", "def discounted(self):\n return self._discounted", "def bulk_item_promo(order: Order):\n discount = 0\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * 0.1\n return discount", "def get_quote_discount(self):\n return self.quoteitem_set.all().annotate(\n total_quote_price=F('price') * F('quantity')).annotate(\n calculate_discount=(F('total_quote_price') * F('discount') / 100)).aggregate(\n Sum('calculate_discount'))['calculate_discount__sum']", "def get_discount(self, price):\r\n pass", "def apply_discount(self, product):\n pass", "def implied_discount_factor(p1: Instrument, c1: Instrument, p2: Instrument, c2: Instrument) -> float:\n return (c1.price - p1.price - c2.price + p2.price)/ (c2.strike - c1.strike)", "def discounted_reward(self, discount):\n\n tl = len(self)\n return (1 - discount) * np.sum(discount ** np.arange(tl) * self.rewards)", "def base_discount_amount(self):\n return self._base_discount_amount", "def discount_tax_compensation_amount(self):\n return self._discount_tax_compensation_amount", "def bogof_discount(self):\n bogof_discount = 0\n for item in self.cart.items:\n if item.quantity > 1:\n bogof_discount += (math.floor(item.quantity / 2) * item.product.price)\n\n self.cart._total -= bogof_discount", "def bulk_item(order):\n discount = 0\n for item in order.cart:\n if item.quantity >= 20:\n discount += item.total() * .1\n return discount", "def calculate_price(self):\n\n cargo_weight = self.cargo.weight\n tax_rate = Decimal(0.18)\n\n untaxed_total = Decimal(cargo_weight) * Decimal(self.price_per_unit_weight)\n\n total_price = (untaxed_total * tax_rate) + untaxed_total\n\n return total_price", "def total_cost(self):\n return (self.food_amount + self.local_transport_amount + self.other_expenses +\n self.travel_amount + self.accomodation_amount)", "def total_price(self) -> Decimal:\n total_price: Decimal = ZERO_AMOUNT\n\n # Calculate the total price\n order_item: OrderItem\n for order_item in self.orderitem_set.all():\n total_price += order_item.total_price\n\n return total_price", "def loyalty_discount(self):\n if self.cart.user.is_loyal:\n self.cart._total *= 0.98", "def get_total(self) -> float:\n if self.__open:\n raise RuntimeError(\"Cash drawer must be closed to count.\")\n total: float = 0.0\n for denom in CashDenomination:\n total += self.__contents[denom] * denom.amount\n return total", "def cost(self):\n assert(self._calculated)\n settings = config_get_group('shipping.modules.ups')\n if settings.HANDLING_FEE and Decimal(str(settings.HANDLING_FEE)) > Decimal(0):\n self.charges = Decimal(self.charges) + Decimal(str(settings.HANDLING_FEE))\n\n return(Decimal(self.charges))", "def _amount_all(self):\n for order in self:\n amount_untaxed = amount_tax = 0.0\n 
order_amount_total = 0.0\n for line in order.order_line:\n amount_untaxed += line.price_subtotal\n amount_tax += line.price_tax\n self_amount_total = amount_untaxed + amount_tax\n if not order.discount_fixed_percent:\n order_amount_total = self_amount_total\n if order.discount_fixed_percent == 'Percent':\n order_amount_total = self_amount_total * (1 - (order.discount or 0.0) / 100.0)\n if order.discount_fixed_percent == 'Fixed':\n order_amount_total = self_amount_total - order.discount_value\n order.update({\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\n 'amount_before_disc': amount_untaxed + amount_tax,\n 'amount_total': order_amount_total,\n })", "def compute_total_customs_duty(self):\n for rec in self:\n total = 0.0\n extra_duty = 0.0\n price_total = rec.quantity * rec.unit_price\n# total = (price_total * duty_percentage)/100\n rec.price_total = price_total\n# for hts in rec.hts_ids:\n# if hts.extra_duty_applicable:\n# extra_duty += ((rec.quantity/hts.quantity) * hts.extra_duty)\n# rec.total = total + extra_duty\n\n return True", "def get_total(self):\n total = 0.00\n\n for _drink in self.drinks:\n total = total + _drink.get_price()\n\n for _food in self.food:\n total = total + _food.get_price()\n\n return total", "def total_price(self) -> Decimal:\n return self.unit_price * self.quantity", "def total_cost(self):\r\n return sum(i.line_cost for i in self.orderitem_set.filter(status=self.status)) # pylint: disable=E1101\r", "def base_checkout_total(\n checkout_info: \"CheckoutInfo\",\n discounts: Iterable[DiscountInfo],\n lines: Iterable[\"CheckoutLineInfo\"],\n) -> Money:\n currency = checkout_info.checkout.currency\n line_totals = [\n calculate_base_line_total_price(\n line_info,\n checkout_info.channel,\n discounts,\n )\n for line_info in lines\n ]\n subtotal = sum(line_totals, zero_money(currency))\n\n shipping_price = base_checkout_delivery_price(checkout_info, lines)\n discount = checkout_info.checkout.discount\n\n is_shipping_voucher = (\n checkout_info.voucher.type == VoucherType.SHIPPING\n if checkout_info.voucher\n else False\n )\n # Discount is subtracted from both gross and net values, which may cause negative\n # net value if we are having a discount that covers whole price.\n if not is_shipping_voucher:\n subtotal = max(zero_money(currency), subtotal - discount)\n return subtotal + shipping_price", "def get_total_amount(self):\n total_price = 0.00\n\n for k, v in self.order.product_orders.items():\n total_price += v.quantity * v.product.price\n\n return total_price", "def _compute_amount_qty_delivered(self):\n for line in self:\n # if line.product_id.invoice_policy == 'delivery':\n # qty = line.qty_delivered\n # else:\n # qty = line.product_uom_qty\n # line.price_total_without_discount = qty * line.price_unit\n # line.price_discount = (line.price_total_without_discount * line.discount) / 100\n line.update({\n # 'price_discount': line.price_discount,\n # 'price_total_without_discount': line.price_total_without_discount,\n 'sea_price_total_qty_delivered': line.untaxed_amount_to_invoice + line.untaxed_amount_invoiced,\n })", "def _calculate_total_order_price(self, actual_order_price: int):\n actual_order_price = actual_order_price if actual_order_price else 0\n total_additional_charges = self.total_additional_charges\n self.total_order_price = actual_order_price + total_additional_charges", "def _compute_calculate_cost(self):\n for order in self:\n amount_calculate_cost = 0.0\n for 
line in order.order_line:\n amount_calculate_cost += (line.product_id.standard_price * line.product_uom_qty)\n order.update({\n 'amount_calculate_cost': amount_calculate_cost\n })", "def _amount_all(self):\r\n for order in self:\r\n amount_untaxed = amount_tax = amount_discount = timbre = 0.0\r\n for line in order.order_line:\r\n amount_untaxed += line.price_subtotal\r\n if line.product_id.timbre_fiscal:\r\n amount_tax += line.price_tax - 0.60\r\n timbre = 0.60\r\n else :\r\n amount_tax += line.price_tax\r\n amount_discount += (line.product_uom_qty * line.price_unit * line.discount)/100\r\n order.update({\r\n 'amount_untaxed': order.pricelist_id.currency_id.round(amount_untaxed),\r\n 'amount_tax': order.pricelist_id.currency_id.round(amount_tax),\r\n 'amount_discount': order.pricelist_id.currency_id.round(amount_discount),\r\n 'price_total_no_discount': amount_untaxed + amount_discount,\r\n 'timbre': timbre,\r\n 'amount_total': amount_untaxed + amount_tax + timbre,\r\n })", "def apply_discounts(self):\n # for each valid discount...\n for discount in list(DiscountTypes):\n # only apply the discount if it is set in the cart\n if(discount in self.cart.discounts):\n getattr(self, discount.value)()", "def get_total_price(self):\n return sum(Decimal(item[\"price\"]) * item[\"qty\"] for item in self.cart.values())", "def total(self) -> float:\n\n remained_to_be_taxed = self.income\n # taxed = list()\n self.tax_amounts = []\n start_tax_range = 0\n end_tax_range = self.bracket\n\n for i, b in enumerate(self.bracket):\n\n amount_to_tax = b.end - start_tax_range\n t = Taxed(min(amount_to_tax, remained_to_be_taxed), b.rate,\n min(amount_to_tax, remained_to_be_taxed) * b.rate)\n self.tax_amounts.append(t)\n # print(i, start_t ax_range, b.end, amount_to_tax, b.rate)\n\n remained_to_be_taxed -= amount_to_tax\n # print(remained_to_be_taxed)\n\n if b.end > self.income:\n break\n\n start_tax_range = b.end\n\n # print(taxed)\n return sum([t.tax for t in self.tax_amounts])", "def calculate(self):\n\n rating = 0\n\n props = ['aroma', 'appearance', 'taste', 'palate', 'bottle_style']\n for item in props:\n rating += getattr(self, item, 0)\n\n self.overall = (rating / self.total) / .2", "def total_price(self):\n return self.owner.total_price()", "def get_total_price(self):\n i = self.get_copy_with_resolved_dependencies()\n total_price = Decimal(0)\n for product in i['products']:\n billed_price = Decimal(str(product.get('price', 0))) * Decimal(str(product.get('quantity')))\n total_price += billed_price\n return total_price", "def discount(ir, period):\n\treturn ir.discount(period)", "def basket_total_before_discounts_incl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_incl_tax\"))\n return result[\"total\"]", "def _compute_amount_subtotal(self):\n for lines in self:\n lines.price_subtotal = lines.price_unit * lines.order_qty", "def get_total(self):\n\n base_price = 5\n total = (1 + int(self.tax)) * int(self.qty) * base_price\n\n return total", "def total_balance(self) -> Decimal:\n return self.incomes_from_outside + self.expenses_to_outside", "def amount(self):\n return self.subtotal + self.tax_subtotal + self.shipping", "def genericDiscountRate(self, items):\n rate = float(items)\n if items >= self.minUnits:\n rate = (float(int(items / self.divisor)) * self.multiplier * (1.0 - self.discountPerc)) + (float(int(items % self.divisor)) * self.multiplier * (1.0 - self.discountPerc))\n return rate", "def total_paid(self) -> Decimal:\n return self.total_principal + 
self.total_interest", "def get_total(self):\n\n base_price = 5\n \n if self.species == \"Christmas melon\":\n base_price = base_price * 1.5 \n\n total = (1 + self.tax) * self.qty * base_price \n\n if self.order_type == \"international\" and self.qty>10:\n total += 3\n\n\n return total", "def get_total(self):\n # method on the class DomesticMelonOrder\n base_price = 5\n\n if self.species == \"Christmas melons\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def amount_to_charge(opportunity):\n amount = float(opportunity.amount)\n if opportunity.agreed_to_pay_fees:\n total = (amount + 0.30) / (1 - 0.022)\n else:\n total = amount\n return quantize(total)", "def subtotal(self):\n return self.cantidad * self.precio", "def get_total_price(self):\n subtotal = sum(Decimal(item['price']) * item['qty'] for item in self.basket.values())\n\n if subtotal == 0:\n shipping = Decimal(0.00)\n else:\n shipping = Decimal(11.50)\n\n total = subtotal + Decimal(shipping)\n return total", "def add_discount(self, bill):\n\n discounts_queryset = Discount.objects.prefetch_related('product')\n\n total_discount = 0\n\n for discount in discounts_queryset:\n discount_products = discount.product.all()\n if self.order.product in discount_products:\n bill['discounts'].append({'discount_title': discount.title,\n 'discount_size': discount.size})\n\n total_discount += discount.size\n if total_discount > 100:\n total_discount = 100\n\n bill['total'] = bill['total'] - bill['total'] / 100 * total_discount\n\n return bill", "def get_total(self):\n\n base_price=5\n if self.species == \"Christmas\":\n base_price=1.5*base_price\n \n total = (1 + self.tax) * self.qty * base_price\n\n if self.order_type==\"international\" and self.qty<10:\n total+=3\n\n return total", "def get_total(self):\n\n base_price = self.get_base_price()\n\n if self.species == \"Christmas\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def get_order_total(self):\n order_total = 0\n for item in self.cart_items:\n order_total += item['price']\n return order_total", "def get_total(self):\n\n base_price = self.get_base_price()\n if self.species == \"christmas melon\":\n base_price = base_price * 1.5\n\n total = ((1 + self.tax) * self.qty * base_price)\n\n return total", "def subtotal(self):\n return self.precio_unitario * self.cantidad", "def action_update_total(self):\n for order in self:\n amount_untaxed = 0.0\n for line in order.order_line_ids:\n amount_untaxed += line.price_subtotal\n order.price_subtotal = amount_untaxed", "def _compute_amount(self):\n for line in self:\n line.update({\n 'price_subtotal': line.price_unit * line.quantity,\n })", "def fidelity_promo(order: Order) -> float: # <3>\n return order.total() * 0.05 if order.customer.fidelity >= 1000 else 0", "def duty_free(price: int, discount: int, holiday_cost: int) -> int:\n if holiday_cost == 500:\n return holiday_cost\n\n discount /= 100\n price = holiday_cost / (price * discount)\n price = int(price)\n return price", "def _compute_amount(self):\n for line in self:\n price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)\n new_price = price\n if line.lot_id and line.product_id.tracking in ['lot','serial']:\n lot_id = self.env['stock.production.lot'].search([('name', '=', line.lot_id), ('product_id', '=', line.product_id.id)])\n if lot_id.tax_ids.filtered(lambda tax: tax.amount_type == 'based_on_margin'):\n if lot_id.cost_price:\n new_price -= lot_id.cost_price\n sh_tax = 
line.tax_id.filtered(lambda tax: tax.amount_type =='based_on_margin').compute_all(new_price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n taxes = line.tax_id.filtered(lambda tax: tax.amount_type !='based_on_margin').compute_all(price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n print(taxes)\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])) + sum(t.get('amount', 0.0) for t in sh_tax.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': taxes['total_excluded'],\n })\n if self.env.context.get('import_file', False) and not self.env.user.user_has_groups('account.group_account_manager'):\n line.tax_id.invalidate_cache(['invoice_repartition_line_ids'], [line.tax_id.id])", "def total_amount(self):\n total_amount = 0\n for cart_item in self.get_cart_items():\n total_amount += cart_item.total_price\n return total_amount", "def get_total(self):\n\n self.base_price = self.get_base_price()\n\n if self.species == \"christmas melon\":\n self.base_price = self.base_price * 1.5\n\n total = (1 + self.tax) * self.qty * self.base_price\n return total", "def apply_discounts(order_obj):\n all_dedits = order_obj.debits\n other_debit = filter(lambda x: x[\"coll_name\"] != discounts.Discount.coll_name(), all_dedits)\n all_discounts = discounts.get_all()\n valid_discounts = []\n for item_dic in order_obj.items:\n for d in all_discounts:\n item_obj = items.get(coerce_bson_id(item_dic[\"obj_id\"]))\n if item_obj is None: continue\n if discounts.valid_on_item(d, item_obj):\n valid_discounts += [{\n \"obj_id\": d._id,\n \"coll_name\": discounts.Discount.coll_name(),\n \"amount\": discounts.discounted_value(item_obj.price, d),\n }]\n break\n order_obj.debits = other_debit + valid_discounts\n return valid_discounts", "def total_commission(self):\n return sum(item.commission for item in self.itens.all())", "def total_final(subtotal, discount, iva):\n return (subtotal - discount) + iva", "def basket_total_before_discounts_excl_tax(self):\n result = self.lines.aggregate(total=Sum(\"line_price_before_discounts_excl_tax\"))\n return result[\"total\"]", "def dilutionneeded(self) -> float:\n return self.stock*1.0/self.final", "def get_final_quote_price(self):\n total, discount, taxation = self.get_total_quote_price(), self.get_quote_discount(), self.get_quote_taxation()\n return (total - discount) + taxation", "def _calculate_costs(self):\n cost = 0\n cost += self._cost_route_fine()\n cost += self._cost_petrol()\n cost += self._cost_wage()\n cost += self._cost_refueling()\n cost += self._cost_caught_by_police()\n cost += self._cost_vehicle_malfunction()\n return cost", "def base_discount_tax_compensation_amount(self):\n return self._base_discount_tax_compensation_amount", "def cash_ratio(self):\n return self.cash / self.current_liabilities", "def profit_per_item(self, pk=None):\n total_profit = 0\n total_cost = self.item_cost + self.shipping_cost + self.listing_fee + self.final_value_fee\n total_paid = self.shipping_paid + self.item_paid\n total_profit = total_paid - total_cost\n return total_profit", "def discount(self,discountFactor,type='geometric'):\n for e in self.estimators:\n e.discount(discountFactor,type)\n return", "def total_donated(self):\n if not hasattr(self, 'dynamic_total'):\n agg = self.donations.aggregate(Sum('amount'))\n self.dynamic_total = agg['amount__sum']\n return self.current + 
(self.dynamic_total or 0)", "def cash_flow(self):\n _cash_flow = self.after_tax_profit() + self.depreciation()\n return _cash_flow", "def cash(self) -> float:\n return self._cash", "def recurring_costs(self):\n\n system = self.system()\n \n discount_factor = float(\n self['finance']['discounted cash flow factor'])\n\n if system != 'grid':\n cost_per_year = float(\n self['system (%s)' % system]['system recurring cost per year'])\n else:\n cost_per_year = float(\n self['system (grid)']['internal system recurring cost per year'])\n \n return cost_per_year * discount_factor", "def get_total(self):\n\n base_price = self.get_base_price()\n\n # Christmas Melons are more x1.5 expensive than other melons\n if self.species == \"Christmas Melon\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def discount(self):\r\n return DiscountResource(self)", "def tax(subtotal, discount):\n return (subtotal - discount) * 0.12", "def get_fee(self):\n fee = round(self.order_payment.amount * Decimal(0.015), 2)\n return fee", "def calculate_cost(self):\n booking_days, booking_hours = self.calculate_daily_hourly_billable_counts()\n day_cost = booking_days * Decimal(self.vehicle.type.daily_rate)\n hour_cost = booking_hours * Decimal(self.vehicle.type.hourly_rate)\n if hour_cost > self.vehicle.type.daily_rate:\n hour_cost = self.vehicle.type.daily_rate\n return float(day_cost + hour_cost)", "def total_earnings(self):\n return self.total_balance - self.net_invested", "def get_discount(self, *args: Any) -> DiscountStrategy:\n\n return self.discount_strategy.get_discount(*args)", "def calculateCosts(self):\n self.costs = 0\n for house in self.houses:\n if not house.distance == 1000:\n self.costs += house.distance * 9\n for battery in self.batteries:\n self.costs += battery.costs\n return self.costs" ]
[ "0.76241744", "0.73756224", "0.7202873", "0.70788705", "0.6990974", "0.6952235", "0.6787426", "0.67570555", "0.6640429", "0.6552841", "0.652841", "0.64726514", "0.64331007", "0.64018595", "0.6394077", "0.6384539", "0.6307665", "0.62925327", "0.6282091", "0.6279652", "0.627704", "0.6252722", "0.6191731", "0.6179777", "0.6114019", "0.6084302", "0.6071334", "0.59924626", "0.59892535", "0.59868103", "0.598205", "0.5975626", "0.59547925", "0.59364736", "0.5932661", "0.59163654", "0.5905445", "0.58718365", "0.58610994", "0.58469826", "0.5837138", "0.5819253", "0.57979876", "0.5792497", "0.5744533", "0.5732966", "0.57163465", "0.5714971", "0.57060695", "0.56846213", "0.5667581", "0.565903", "0.565238", "0.5640946", "0.55930567", "0.55895454", "0.55866945", "0.55823326", "0.5577913", "0.55676913", "0.5564086", "0.5555671", "0.5555357", "0.5553857", "0.5553337", "0.5543046", "0.5525988", "0.5507463", "0.5500767", "0.5496844", "0.54959136", "0.54944736", "0.54824173", "0.5472062", "0.5467688", "0.5466002", "0.54615986", "0.54586625", "0.5453053", "0.54496837", "0.54384214", "0.54377145", "0.5423695", "0.54168886", "0.5410766", "0.5409448", "0.53916657", "0.53780115", "0.536377", "0.53637445", "0.5360858", "0.53515077", "0.53515077", "0.5350837", "0.53446305", "0.53441405", "0.5338047", "0.5315087", "0.53102076", "0.5310106" ]
0.8006258
0
Return total but in a pretty format with Euro sign.
def get_total_display(self):
        total = self.total
        return '%.2f\N{euro sign}' % total
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_total_elle(self):\r\n \r\n return str(round(self._total_elle, 2))", "def get_total(self):\r\n \r\n return str(round(self._total, 2))", "def amount_ui(self) -> str:\n return \"{:,.2f}\".format(self.amount)", "def display_price(self):\n return '$ '+str(self.price)", "def currency(self, commas=True):\n sign, digits, exp = self.quantize(Decimal('0.01')).as_tuple()\n digits = list(map(unicode, digits))\n result = []\n for i in range(2):\n result.append(digits.pop() if digits else u'0')\n result.append(u'.')\n if not digits:\n result.append(u'0')\n count = 0\n while digits:\n result.append(digits.pop())\n count += 1\n if count == 3 and digits and commas:\n count = 0\n result.append(u',')\n result.append(u'-' if sign else u'')\n return u''.join(reversed(result))", "def eur(value):\n float(value)\n return f\"€{value:,.2f}\"", "def format_usd(my_price):\n return f\"${my_price:,.2f}\"", "def money_format(ammount):\n\td = Decimal(ammount) / Decimal(\"100\")\n\treturn u'£%s' % d.quantize(Decimal(\"0.01\"))", "def format_as_usd(value):\n return f\"${value:,.2f}\"", "def text_transform(val):\n if CURRENCY == \"USD\":\n return \"$%d\" % val\n if CURRENCY == \"EUR\":\n return \"‎€%d\" % val\n if CURRENCY == \"GBP\":\n return \"£%d\" % val\n return \"%d\" % val", "def format_currency(amount):\n pretty_amount = str(amount)\n\n if amount < 0:\n pretty_amount = pretty_amount[:1] + \"$\" + pretty_amount[1:]\n else:\n pretty_amount = \"$%s\" % pretty_amount\n\n return pretty_amount", "def __str__(self) -> str:\n return f'{self.amount}{self.currency}'", "def func(pct, allvals):\n return str(format(round(pct/100.*np.sum(allvals), 2),\".2f\")) + \"€\"", "def convert_to_euros(self):\n return 'Currency is', self.currency_type", "def display(self):\n\n return str(self.total)", "def to_usd(my_price):\n return f\"${my_price:,.2f}\"", "def format_amount(self) -> str:\n if self.amount_debit != '':\n return self.amount_debit.replace('-', '')\n return self.amount_credit.replace('-', '')", "def __str__(self):\n return str(self.currentTotal)", "def format_tuition(self, data):\n d = u'$%.2f' % data\n return d.replace('.00','')", "def to_usd(price):\n return \"${0:,.2f}\".format(price)", "def calculate_tax(subtotal):\n return \"TAX: \"+format_usd(0.0875*subtotal)", "def to_usd(my_price):\n return f\"${my_price:,.2f}\" #> $12,000.71", "def to_usd(my_price):\n return f\"${my_price:,.2f}\" #> $12,000.71", "def to_usd(my_price):\n return f\"${my_price:,.2f}\" #> $12,000.71", "def to_usd(my_price):\n return f\"${my_price:,.2f}\" #> $12,000.71", "def to_usd(my_price):\n return f\"${my_price:,.2f}\" #> $12,000.71", "def usd(value):\r\n return f\"${Decimal(value):,.2f}\"", "def to_usd(my_price):\n return \"${0:,.2f}\".format(my_price)", "def pretty_print(self, value, add_unit=False):\n s = \"%.1f\" % self.internal_to_friendly(value)\n if add_unit: s += \" \" + self.friendly_units\n return s", "def dollar():\r\n price = give_price_website_2(\"https://www.tgju.org/%D9%82%DB%8C%D9%85%D8%AA-%D8%AF%D9%84%D8%A7%D8%B1\")\r\n\r\n if users_language[update.effective_chat.id] == \"english\":\r\n return \"dollar : \" + format(price/10000, '.2f') + \" kTomans\"\r\n elif users_language[update.effective_chat.id] == \"persian\":\r\n return \" هزارتومان\" + format(price/10000000, '.3f') + \"دلار : \"", "def format_price(value: Decimal, order: Order, request: HttpRequest) -> str:\n context = {\n \"request\": request,\n \"order\": order,\n \"admin\": True,\n }\n return app_settings.SALESMAN_PRICE_FORMATTER(value, context=context)", "def 
total_terbilang(self, amount_total):\n unit = [\"\",\"Satu\",\"Dua\",\"Tiga\",\"Empat\",\n \"Lima\",\"Enam\",\"Tujuh\",\"Delapan\",\n \"Sembilan\",\"Sepuluh\",\"Sebelas\"]\n result = \" \"\n total_terbilang = self.total_terbilang\n for line in self:\n n = int(amount_total)\n if n >= 0 and n <= 11:\n result = result + unit[n]\n elif n < 20:\n result = total_terbilang(n % 10) + \" Belas\"\n elif n < 100:\n result = total_terbilang(n / 10) + \" Puluh\" + total_terbilang(n % 10)\n elif n < 200:\n result = \" Seratus\" + total_terbilang(n - 100)\n elif n < 1000:\n result = total_terbilang(n / 100) + \" Ratus\" + total_terbilang(n % 100)\n elif n < 2000:\n result = \" Seribu\" + total_terbilang(n - 1000)\n elif n < 1000000:\n result = total_terbilang(n / 1000) + \" Ribu\" + total_terbilang(n % 1000)\n elif n < 1000000000:\n result = total_terbilang(n / 1000000) + \" Juta\" + total_terbilang(n % 1000000)\n else:\n result = total_terbilang(n / 1000000000) + \" Miliar\" + total_terbilang(n % 1000000000)\n return result", "def format_currency(value, currency=None, show_if_zero=False):\n if not value and not show_if_zero:\n return ''\n if value == 0.0:\n return g.ledger.quantize(Decimal(0.0), currency)\n return g.ledger.quantize(value, currency)", "def __str__(self):\n return \"Current total: {}\".format(self.current_total)", "def euro():\r\n price = give_price_websites_1(\"https://www.tgju.org/profile/price_eur\")\r\n\r\n if users_language[update.effective_chat.id] == \"english\":\r\n return \"euro : \" + format(price/10000, '.2f') + ' kTomans'\r\n elif users_language[update.effective_chat.id] == \"persian\":\r\n return \" هزارتومان\" + format(price/10000000, '.3f') + \"یورو : \"", "def format_currency(value, currency=\"USD\"):\n symbol = CURRENCIES[currency][\"symbol\"]\n\n return \"%s%s\" % (symbol, round(value) / 100)", "def local_price(amount, currency):\n amt = convert(amount, currency)\n sym = symbol(currency)\n return f'{sym}{amt}'", "def report(self):\n print(f\"Money: {self.CURRENCY}{self.profit}\")", "def __str__(self) -> str:\n\n return f\"{self.user.name}'s balance is **$`{self.balance:,.2f}`**.\"", "def format_price(self, price):\n precision = self._price_limits[3] or 8\n tick_size = self._price_limits[2] or 0.00000001\n\n adjusted_price = truncate(round(price / tick_size) * tick_size, precision)\n formatted_price = \"{:0.0{}f}\".format(adjusted_price, precision)\n\n # remove tailing 0s and dot\n if '.' in formatted_price:\n formatted_price = formatted_price.rstrip('0').rstrip('.')\n\n return formatted_price", "def l10n_mx_edi_amount_to_text(self, amount_total):\n self.ensure_one()\n currency = self.currency_id.name.upper()\n # M.N. = Moneda Nacional (National Currency)\n # M.E. 
= Moneda Extranjera (Foreign Currency)\n currency_type = 'M.N' if currency == 'MXN' else 'M.E.'\n # Split integer and decimal part\n amount_i, amount_d = divmod(amount_total, 1)\n amount_d = round(amount_d, 2)\n amount_d = int(round(amount_d * 100, 2))\n words = self.currency_id.with_context(lang='es_ES').amount_to_text(amount_i).upper()\n invoice_words = '%(words)s %(amount_d)02d/100 %(curr_t)s' % dict(\n words=words, amount_d=amount_d, curr_t=currency_type)\n return invoice_words", "def format_amount(amount):\n if not amount:\n return ''\n return \"{} {}\".format(format_currency(amount.number, amount.currency),\n amount.currency)", "def deltastr(num, include_sign=True, currency=False):\n if num == 0:\n return ''\n elif num > 0:\n b4 = Fore.GREEN\n elif num < 0:\n b4 = Fore.RED\n signage = '+' if include_sign else ''\n b4 += '$' if currency else ''\n numfmt = ',.0f' if currency else ''\n return f'{b4}{num:{signage}{numfmt}}{Style.RESET_ALL}'", "def amount(self):\n return self.subtotal + self.tax_subtotal + self.shipping", "def get_total_price(self):\n return sum(Decimal(item[\"price\"]) * item[\"qty\"] for item in self.cart.values())", "def total_price(self) -> Decimal:\n return self.unit_price * self.quantity", "def calculate_total_price(total, taxes):\n total_price = total + taxes\n return total_price", "def currency_word(self):\n result = \" \"\n for cr in self:\n if cr.currency_id.name == 'USD':\n result = \" Dollar\"\n elif cr.currency_id.name == 'EUR':\n result = \" Euro\"\n elif cr.currency_id.name == 'JPY':\n result = \" Yen\"\n elif cr.currency_id.name == 'IDR':\n result = \" Rupiah\"\n else:\n result = \" \"\n return result", "def string(self):\n table = Table(2)\n table.add_row(['Name:', f'{self._name}'])\n table.add_row(['Type:', f'{self.account_type}'])\n total = sum([asset.adjusted_value()\n for asset in self._assets.values()])\n table.add_row(['Total:', utils.format_money(total)])\n if self._cash:\n table.add_row(['Available Cash:',\n utils.format_money_delta(self._cash)])\n return table.string(tablefmt='plain')", "def get_total(self):\n\n subtotal = super(InternationalMelonOrder, self).get_total()\n if self.qty < 10:\n total = subtotal + 3\n\n return total", "def getCurrencySymbol():", "def get_total_price(self):\n subtotal = sum(Decimal(item['price']) * item['qty'] for item in self.basket.values())\n\n if subtotal == 0:\n shipping = Decimal(0.00)\n else:\n shipping = Decimal(11.50)\n\n total = subtotal + Decimal(shipping)\n return total", "def get_total(self):\n\n total = super(InternationalMelonOrder, self).get_total()\n if self.qty < 10:\n total = total + 3\n return total", "def get_total(self):\n\n total = super().get_total()\n if self.qty < 10:\n total += 3.00\n return total", "def currency(self) -> str:\n return self._currency", "def total_equity(self):\n return self.total_market_value + self.cash", "def update_total(self):\n self.objects[self.ids.AMOUNT].setText(\"Total Spend: \\xA3%.2f\" % (self.owner.total_price() / 100))", "def print_total_value():\n sum = 0.0\n for item in data:\n sum += (item['price'] * item['stock'])\n\n print(f\"total stock value = {sum}\")", "def get_total(self):\n\n base_price = 5\n total = (1 + int(self.tax)) * int(self.qty) * base_price\n\n return total", "def __str__(self):\n return \"{} ({}) : ${:,.2f}\".format(self.name, self.year, self.cost)", "def __str__(self):\n return \"{} ({}) : ${:,.2f}\".format(self.name, self.year, self.cost)", "def render_money(amount: Money, message: str = \"\") -> str:\n\n return f\"{message} 
{amount.amount} {amount.currency}\"", "def test_default_w_decimals(self):\n self.assertEqual(currency(188.00), \"$188.00\")", "def get_formated_price(\n amount: Decimal,\n precision: int = DEFAULT_DECIMAL_PLACES\n) -> str:\n return \"{:0.0{}f}\".format(amount, precision)", "def subtotal(prices):\n\ttotal = Decimal(\"0.00\")\n\n\tfor price in prices:\n\t\ttotal += price[1]\n\n\treturn total", "def table_total(self):\n total = 0.00\n\n for customer in self.customers:\n total = total + customer.get_total()\n\n return total", "def get_price(self):\n return f'{self.soup.find(attrs={\"class\": \"woocommerce-Price-amount\"}).text}'", "def to_currency(cents: int) -> str:\n return f\"${cents / 100:.2f}\"", "def clean_currency(x: str):\n # cprint(f\"### Function Name:-> {inspect.stack()[0][3]} ###\", 'yellow', 'on_grey', attrs=['bold'])\n try:\n # x = str(x)\n if isinstance(x, str):\n if x.startswith(\"$\"):\n return x.replace('$', '').replace(',', '')\n # return float(x)\n return x\n except Exception as ex:\n cprint(traceback.format_exc(), 'red')\n log_exception(traceback.format_exc())", "def get_total(self):\n\n base_price = 5\n \n if self.species == \"Christmas melon\":\n base_price = base_price * 1.5 \n\n total = (1 + self.tax) * self.qty * base_price \n\n if self.order_type == \"international\" and self.qty>10:\n total += 3\n\n\n return total", "def price_pounds(self):\n price = '{0:03d}'.format(self.price)\n return price[:-2] + '.' + price[-2:]", "def silver_card(subtotal):\n return subtotal * 0.02", "def convert_to_dollars(self):\n return 'Currency is', self.currency_type", "def total(self):\n return self._total_name", "def calc_total_btc():\n total_btc_val = 0\n for holding in val[\"accHoldings\"]:\n free = val[\"accHoldings\"][holding][\"free\"]\n locked = val[\"accHoldings\"][holding][\"locked\"]\n total = float(free) + float(locked)\n\n if holding + \"BTC\" in val[\"coins\"]:\n if holding != \"BTC\" and total * float(val[\"tickers\"][holding + \"BTC\"][\"lastPrice\"]) > 0.001:\n\n coin_total = total * float(val[\"tickers\"][holding + \"BTC\"][\"lastPrice\"])\n total_btc_val += coin_total\n\n elif holding == \"BTC\":\n total_btc_val += total\n\n total_formatted = '{number:.{digits}f}'.format(number=float(total_btc_val), digits=8) + \" BTC\"\n # print(\"total: \" + total_formatted)\n return total_formatted", "def get_total(self):\n\n base_price=5\n if self.species == \"Christmas\":\n base_price=1.5*base_price\n \n total = (1 + self.tax) * self.qty * base_price\n\n if self.order_type==\"international\" and self.qty<10:\n total+=3\n\n return total", "def somme_encaissee(self) -> Numeric:\n return query_sum(\n self.offres().filter(paye=True),\n \"prix\",\n output_field=models.DecimalField(),\n )", "def get_total(self):\n\n base_price = self.get_base_price()\n\n if self.species == \"Christmas\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def get_total(self):\n\n base_price = self.get_base_price()\n if self.species == \"christmas melon\":\n base_price = base_price * 1.5\n\n total = ((1 + self.tax) * self.qty * base_price)\n\n return total", "def sub_total():\n return sum(SAVE_PRICE)", "def subtotal(self):\n return self.cantidad * self.precio", "def get_inbound_statement_details_total_amount_with_taxes(self):\n return self.get_text_from_element(self.inbound_statements_details_total_amount_with_taxes_locator, True)", "def __str__(self):\n number_stars = (30-len(self.name))//2\n title_line = '*'*number_stars+self.name+'*'*number_stars\n 
corpus = ''\n for i in range(len(self.ledger)):\n corpus += (((self.ledger[i])['description']))[0:min(23, len((self.ledger[i])['description']))].ljust(23)+(\n str(\"{:.2f}\".format(round(float((self.ledger[i])['amount']), 2)))).rjust(7)+'\\n'\n Total = 'Total: '+str(\"{:.2f}\".format((round(float(self.get_balance()), 2))))\n return title_line+'\\n'+corpus+Total", "def total_price(self):\n return self.owner.total_price()", "def total_principal(self) -> Decimal:\n return self._quantize(self.principal)", "def __str__(self):\n return \"{}, {}km on current fare, ${:.2f}/km\".format(super().__str__(),\n self.current_fare_distance,\n self.price_per_km)", "def _get_toal_cp_(obj):\n \n fTotal = 0.0\n for item in obj.order_line:\n fTotal += item.purchase_price * item.product_uom_qty\n \n return fTotal", "def subtotal(self):\n return self.precio_unitario * self.cantidad", "def _get_decimal(decimalPart):\n units = [u'sıfır', u'bir', u'iki', u'üç', u'dört', u'beş', u'altı', u'yedi', u'sekiz', u'dokuz']\n comma = u'virgül'\n return ' '.join([units[int(i)] for i in decimalPart]), comma", "def format_value(self, value: float) -> str:\r\n ...", "def format_value(self, value: float) -> str:\r\n ...", "def total(self):\n total_price = self.get_total_amount()\n discounts = self.get_total_discount()\n\n return total_price - discounts", "def __str__(self):\n string = \"\"\n for i in range(len(self.book[Trade.WAY_SELL])-1, -1, -1):\n string = string + \"%.10f\\t\\t%.8f\\n\" % (self.book[Trade.WAY_SELL][i].get_price(),\n self.book[Trade.WAY_SELL][i].get_quote_amount())\n string = string + \"-----------------------------------\\n\"\n for i in range(len(self.book[Trade.WAY_BUY])):\n string = string +\"%.10f\\t\\t%.8f\\n\" % (self.book[Trade.WAY_BUY][i].get_price(),\n self.book[Trade.WAY_BUY][i].get_quote_amount())\n return string", "def format_price(self, price_text):\n return int(re.sub(r\"\\D\", \"\", price_text))", "def convert_to_currency(self, value):\n return (Decimal(value) * Decimal(self.kurzMnozstvi) / \\\n Decimal(self.nbStred))", "def printCurrent():\n print (\"Total:\", calc_get_total())", "def total_discount_incl_tax(self):\n discount = D(\"0.00\")\n for line in self.lines.all():\n discount += line.discount_incl_tax\n return discount", "def format_str() -> str:\r\n decimal_places = conf.instance[\"general\"][\"output\"][\"model_results_decimal_places\"]\r\n return f\"{{:.{decimal_places}f}}\"", "def credits_to_string_with_exact_value(amount: int, separator: str = ' ', significant_numbers: int = 3) -> str:\n if amount >= 10**6:\n return '{}{}({:,} C)'.format(credits_to_string(amount), separator, amount)\n else:\n return '{:,} C'.format(amount)", "def calc_total_money(stock):\n tot_amount = stock[\"five\"] * 5\n tot_amount += stock[\"one\"]\n tot_amount += stock[\"quarter\"] / 4\n tot_amount += stock[\"dime\"] / 10\n tot_amount += stock[\"nickel\"] / 20\n \n return (int(tot_amount), int(str(tot_amount)[str(tot_amount).find('.')+1::]))" ]
[ "0.6930728", "0.6926814", "0.6910748", "0.69088656", "0.65733093", "0.6563159", "0.65392554", "0.6340894", "0.62750125", "0.62716407", "0.62608546", "0.6224019", "0.6152539", "0.61229", "0.6107103", "0.60937685", "0.60425156", "0.6040899", "0.60386825", "0.6008453", "0.59992117", "0.597815", "0.597815", "0.597815", "0.597815", "0.597815", "0.5967632", "0.5877372", "0.58641386", "0.5839082", "0.5836186", "0.582157", "0.57954216", "0.57570106", "0.57382184", "0.5728359", "0.57264245", "0.57114804", "0.56891966", "0.5645381", "0.561123", "0.55915564", "0.5585375", "0.55806965", "0.5571616", "0.5568905", "0.5566557", "0.55625314", "0.55621594", "0.554628", "0.5546104", "0.5530384", "0.5529327", "0.5523819", "0.5523265", "0.5508854", "0.5507474", "0.5495174", "0.5491699", "0.54823786", "0.54823786", "0.54777694", "0.54726714", "0.54681545", "0.546405", "0.5451127", "0.5446564", "0.543648", "0.54335487", "0.54296786", "0.54028314", "0.53893685", "0.5386175", "0.5385845", "0.5383304", "0.5379237", "0.5368862", "0.5350386", "0.53460085", "0.534244", "0.5340363", "0.5335611", "0.5331463", "0.5329677", "0.5328189", "0.5326515", "0.532457", "0.5320386", "0.53135043", "0.5309894", "0.5309894", "0.53060895", "0.53033584", "0.52941346", "0.5291802", "0.52892435", "0.5275516", "0.52697563", "0.52610767", "0.525847" ]
0.81358236
0
Create variables for tests.
def setUp(self):
        self.sync = synchronization.Sync()
        self.game = game.Game()
        self.leaderboards = leaderboards.Leaderboards()
        self.leaderboards.scoreboard = leaderboards.Leaderboards.scoreboard
        self.sync.file1 = self.f1
        self.sync.file2 = self.f2
        self.player1 = self.game.create_player("Drake testing")
        self.player2 = self.game.create_player("Benson testing")
        os.mkdir(self.directory)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testTurntableVariables(self):\n crawler = Crawler.create(PathHolder(self.__exrFile))\n self.assertEqual(crawler.var(\"type\"), \"turntable\")\n self.assertEqual(crawler.var(\"category\"), \"render\")\n self.assertEqual(crawler.var(\"renderType\"), \"tt\")\n self.assertEqual(crawler.var(\"assetName\"), \"ass\")\n self.assertEqual(crawler.var(\"step\"), \"lookdev\")\n self.assertEqual(crawler.var(\"pass\"), \"beauty\")\n self.assertEqual(crawler.var(\"renderName\"), \"ass-default-beauty\")", "def test_variables_get(self):\n pass", "def _get_test_variables():\n if __name__ == \"__main__\":\n return _MOCKUP_TEST_VARIABLES\n else:\n return BuiltIn().get_variables()", "def test_variables_post(self):\n pass", "def get_variables(test_case, name):\n\n test_case = test_case.lower()\n variables = {\n # Variables for control packet\n \"src_ip\": \"16.0.0.1\",\n \"dst_ip\": \"16.0.1.1\",\n \"src_net\": \"16.0.0.0\",\n \"dst_net\": \"16.0.1.0\",\n \"src_port\": \"1234\",\n \"dst_port\": \"1234\",\n \"src_mac\": \"01:02:03:04:05:06\",\n \"dst_mac\": \"10:20:30:40:50:60\"}\n\n test_vars = {\n \"macip\": {\n # MACs classified directly\n \"classify_src\": \"12:23:34:45:56:67\",\n \"classify_dst\": \"89:9A:AB:BC:CD:DE\",\n # MACs classified through mask\n \"classify_src2\": \"01:02:03:04:56:67\",\n \"classify_dst2\": \"89:9A:AB:BC:50:60\",\n \"src_mask\": \"00:00:00:00:FF:FF\",\n \"dst_mask\": \"FF:FF:FF:FF:00:00\"\n },\n \"l3_ip4\": {\n # IPs for DUT interface setup\n \"dut_to_tg_if1_ip\": \"16.0.0.2\",\n \"dut_to_tg_if2_ip\": \"192.168.0.2\",\n \"prefix_length\": 24,\n \"gateway\": \"192.168.0.1\",\n # classified networks\n \"classify_src_net\": \"16.0.2.0\",\n \"classify_dst_net\": \"16.0.3.0\",\n # IPs in classified networks\n \"classify_src\": \"16.0.2.1\",\n \"classify_dst\": \"16.0.3.1\",\n },\n \"l3_ip6\": {\n # Override control packet addresses with IPv6\n \"src_ip\": \"10::1\",\n \"dst_ip\": \"11::1\",\n \"dst_net\": \"11::\",\n # IPs for DUT interface setup\n \"dut_to_tg_if1_ip\": \"10::2\",\n \"dut_to_tg_if2_ip\": \"20::2\",\n \"prefix_length\": 64,\n \"gateway\": \"20::1\",\n # classified networks\n \"classify_src_net\": \"12::\",\n \"classify_dst_net\": \"13::\",\n # IPs in classified networks\n \"classify_src\": \"12::1\",\n \"classify_dst\": \"13::1\",\n },\n \"l4\": {\n # IPs for DUT interface and route setup\n \"dut_to_tg_if1_ip\": \"16.0.0.2\",\n \"dut_to_tg_if2_ip\": \"192.168.0.2\",\n \"prefix_length\": 24,\n \"gateway\": \"192.168.0.1\",\n \"classify_dst_net\": \"16.0.3.0\",\n # Ports in classified ranges\n \"classify_src\": 60000,\n \"classify_dst\": 61000,\n },\n \"mixed\": {\n # IPs for DUT interface and route setup\n \"dut_to_tg_if1_ip\": \"16.0.0.2\",\n \"dut_to_tg_if2_ip\": \"192.168.0.2\",\n \"prefix_length\": 24,\n \"gateway\": \"192.168.0.1\",\n \"classify_dst_net\": \"16.0.3.0\",\n # IPs in classified networks\n \"classify_src_ip\": \"16.0.2.1\",\n \"classify_dst_ip\": \"16.0.3.1\",\n # Ports in classified ranges\n \"classify_src_port\": 60000,\n \"classify_dst_port\": 61000,\n },\n \"icmp\": {\n # ICMP code and type for control packet\n \"icmp_type\": 0,\n \"icmp_code\": 0,\n # classified ICMP code and type\n \"classify_type\": 3,\n \"classify_code\": 3\n\n },\n \"icmpv6\": {\n # Override control packet addresses with IPv6\n \"src_ip\": \"10::1\",\n \"dst_ip\": \"11::1\",\n \"dst_net\": \"11::\",\n # IPs for DUT interface setup\n \"dut_to_tg_if1_ip\": \"10::2\",\n \"dut_to_tg_if2_ip\": \"20::2\",\n \"prefix_length\": 64,\n \"gateway\": \"20::1\",\n # classified 
networks\n \"classify_src_net\": \"12::\",\n \"classify_dst_net\": \"13::\",\n # ICMP code and type for control packet\n \"icmp_type\": 1,\n \"icmp_code\": 0,\n # classified ICMP code and type\n \"classify_type\": 4,\n \"classify_code\": 2\n\n },\n \"reflex\": {\n # IPs for DUT interface setup\n \"dut_to_tg_if1_ip\": \"16.0.0.2\",\n \"dut_to_tg_if2_ip\": \"192.168.0.2\",\n \"prefix_length\": 24,\n \"gateway\": \"192.168.0.1\",\n \"gateway2\": \"192.168.0.1\",\n # classified networks\n \"classify_src_net\": \"16.0.2.0\",\n \"classify_dst_net\": \"16.0.3.0\",\n # IPs in classified networks\n \"classify_src\": \"16.0.2.1\",\n \"classify_dst\": \"16.0.3.1\",\n },\n \"block_all\": {}\n }\n acl_data = {\n # ACL configuration for L2 tests\n\n \"macip\": {\n \"acl\": [{\n \"name\": name,\n \"type\": \"vpp-acl:vpp-macip-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n\n \"eth\": {\n \"source-mac-address\": test_vars[\"macip\"][\"classify_src\"],\n \"source-mac-address-mask\": test_vars[\"macip\"][\"src_mask\"]\n },\n \"ipv4\": {\n\n \"source-ipv4-network\": \"16.0.0.0/24\"\n }\n },\n\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n\n \"eth\": {\n \"source-mac-address\": test_vars[\"macip\"][\"classify_src\"],\n \"source-mac-address-mask\": \"00:00:00:00:00:00\"\n },\n\n \"ipv4\": {\n \"source-ipv4-network\": \"0.0.0.0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]}\n }\n ]\n },\n # ACL configuration for L3 IPv4 tests\n \"l3_ip4\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n \"ipv4\": {\n \"destination-ipv4-network\": \"{0}/{1}\".format(\n test_vars[\"l3_ip4\"][\"classify_dst_net\"],\n test_vars[\"l3_ip4\"][\"prefix_length\"]),\n \"source-ipv4-network\": \"{0}/{1}\".format(\n test_vars[\"l3_ip4\"][\"classify_src_net\"],\n test_vars[\"l3_ip4\"][\"prefix_length\"])\n },\n \"udp\":{\n \"source-port\": {\n \"lower-port\": \"0\",\n \"upper-port\": \"65535\"\n },\n \"destination-port\": {\n \"lower-port\": \"0\",\n \"upper-port\": \"65535\"\n }\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv4\": {\n \"destination-ipv4-network\": \"0.0.0.0/0\",\n \"source-ipv4-network\": \"0.0.0.0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]\n }\n }\n ]\n },\n # ACL settings for L3 IPv6 tests\n \"l3_ip6\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n \"ipv6\": {\n \"destination-ipv6-network\": \"{0}/{1}\".format(\n test_vars[\"l3_ip6\"][\"classify_dst_net\"],\n test_vars[\"l3_ip6\"][\"prefix_length\"]),\n \"source-ipv6-network\": \"{0}/{1}\".format(\n test_vars[\"l3_ip6\"][\"classify_src_net\"],\n test_vars[\"l3_ip6\"][\"prefix_length\"])\n },\n \"udp\":{\n \"source-port\": {\n \"lower-port\": \"0\",\n \"upper-port\": \"65535\"\n },\n \"destination-port\": {\n \"lower-port\": \"0\",\n \"upper-port\": \"65535\"\n }\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv6\": {\n \"destination-ipv6-network\": \"0::0/0\",\n \"source-ipv6-network\": \"0::0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]\n 
}\n }\n ]\n },\n # ACL configuration for L4 tests\n \"l4\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n \"ipv4\": {\n \"source-ipv4-network\": \"0.0.0.0/0\"\n },\n \"tcp\": {\n \"source-port\": {\n \"lower-port\": test_vars[\"l4\"][\"classify_src\"],\n \"upper-port\": test_vars[\"l4\"][\"classify_src\"] + 10\n },\n \"destination-port\":{\n \"lower-port\": test_vars[\"l4\"][\"classify_dst\"],\n \"upper-port\": test_vars[\"l4\"][\"classify_dst\"] + 10\n }\n }\n },\n \"actions\":{\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv4\": {\n \"source-ipv4-network\": \"0.0.0.0/0\",\n \"destination-ipv4-network\": \"0.0.0.0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]\n }\n }\n ]\n },\n \"mixed\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"ports\",\n \"matches\": {\n \"ipv4\": {\n \"source-ipv4-network\": \"0.0.0.0/0\"\n },\n \"tcp\": {\n \"source-port\": {\n \"lower-port\": test_vars[\"l4\"][\"classify_src\"],\n \"upper-port\": test_vars[\"l4\"][\"classify_src\"] + 10\n },\n \"destination-port\":{\n \"lower-port\": test_vars[\"l4\"][\"classify_dst\"],\n \"upper-port\": test_vars[\"l4\"][\"classify_dst\"] + 10\n }\n }\n },\n \"actions\":{\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv4\": {\n \"destination-ipv4-network\": \"0.0.0.0/0\",\n \"source-ipv4-network\": \"0.0.0.0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]\n }\n }\n ]\n },\n \"icmp\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n \"ipv4\": {\n \"source-ipv4-network\": \"0.0.0.0/0\"\n },\n \"icmp\": {\n \"vpp-acl:vpp-icmp-ace\": {\n \"vpp-acl:icmp-type-range\": {\n \"first\": \"1\",\n \"last\": \"5\"\n },\n \"vpp-acl:icmp-code-range\": {\n \"first\": \"1\",\n \"last\": \"5\"\n }\n }\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv4\": {\n \"source-ipv4-network\": \"0.0.0.0/0\",\n \"destination-ipv4-network\": \"0.0.0.0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]\n }\n }\n ]\n },\n \"icmpv6\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n \"ipv6\": {\n \"source-ipv6-network\": \"::/0\",\n },\n \"icmp\": {\n \"vpp-acl:vpp-icmp-ace\": {\n \"vpp-acl:icmp-type-range\": {\n \"first\": \"1\",\n \"last\": \"5\"\n },\n \"vpp-acl:icmp-code-range\": {\n \"first\": \"1\",\n \"last\": \"5\"\n }\n }\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n },\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv6\": {\n \"destination-ipv6-network\": \"0::0/0\",\n \"source-ipv6-network\": \"::/0\",\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:accept\"\n }\n }\n ]\n }\n }\n ]\n },\n \"reflex\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule1\",\n \"matches\": {\n \"ipv4\": {\n \"destination-ipv4-network\": \"{0}/{1}\".format(\n test_vars[\"reflex\"][\"classify_src_net\"],\n 
test_vars[\"reflex\"][\"prefix_length\"]),\n \"source-ipv4-network\": \"{0}/{1}\".format(\n test_vars[\"reflex\"][\"classify_dst_net\"],\n test_vars[\"reflex\"][\"prefix_length\"])\n }\n },\n \"actions\": {\n \"forwarding\": \"vpp-acl:accept-and-reflect\"\n }\n }\n ]\n }\n }\n ]\n },\n \"block_all\": {\n \"acl\": [\n {\n \"name\": name,\n \"type\": \"vpp-acl:vpp-acl\",\n \"aces\": {\n \"ace\": [\n {\n \"name\": \"rule_all\",\n \"matches\": {\n \"ipv4\": {\n \"destination-ipv4-network\": \"0.0.0.0/0\",\n \"source-ipv4-network\": \"0.0.0.0/0\"\n }\n },\n \"actions\": {\n \"forwarding\": \"ietf-access-control-list:drop\"\n }\n }\n ]\n }\n }\n ]\n },\n }\n\n try:\n ret_vars = {}\n ret_vars.update(variables)\n ret_vars.update(test_vars[test_case])\n ret_vars.update(\n {\"acl_settings\": acl_data[test_case]}\n )\n except KeyError:\n raise KeyError(\n \"Unrecognized test case {0}. Valid options are: {1}\".format(\n test_case, acl_data.keys()))\n return ret_vars", "def make_vars(tups):\n return dict([(varname, value) for varname, value in tups])", "def make_vars(self):\n here=self.PWD\n PWD_UP1=os.path.dirname(here)\n PWD_UP2=os.path.dirname(PWD_UP1)\n PWD_UP3=os.path.dirname(PWD_UP2)\n PWD_UP4=os.path.dirname(PWD_UP3)\n PWD_UP5=os.path.dirname(PWD_UP4)\n return { 'PWD_UP1':PWD_UP1, 'PWD_UP2':PWD_UP2,\n 'PWD_UP3':PWD_UP3, 'PWD_UP4':PWD_UP4,\n 'PWD_UP5':PWD_UP5, 'OUTPUT_PATH':self.outloc,\n 'PWD': here }", "def test_variablepresentations_post(self):\n pass", "def set_vars():\n return dict()", "def _test_template_data(self):\n chars=string.ascii_uppercase + string.digits\n id = ''.join(random.choice(chars) for x in range(6))\n\n return {\n 'test_module': self.test_modulename(),\n 'driver_module': self.driver_modulename(),\n 'driver_dir': self.driver_dir(),\n 'file': self.driver_relative_path(),\n 'author': self.metadata.author,\n 'driver_name': self.metadata.driver_name,\n 'constructor': self.metadata.constructor,\n 'full_instrument_lower': self.metadata.driver_name.lower(),\n 'full_instrument_camelcase': self.driver_name_camelcase(),\n }", "def test_swift_globals(self):\n self.build()\n self.do_test()", "def setUp_extra(self):\n #todo: is this ugly? At least there is explicit assignment of vars.\n # How to do this better? 
\n [self.testproject,\n self.root,\n self.projectadmin,\n self.participant,\n self.registered_user] = self._create_dummy_project(\"view-test\")", "def test_variablepresentations_get(self):\n pass", "def test_adding_variable(self):\n\t\turl = reverse('variables')\n\t\tdata = {'variableName': 'a', 'variableValue': 123}\n\t\tresponse = self.client.post(url, data, format='json')\n\t\ts = self.client.session\n\t\tself.assertEqual(s['variables'], {'a': 123, 'b':567, 'c': 936})\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data, {'variableName': 'a', 'variableValue': 123})", "def create_test_file(test_path, robot_test_name, entry_url, full_path):\n new_test_file = test_path + '\\\\' + robot_test_name + '.tstest'\n shutil.copyfile(template_test_file, new_test_file) #note shutil.copyfile() overwrites target file if it exists\n r = requests.get(entry_url)\n # print r.content\n # fill in TestPrototypeParameter interface XML element and replace hard coded Param1 by variable name\n # fill in SingleVariable interface XML element and replace hard coded default_val by default value\n robot_arguments = ''\n replacements = dict()\n if VAR:\n interface_section = ''\n variable_section = ''\n report_expression_section = ''\n\n # by default, no need to rename robot variable in test unless there is space in the name\n variable_renames = dict()\n for variable in retrieve_variables(r.content):\n variable_name = variable[0]\n variable_renames[variable_name] = variable_name\n # print variable_name\n\n # if variable name has single spaces in it, e.g. 'Example Input 1', replace by '_', e.g. 'Example_Input_1'\n # however if there is also robot variable 'Example_Input_1', then keep appending '_' for the corresponding\n # TestShell test variable until it is unique\n for variable_name, rename in variable_renames.iteritems():\n if ' ' in variable_name:\n # rename = variable_name.replace(' ', '_') #replace space in the name by underscore\n rename = re.sub('[^0-9a-zA-Z_]', '_', variable_name) # replace each unsupported char by underscore\n while rename in variable_renames:\n rename += '_'\n variable_renames[variable_name] = rename\n\n for variable in retrieve_variables(r.content):\n variable_name = variable[0]\n default_value = variable[1]\n replacements[variable_name_in_template] = variable_renames[variable_name]\n replacements[variable_original_name_in_template] = variable_name\n replacements[variable_default_value_in_template] = default_value\n interface_section += fill_template(test_interface_template, replacements)\n variable_section += fill_template(test_variable_template, replacements)\n report_expression_section += fill_template(report_expression_template, replacements)\n robot_arguments += \" --variable \\'\" + variable_name + \"\\':\\'{\" + variable_renames[variable_name] + \"}\\'\"\n\n replacements = {\"test1.robot\": robot_arguments + \" \\'\" + full_path + \"\\'\"} # reset dictionary\n if VAR:\n replacements[test_interface_template_fill_tag] = interface_section\n replacements[test_variable_template_fill_tag] = variable_section\n replacements[report_expression_template_fill_tag] = report_expression_section\n # the following initial values of required variables are hard coded in test template\n replacements['CLOUDSHELL_SERVER_ADDRESS_VALUE'] = cloudshell_server_address\n replacements['CLOUDSHELL_SERVER_PORT_VALUE'] = cloudshell_server_port\n replacements['CLOUDSHELL_USERNAME_VALUE'] = cloudshell_server_username\n replacements['CLOUDSHELL_PASSWORD_VALUE'] = 
cloudshell_server_password\n replacements['CLOUDSHELL_DOMAIN_VALUE'] = cloudshell_server_domain\n replacements['EXEC_SERVER_ADDRESS_VALUE'] = exec_server_address\n replacements['EXEC_USERNAME_VALUE'] = exec_server_username\n replacements['EXEC_PASSWORD_VALUE'] = exec_server_password\n replacements['BITBUCKET_REPOSITORY_URL'] = bitbucket_repository_url\n replacements['EXEC_SERVER_WORKING_DIR'] = exec_server_working_directory\n replacements['ROBOT_TESTS_DIR'] = robot_tests_directory\n replacements['ARCHIVE_OUTPUT_DIR'] = archive_output_directory\n replacements['LOCAL_WORKING_DIR'] = local_working_directory\n # print replacements\n substitute_string_in_tstest_file(new_test_file, replacements)\n new_test_file_ascii_name = new_test_file.encode('ascii', 'ignore') # otherwise UnicodeDecodeError\n return new_test_file_ascii_name", "def setUp(self):\n\n self.to_test = {\n \"Hello\": \"world\",\n \"World\": {\"world\", \"hello\"},\n \"funilrys\": [\"Fun\", \"Ilrys\"],\n \"Py\": \"Funceble\",\n \"pyfunceble\": [\"funilrys\"],\n }", "def create_variables(self, source, prior, data, ds):\n\n # Store variable-length data\n if source == \"gbnode\" and prior in self.NODE_LIST:\n node_id = ds.createVariable(\"node_id\", \"i8\", (\"num_nodes\",))\n node_id[:] = data[\"node_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_nodes\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_nodes\",))\n values[:] = data[\"values\"]\n\n elif prior in self.PROB_LIST:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\", \"probability\"))\n values[:] = data[\"values\"]\n\n elif prior in self.MONTHS_LIST:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\", \"num_months\"))\n values[:] = data[\"values\"]\n\n elif prior in self.DAYS_LIST:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\", \"num_days\"))\n values[:] = data[\"values\"]\n\n value_t = ds.createVariable(\"value_t\", \"f8\", (\"num_reaches\", \"num_days\"))\n value_t[:] = data[\"value_t\"]\n\n else:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\",))\n values[:] = data[\"values\"]", "def _create_variables(\n\t\tself,\n\t\tfood_items,\n\t\tnutrients):\n\n\t\t# Create food qty variables (1x per food item)\n\t\tvariables = {\n\t\t\t'food_qty': {}\n\t\t}\n\t\tfor food_id, food_item in food_items.items():\n\t\t\tvariable_name = self._build_food_qty_variable_name(food_item)\n\t\t\tvariables[self.FOOD_QTY_VARIABLES_CATEGORY][food_id] = 
pulp.LpVariable(\n\t\t\t\tvariable_name,\n\t\t\t\tlowBound=food_item[\"constraints\"][\"min_qty\"],\n\t\t\t\tupBound=food_item[\"constraints\"][\"max_qty\"])\n\n\t\tif self.use_demerits:\n\t\t\tvariables[self.NUTRIENT_DEMERIT_VARIABLES_CATEGORY] = {}\n\t\t\t# 1x variable per nutrient\n\t\t\tfor nutrient_id in nutrients:\n\t\t\t\tvariable_name = self._build_nutrient_demerit_variable_name(nutrient_id)\n\t\t\t\tvariables[self.NUTRIENT_DEMERIT_VARIABLES_CATEGORY][nutrient_id] = pulp.LpVariable(variable_name)\n\n\t\treturn variables", "def setup(self):\n self.testInst = pysat.Instrument('pysat', 'testing2D_xarray',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'variable_profiles'\n self.test_val_length = 15\n\n return", "def test_variables_id_put(self):\n pass", "def calculate_vars(self):\n pass", "def setup_vars(self):\n # Add Full time positions\n self.manager_id = self._add_person(\"Manager\", \"ARRAY['Database', 'OS', 'AI']\", 30)\n self.admin_id = self._add_person(\"Admin\", salary=40)\n self.full_instructor_id = self._add_person(\n \"Instructor\", \"ARRAY['Database']\", 20\n )\n\n # Add Part time instructor\n self.part_instructor_id = self._add_part_time_instr(\"ARRAY['OS']\", 10)\n self.part_instructor_id = self._add_part_time_instr(\"ARRAY['AI']\", 10)\n\n # Add courses\n self.course_id1 = self._add_course(\"Database\", 1, \"Database\")\n self.course_id2 = self._add_course(\"OS\", 1, \"OS\")\n self.course_id3 = self._add_course(\"AI\", 1, \"AI\")\n\n # Add room\n self.room_id = self._add_room(1, 'Test room', 20)\n self.room_id2 = self._add_room(2, 'Test room 2', 20)\n\n # Add course offerings\n self.course_offering1 = self._add_course_offering('2021-01-21', 10, [('2021-06-21', 9, self.room_id), ('2021-06-21', 11, self.room_id)], '2021-05-31', 20, self.course_id1, self.admin_id)\n self.course_offering2 = self._add_course_offering('2021-01-21', 10, [('2021-06-22', 9, self.room_id), ('2021-06-22', 11, self.room_id)], '2021-05-31', 20, self.course_id2, self.admin_id)\n self.course_offering3 = self._add_course_offering('2021-01-21', 10, [('2021-06-22', 9, self.room_id2), ('2021-06-22', 11, self.room_id2)], '2021-05-31', 20, self.course_id3, self.admin_id)\n\n # Add customers\n self.customer_id1 = self._add_customer('Test1', \"test\", 987654321, 'test@test.com', '1234123412341234', '123', '2025-05-31')\n self.customer_id2 = self._add_customer('Test2', \"test\", 987654321, 'test@test.com', '1234123412341235', '123', '2025-05-31')\n self.customer_id3 = self._add_customer('Test3', \"test\", 987654321, 'test@test.com', '1234123412341236', '123', '2025-05-31')\n\n # Register sessions\n self._register_credit_card('2021-01-21', self.course_id1, 1, self.customer_id1)\n self._register_credit_card('2021-01-21', self.course_id1, 1, self.customer_id2)\n self._register_credit_card('2021-01-21', self.course_id1, 1, self.customer_id3)\n\n # Add course packages\n self.package1 = self._add_course_package(\"Best Package\", 2, '2021-03-01', '2021-08-02', 50)\n self.package2 = self._add_course_package(\"Medium Package\", 2, '2021-03-01', '2021-08-02', 100)\n self.package3 = self._add_course_package(\"Worst Package\", 2, '2021-03-01', '2021-08-02', 150)\n\n # Buy course packages\n self._buy_package(self.customer_id1, self.package1)\n self._buy_package(self.customer_id2, self.package2)\n self._buy_package(self.customer_id3, self.package3)\n\n # Redeem sessions\n self._register_redeems('2021-01-21', self.course_id2, 1, self.customer_id1)\n 
self._register_redeems('2021-01-21', self.course_id2, 1, self.customer_id2)\n self._register_redeems('2021-01-21', self.course_id2, 1, self.customer_id3)\n\n # Cancel registrations\n self._cancel_registration(self.customer_id1, self.course_id1)\n self._cancel_registration(self.customer_id2, self.course_id2)", "def init_consistent_qa_variables(self):\n return tuple()", "def read_variables(self, dataset):\n if 'variables' in self.configs:\n for variable_name, variable_dict in self.configs['variables'].items():\n if variable_name not in dataset.variables:\n temp_var = dataset.createVariable(variable_name, self.configs['variables'][variable_name]['data_type'])\n temp_var[:] = self.configs['variables'][variable_name]['value']\n \n for key, value in variable_dict.items():\n if (key != 'data_type') and (key != 'value'):\n setattr(temp_var, key, value)", "def _init_tkvars(self,PO):\n for name,param in PO.params().items():\n self._create_tkvar(PO,name,param)", "def tests(self):\n\n return {\n 'variable_boolean': self.variable_boolean\n }", "def generate_variable_names():\n while True:\n name = uuid.uuid4()\n yield f\"_{name.hex}\"", "def create_variables(self, courses):\n has_sections = isinstance(courses, dict)\n for course in courses:\n self.p.add_variable(course, courses.get(course, []) if has_sections else self.get_sections(course))", "def init_vars(self):\n # type: () -> None\n raise NotImplementedError", "def test_all_variables_parsed(self):\n uris = [\n \"https://api.github.com\",\n \"https://api.github.com/users{/user}\",\n \"https://api.github.com/repos{/user}{/repo}\",\n \"https://api.github.com/repos{/user}{/repo}/issues{/issue}\",\n ]\n\n for i, uri in enumerate(uris):\n t = URITemplate(uri)\n self.assertEqual(len(t.variables), i)", "def test_by_variable():\n pass", "def generate_environment(dataset, tmpdir):\n\n print(\">>> Test environment:\")\n print(\"dataset:\", dataset)\n print(\"tmpdir:\", tmpdir)\n\n generate_test_environment(tmpdir, dataset)\n\n return { 'dataset': dataset, 'tmpdir': tmpdir }", "def setUp(self):\r\n self.full_id = 'edX/full/2012_Fall'\r\n self.toy_id = 'edX/toy/2012_Fall'", "def test_variables_id_get(self):\n pass", "def create_test_data(self):\n fake = Faker(['en_US', 'ja_JP', 'el_GR', 'de_DE'])\n\n self.actor_request = {\n 'name': fake.name(),\n 'age': random.randint(22, 88),\n 'gender': random.choice(['M', 'F'])\n }\n\n self.movie_request = {\n 'title': fake.color_name() + ' ' + fake.street_suffix(),\n 'releaseDate': str(fake.date_between())\n }\n\n self.actor_update_request = {\n 'name': fake.name(),\n }\n\n self.movie_update_request = {\n 'title': fake.color_name() + ' ' + fake.street_suffix(),\n }\n\n for _ in range(30):\n actor_name = fake.name()\n actor_age = random.randint(22, 88)\n actor_gender = random.choice(['M', 'F'])\n\n movie_title = fake.color_name() + ' ' + fake.street_suffix()\n movie_release_date = str(fake.date_between())\n\n actor = Actor(actor_name, actor_age, actor_gender)\n actor.insert()\n\n movie = Movie(movie_title, movie_release_date)\n movie.insert()\n\n for _ in range(20):\n actors = Actor.query.all()\n movies = Movie.query.all()\n\n actor_to_update = random.choice(actors)\n movie_to_update = random.choice(movies)\n actor_to_update.movies.append(movie_to_update)", "def test_variables(self):\n with HTTMock(spark_cloud_mock):\n self.assertEqual(self.device.variables, self.cloud_device.variables)", "def prepare_vars_and_flows(self):\n\n # clear previously populated vars dictionary\n self.vars.clear()\n\n # calculate vars and flows 
sequentially\n self.calculate_scaleup_vars()\n self.calculate_vars()\n self.calculate_flows()", "def _initialize_var(self, variables):\n self.var_list = {}\n for var in variables:\n self.var_list[var.name] = tf.Variable(var.value, trainable=False, dtype=var.dtype)", "def _create_variables(self, n_features, n_classes):\n\n self.W_ = tf.Variable(tf.zeros([n_features, n_classes]), name='weights')\n self.b_ = tf.Variable(tf.zeros([n_classes]), name='biases')", "def read_vars(self, vars):\n fields = {}\n for var in vars:\n try:\n fields[var] = Variable(self.template, var)[:]\n except:\n if var == 'NN':\n fields[var] = self.brunt_vaisalla()\n elif var == 'KE':\n fields[var] = self.kinetic_energy()\n elif var == 'Ep':\n fields[var] = self.potential_energy()\n elif var == 'none':\n fields[var] = np.ones(self.params['global_shape'])\n elif var == 'APE':\n fields[var] = self.available_potential_energy()\n elif var == 'Eb':\n fields[var] = self.background_potential_energy()\n elif var == 'test':\n fields[var] = self.test()\n elif var == 'p_mean':\n fields[var] = self.mean_pressure()\n elif var == 'Q_times_z':\n fields[var] = self.E_2()\n elif var == 'br_times_z':\n fields[var] = self.E_1()\n elif var == 'phi_z':\n fields[var] = self.buoyancy_flux()\n elif var == 'phi_b':\n fields[var] = self.buoyancy_forcing()\n elif var == 'pr':\n fields[var] = self.backgroud_pressure()\n\n if var == 'u':\n fields[var] = fields[var]/self.params['dx']\n elif var == 'v':\n fields[var] = fields[var]/self.params['dy']\n elif var == 'w':\n fields[var] = fields[var]/self.params['dz']\n\n return fields", "def setUp(self) -> None:\n\n self.helper = EnvironmentVariableHelper()\n\n self.test_name = \"PYFUNCEBLE_TESTING\"\n self.temp_env_file = tempfile.NamedTemporaryFile(\"w\", delete=False)", "def setupVariables(self, file, variables, wordsize):\n file.write(self.getStringForVariables(variables, wordsize) + '\\n')\n return", "def init_locals(self):\n pass", "def write_environ(test):\n\n os.environ['TS_BASEDIR'] = test.basedir\n os.environ['TS_CONFIG_NL'] = test.conf.config_nl\n os.environ['TS_NL_TS_SWITCH'] = test.conf.nl_ts_switch\n os.environ['TS_DT_FILE'] = test.conf.dt_file\n os.environ['TS_REFOUTDIR'] = test.refoutdir\n os.environ['TS_VERBOSE'] = str(test.options.v_level)\n os.environ['TS_RUNDIR'] = test.rundir\n os.environ['TS_LOGFILE'] = test.log_file\n os.environ['TS_NAMELISTDIR'] = test.namelistdir\n os.environ['TS_TOLERANCE'] = test.tolerance\n os.environ['TS_FORCEMATCH'] = str(test.forcematch)\n os.environ['TS_TUNING_ITERATIONS'] = str(test.options.tuning_iterations)\n os.environ['TS_TUNE_THRESHOLDS'] = str(test.options.tune_thresholds)\n os.environ['TS_RESET_THRESHOLDS'] = str(test.options.reset_thresholds)\n os.environ['TS_ICON'] = str(test.options.icon)\n os.environ['TS_YUFILE'] = test.conf.yufile", "def _create(self, variables):\n required_vars = ['container']\n variables_dict = self._get_vars(variables, required=required_vars)\n\n container_name = variables_dict.pop('container')\n container_data = self._create_container(container_name=container_name)\n\n if not container_data:\n container_data = self.swift.head_container(container_name)\n\n return self._facts(facts=[container_data])", "def setUp(self):\n\n self.directory = tempfile.mkdtemp(dir=os.getcwd())\n spirv_args = self.test.spirv_args\n # Instantiate placeholders in spirv_args\n self.test.spirv_args = [\n arg.instantiate_for_spirv_args(self)\n if isinstance(arg, PlaceHolder) else arg for arg in self.test.spirv_args\n ]\n # Get all shader files' names\n 
self.inputs = [arg for arg in spirv_args if isinstance(arg, PlaceHolder)]\n self.file_shaders = [arg.filename for arg in self.inputs]\n\n if 'environment' in get_all_variables(self.test):\n self.test.environment.write(self.directory)\n\n expectations = [\n v for v in get_all_variables(self.test)\n if v.startswith(EXPECTED_BEHAVIOR_PREFIX)\n ]\n # Instantiate placeholders in expectations\n for expectation_name in expectations:\n expectation = getattr(self.test, expectation_name)\n if isinstance(expectation, list):\n expanded_expections = [\n element.instantiate_for_expectation(self)\n if isinstance(element, PlaceHolder) else element\n for element in expectation\n ]\n setattr(self.test, expectation_name, expanded_expections)\n elif isinstance(expectation, PlaceHolder):\n setattr(self.test, expectation_name,\n expectation.instantiate_for_expectation(self))", "def setUp(self):\n patientgen = PatientsGenerator(0, 1, 0, 'a')\n self.record = patientgen.data.find('record')\n self.gender_sex = patientgen.gender_sex_list\n self.ethnicities = patientgen.ethnicity_list\n # self.female_names = patientgen.data_generator.first_names_female\n # self.male_names = patientgen.data_generator.first_names_male\n # self.last_names = patientgen.data_generator.last_names", "def test_set_name_through_init(self) -> None:\n\n given = self.test_name\n expected = given\n\n helper = EnvironmentVariableHelper(given)\n actual = helper.name\n\n self.assertEqual(expected, actual)", "def setUp(self):\n self.name = \"test_a\"\n self.city = \"test_city\"\n self.studio = Studio(name=self.name, city=self.city)", "def make_control_knowledge_variables(self, horizon):\n # You might want to save your variables here, or feel free to make as\n # many data structures as you need to keep track of them.\n\n self.control_fluent_codes = {}\n\n \"\"\" *** YOUR CODE HERE *** \"\"\"\n\n # DID NOT DEFINE ANY EXTRA VARIABLES, ALL DONE IN THE METHOD BELOW", "def test_variables(self):\n self._api.SetVariable(\"debug_file\", \"/dev/null\")\n self.assertEqual(self._api.GetVariableAsString(\"debug_file\"), \"/dev/null\")", "def _init_vars(self):\n if not self._has(\"vars\"):\n if self._has(\"p\"):\n self._.vars = self._.p.variables()\n elif self._has(\"q\"):\n self._.vars = self._.q.variables()\n elif self._has(\"P\"):\n self._.vars = variables(self._.P)\n elif self._has(\"Q\"):\n self._.vars = variables(self._.Q)\n self._.vars_ordered = len(self._.vars) <= 1", "def test_variables(x, y, z):\n a = x * y\n b = y * a\n c = a + b\n return c / z", "def __init__(self, variables):\n self._variables = variables", "def _create_variables(self):\n\n \n with tf.name_scope(\"variable\"):\n if self.reg_type == 'L2':\n regularizer = tf.contrib.layers.l2_regularizer(scale=self.reg_scale)\n else:\n regularizer = tf.contrib.layers.l1_regularizer(scale=self.reg_scale)\n \n self.dim_lst = [self.dim_inputs] + self.dim_hidden_lst + [self.number_structures]\n print(self.dim_lst)\n \n self.W_lst = []\n self.b_lst = []\n for i in range(len(self.dim_lst)-1):\n self.W_lst.append(tf.get_variable(\n \"W{}\".format(i+1),\n shape=[self.dim_lst[i], self.dim_lst[i+1]],\n initializer=tf.contrib.layers.xavier_initializer(),\n regularizer=regularizer)\n )\n # not output layer, has bias term\n if i < len(self.dim_lst) - 2:\n self.b_lst.append(tf.get_variable(\"b{}\".format(i+1), shape=[self.dim_lst[i+1]]))", "def make_env():\n return {\n 'init': init,\n 'step': step,\n 'is_terminal': is_terminal,\n 'state_as_example': state_as_example,\n }", "def setUp(self):\n\n self.test_subject = 
{\n \"Hello\": \"world\",\n \"World\": {\"world\": \"hello\"},\n \"funilrys\": [\"Fun\", \"Ilrys\"],\n \"Py\": \"Funceble\",\n \"pyfunceble\": [\"funilrys\"],\n }", "def test_documentation_popxl_rts_var(self):\n filename = \"rts_var.py\"\n self.run_python(filename, file_dir=working_dir, working_dir=working_dir)", "def __init__(self):\n self.name = ''\n self.variables = []\n self.assumptions = []\n self.guarantees = []", "def create_variables(self):\n self.create_weight_variable(self.input_size + [self.hidden_size[0]], name=\"W1\")\n\n self.create_bias_variable((1, self.hidden_size[0]), name=\"b1\")\n\n for i in range(self.n_hidden-1):\n self.create_weight_variable([self.hidden_size[i], self.hidden_size[i+1]], \n name=\"W\"+str(i+2))\n\n self.create_bias_variable((1, self.hidden_size[i+1]), name=\"b\"+str(i+2))\n\n for i in range(len(self.output_size)):\n self.create_weight_variable([self.hidden_size[-1], self.output_size[i]], name=\"Wo_%s\"%i)\n\n self.create_bias_variable((1, self.output_size[i]), name=\"bo_%s\"%i)", "def setUp(self):\n self.user = {\n INPUT: \"12345\",\n }", "def set_variable_value():\n\n mp_rp_conf_file = 'entitlement-tests/CCI/ReportPortal/mp_rp_conf.json'\n \n # 1. Set project name which is just the test product name with upper case letter\n cmd = \"sed -i -e 's/PROJECT_NAME/{0}/g' {1}\".format(test_product.upper(), mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 2. Set launch name\n # Launch name examples - Errata-49798_RHEL7_Server_x86_64_Full_ProdCDN; Errata-53717_RHEL8_x86_64_Full_ProdCDN\n cmd = \"sed -i -e 's/LAUNCH_NAME/{0}/g' {1}\".format(get_launch_name(), mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 3. Set variables value in description of launch\n # a) Set Errata url in description of launch\n errata_url = \"[{0}](https:\\/\\/errata.devel.redhat.com\\/advisory\\/{1})\".format(errata_id, errata_id)\n cmd = \"sed -i -e 's/ERRATA_URL/{0}/g' {1}\".format(errata_url, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # b) Set jenkins job url in description of launch\n build_id = build_url.strip('/').split('/')[-1]\n build_url_str = \"[{0}]({1})\".format(build_id, build_url.replace(\"/\",\"\\/\"))\n \n cmd = \"sed -i -e 's/BUILD_URL/{0}/g' {1}\".format(build_url_str, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 4. 
Set launch tag\n # Tag examples - OpenStack16; Ceph3; CNV2\n cmd = \"cat product_version.txt\"\n (ret, output) = commands.getstatusoutput(cmd)\n \n cmd = \"sed -i -e 's/LAUNCH_TAG/{0}{1}/g' {2}\".format(test_product, output, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def test_variational():\n # iris\n #pres = \"Test pour le data set Iris (facile, classique)\"\n #test_from_func_variational(pres, 15, 10, 3, True, Iris)\n\n # breast cancer\n pres = \"Test pour le data set Breast Cancer (facile, classique)\"\n test_from_func_variational(pres, 15, 10, 3, True, Breast_cancer)\n\n # digits\n # pres = \"Test pour le data set Digits (difficile, classique)\"\n # test_from_func(pres, 10, 10, 10, True, Digits, quantum_instance)\n\n # wine\n # pres = \"Test pour le data set Wine (moyen, classique)\"\n # test_from_func(pres, 15, 10, 5, True, Wine, quantum_instance)\n\n # gaussian\n pres = \"Test pour des données gaussiennes (moyen, classique)\"\n for _ in range(1):\n print(\"\\n\")\n print(\"New iteration\")\n test_from_func_variational(pres, 25, 10, 2, True, Gaussian)\n print(\"\\n\")\n\n # small adn strings\n pres = \"Test pour des séquences ADN courtes (difficile, classique)\"\n test_from_func_variational(pres, 10, 15, 14, True, Sequence)\n\n #Quantum data\n pres = \"Test pour des données générées par ordinateur quantique (facile, quantique)\"\n print(pres)\n _, samp_train, samp_test, labels = ad_hoc_data(15, 10, 2, 0.3, True)\n sample_m, sample_p = stock_get(20, 0.3)\n\n labels_me = [-1, 1]\n samp_train_me = {-1: np.array(sample_m[:15]), 1: np.array(sample_p[:15])}\n samp_test_me = {-1: np.array(sample_m[15:]), 1: np.array(sample_p[15:])}\n print(samp_train)\n print(samp_train_me)\n print(samp_test)\n print(samp_test_me)\n\n my_impl_variational(samp_train, samp_test, labels)\n print(\"Pour autres données quantiques\")\n my_impl_variational(samp_train_me, samp_test_me, labels_me)", "def create_variables(self, request):\n\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.body_wrapper import BodyWrapper\n\t\texcept Exception:\n\t\t\tfrom .body_wrapper import BodyWrapper\n\n\t\tif request is not None and not isinstance(request, BodyWrapper):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables'\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_POST)\n\t\thandler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)\n\t\thandler_instance.set_content_type('application/json')\n\t\thandler_instance.set_request(request)\n\t\thandler_instance.set_mandatory_checker(True)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def _create_data():\n tf.logging.info(\"Create records..\")\n train, val, test = util.load_data(data_dir, FLAGS[\"is_aug\"])\n 
tf.logging.info(\"Dataset size: Train-{} Test-{} Val-{}\".format(len(train), len(test), len(val)))\n return train, val, test", "def setvariables(self, request, contextvars, thevars):\n postdata = {}\n if request.POST:\n postdata = dict(request.POST.dict())\n for var in thevars:\n if postdata.get(\"custom_\"+var):\n contextvars[var] = postdata.get(\"custom_\"+var)\n else:\n try:\n contextvars[var] = thevars[var]\n except Exception:\n pass\n return contextvars", "def _build_terraform_vars(self, env):\n # Overload Project._build_terraform_vars() \n # Initialize terraform variables with common values\n self._init_terraform_vars(env)\n\n # Configure project specific terraform variables\n ra = self.reference_architecture[env.reference_architecture]\n pg = env.cloud_spec['postgres_server']\n os = env.cloud_spec['available_os'][env.operating_system]\n guc = TPROCC_GUC\n\n self.terraform_vars.update({\n 'gcloud_image': os['image'],\n 'gcloud_region': env.gcloud_region,\n 'gcloud_credentials': env.gcloud_credentials.name if env.gcloud_credentials else None,\n 'gcloud_project_id': env.gcloud_project_id,\n 'guc_effective_cache_size': guc[env.shirt]['effective_cache_size'],\n 'guc_max_wal_size': guc[env.shirt]['max_wal_size'],\n 'guc_shared_buffers': guc[env.shirt]['shared_buffers'],\n 'pg_password': get_password(self.project_path, 'postgres'),\n 'pg_version': env.postgres_version,\n })\n self.terraform_vars['postgres_server'].update({\n 'instance_type': pg.get('instance_type', ''),\n 'volume': pg.get('volume', {}),\n 'count': 0 # do not create since it will be an cloudsql instance\n })\n\n # set variables for use with edbterraform\n if not self.terraform_vars.get(self.cloud_provider):\n self.terraform_vars[self.cloud_provider] = dict()\n # Setup cloudsql\n settings = list()\n settings.extend(self.database_settings(env))\n databases = dict({\n \"postgres\": {\n 'public_access': True,\n 'region': self.terraform_vars['gcloud_region'],\n 'engine': \"postgres\",\n 'engine_version': env.postgres_version,\n 'dbname': 'dbname',\n 'instance_type': self.terraform_vars['postgres_server']['instance_type'],\n 'volume': {\n 'size_gb': self.terraform_vars['postgres_server']['volume'].get('size'),\n 'type': self.terraform_vars['postgres_server']['volume'].get('type'),\n 'iops': self.terraform_vars['postgres_server']['volume'].get('iops'),\n 'encrypted': False,\n },\n 'username':'postgres',\n 'password': get_password(self.project_path, 'postgres'),\n 'settings': settings,\n 'port': 5432,\n 'tags': {\n 'type': 'postgres_server',\n 'priority': 0,\n 'index': 0,\n 'postgres_group': 'postgres_server',\n 'replication_type': self.terraform_vars['replication_type'] if self.terraform_vars.get('replication_type') else 'unset',\n 'pooler_type': self.terraform_vars['pooler_type'] if self.terraform_vars.get('pooler_type') else 'pgbouncer',\n 'pooler_local': self.terraform_vars.get('pooler_local', False),\n }\n },\n })\n self.terraform_vars[self.cloud_provider]['databases'] = databases\n\n gcloud_cli = CloudCli(self.cloud_provider, bin_path=self.cloud_tools_bin_path)\n if not self.terraform_vars.get(self.cloud_provider):\n self.terraform_vars[self.cloud_provider] = dict()\n self.terraform_vars['created_by'] = gcloud_cli.cli.get_caller_info()\n # ports needed\n self.terraform_vars['service_ports'], self.terraform_vars['region_ports'] = super()._get_default_ports()\n self.terraform_vars['image'] = dict({\n 'name': self.terraform_vars['gcloud_image'],\n 'ssh_user': self.terraform_vars['ssh_user'],\n })\n self.terraform_vars['region'] 
= self.terraform_vars['gcloud_region']\n # create a set of zones from each machine's instance type and region availability zone \n instance_types: set = {\n values['instance_type'] for key, values in self.terraform_vars.items()\n if any(substr in key for substr in ['dbt2_client', 'dbt2_driver', '_server']) and\n isinstance(values, dict) and\n values.get('count', 0) >= 1 and \n values.get('instance_type') and\n key != 'postgres_server' # skip postgres server since sql is set above\n }\n filtered_zones: set = {zone for instance in instance_types for zone in gcloud_cli.check_instance_type_availability(instance, self.terraform_vars['gcloud_region'])}\n filtered_zones.intersection_update(gcloud_cli.cli.get_available_zones(self.terraform_vars['gcloud_region']))\n self.terraform_vars['zones'] = list(filtered_zones)\n databases['postgres']['zone'] = self.terraform_vars['zones'][-1]", "def setUp(self):\n\t\tself.user = create_user()\n\n\t\tself.school_name = 'My Recent School'\n\t\tself.course_name = 'My Course Name'\n\t\tself.start_date = timezone.now()\n\t\tself.end_date = timezone.now() + timedelta(days=365)\n\t\tself.grade_obtained = 'My Grade'", "def test_basic_setup(self):\n random_vars = ['D', 'I', 'G', 'S', 'L']\n\n for rv in random_vars:\n self.assertTrue(rv in self.Gs.nodes)\n self.assertTrue(isinstance(self.Gs.nodes[rv], DiscreteNetworkNode))", "def mk_vars_file(work_dir, server_cfg, provider_name):\n f = open(work_dir + '/vars', 'w')\n f.write('# generated by pentaho_cloud')\n if (server_cfg.ssl):\n f.write('\\nssl=1')\n else:\n f.write('\\nssl=0')\n if server_cfg.passwords:\n for i, p in enumerate(server_cfg.passwords):\n f.write(\"\\npasswords[%d]='%s'\" % (i, p))\n packages = vers[server_cfg.version]\n for k in packages.keys():\n f.write(\"\\n%s='%s'\" % (k, packages[k]))\n f.write(\"\\nprovider='%s'\" % provider_name)\n f.close()\n return f.name", "def test_generate_all_testing(self):\n pass", "def setUp(self):\n self.t = True\n self.f = False\n self.value = 25", "def setUp(self):\n self.data = {'username': 'seiph',\n 'first_name': 'Jean',\n 'last_name': 'Robert',\n 'email': 'jbr@aol.com',\n 'password1': 'kevin1234',\n 'password2': 'kevin1234'}", "def __init__(self, name, variable, variable_info):\n self._name = name\n self.var_id = variable\n self.var_period = variable_info[0]\n self.var_type = variable_info[1]\n self.var_detail = variable_info[2]\n self.var_units = variable_info[3]\n self.var_icon = variable_info[4]\n self.var_state = None", "def _initialize_project_variables(self):\n self.Source = ''\n self.Regional = ''\n self.Vernacular = ''\n self.Fallback = dict()\n self.New_Target = dict()\n self.Biblical_Terms = dict()\n self.Old_Target = dict()\n\n# self.list_projects = []\n# self.project_lines = []\n# self.indent = 0\n# self.Treed = False\n self.root = etree.Element('root')\n# #add child 'settings', all user configurable bits under here\n self.settings = etree.SubElement(self.root, \"settings\")\n# self.old_mode = dict()\n# self.spreferred = etree.SubElement(self.settings, \"preferred\")\n# self.smode = etree.SubElement(self.settings, \"mode\")\n# self.stemp = etree.SubElement(self.settings, \"template\")\n self.sf0 = etree.SubElement(self.settings, \"f0\")\n self.sf1 = etree.SubElement(self.settings, \"f1\")\n self.sf2 = etree.SubElement(self.settings, \"f2\")\n self.trout = etree.SubElement(self.root, \"tree\")", "def generate_temp_variable_name():\n counter = 0\n while True:\n counter += 1\n yield f\"t_{counter}\"", "def __init__(self):\n self.variables = [] # List of 
all variables in certain scope.\n self.field_id = 0 # Id of next field varibale.\n self.argumen_id = 0 # Id of next argument variable.\n self.local_id = 0 # Id of next local variable.\n self.static_id = 0 # Id of next static variable.", "def _prepare_test_cases(ptfhost, request):\n logger.info(\"Preparing SAI test environment.\")\n _create_sai_test_folders(ptfhost)\n _copy_sai_test_cases(ptfhost, request)", "def check_template_variables(subject, vars):\n for var in vars:\n expect(subject).to(match(r'\\{\\{cookiecutter\\.' + var + '\\}\\}'))", "def setUp(self):\n self.test_cube = set_up_percentiles_cube()\n self.new_name = \"probability\"", "def newTemp():\n global varSeq\n toRet = 'var'+str(varSeq)\n varSeq += 1\n scopeDict[currScope].insert(toRet,\"temporary\")\n return toRet", "def setUp(self):\n assert COMMANDS.keys() == EXPCT_RESULTS.keys()\n self.tests = []\n self.test_numbers = deque(sorted(COMMANDS.keys()))", "def fixtures():", "def setUpTestData(cls):\n countries = [\"MX\", \"CHL\", \"USA\", \"PER\", \"COL\"]\n slack_user_ids = [\"UP0918MAV\", \"UP0918MAV\", \"UP0918MAV\", None, None]\n cls.menu = Menu.objects.create(available_on=date.today())\n for count in range(5):\n user = User.objects.create(username=f\"johny.doe {count}\")\n Employee.objects.create(\n user=user, country=countries[count], slack_user_id=slack_user_ids[count]\n )", "def record_variable_inits(self):\n old_init = getattr(variables.Variable, '__init__')\n\n def record(*args, **kwargs):\n self._in_variable_creation = True\n old_init(*args, **kwargs)\n self._in_variable_creation = False\n\n setattr(variables.Variable, '__init__', record)\n yield\n setattr(variables.Variable, '__init__', old_init)", "def setUpTestData(cls):\n cls.user = UserFactory()\n cls.auth = AuthFactory()\n\n cls.device = TOTPDevice.objects.create(user=cls.user)\n cls.relate = TOTPDevice.challenge.objects.create(\n device=cls.device, token=cls.auth\n )\n\n cls.algorithm = TOTPAlgorithm()", "def standard_variables(self):\n\t\tstd_vars = {\n\t\t\t'time': {\n\t\t\t\t'local': datetime.datetime.now(),\n\t\t\t\t'utc': datetime.datetime.utcnow()\n\t\t\t}\n\t\t}\n\t\treturn std_vars", "def test_variable_assign(self):\n self.trace('x = 1')\n\n events = self.variable_events\n self.assertEqual(len(events), 1)\n event = events[0]\n self.assertIsInstance(event, TraceAssign)\n self.assertEqual(event.name, 'x')\n self.assertEqual(event.value, 1)", "def build_extra_vars_file(self, instance, private_data_dir):", "def regenerate_variables(self):\n\n # Let us not forget to remove fields that might be empty by now\n if hasattr(self, '_var_kinds'):\n for k in self._var_kinds:\n attrname = camel2underscores(k)\n try:\n delattr(self, attrname)\n except AttributeError:\n pass # The attribute may not have been set up yet\n\n _var_kinds = defaultdict(DictList)\n for k, v in self._var_dict.items():\n _var_kinds[v.__class__.__name__].append(v)\n\n for k in _var_kinds:\n attrname = camel2underscores(k)\n setattr(self, attrname, _var_kinds[k])\n\n self._var_kinds = _var_kinds", "def setUp(self):\n self.iv1 = Interval(1, 10)\n self.iv2 = Interval(5, 15)\n self.iv1_r = Interval(10, 1)\n self.iv2_r = Interval(15, 5)\n self.iv3 = Interval(3, 8)\n self.iv4 = Interval(11, 20)", "def setUp(self):\n\n self.data_list = [\n \"hello\", \"world\", \"funilrys\", \"funceble\", \"PyFunceble\", \"pyfunceble\"\n ]\n self.data = \"Hello, this is Fun Ilrys. 
I just wanted to know how things goes around the tests.\" # pylint: disable=line-too-long", "def test_create10(self):\n pass", "def __create_test_environment(self):\n os.chdir(self.wd)\n temp_dir = tempfile.gettempdir()\n self.test_root = os.path.join(temp_dir, \"test-grpc\")\n print(\"Creating testing environment in {}\".format(self.test_root))\n if os.path.exists(self.test_root):\n # delete any previous environment\n shutil.rmtree(self.test_root)\n # create root directory\n os.makedirs(self.test_root)\n def copy_app(name):\n app_root = os.path.join(self.test_root, name)\n os.makedirs(app_root)\n filename = \"grpc-{}\".format(name)\n src = os.path.join(self.args.bin, filename)\n dst = os.path.join(app_root, filename)\n shutil.copy(src, dst)\n return dst\n # copy client and server into the new test environment\n self.server_path = copy_app(\"server\")\n self.client_path = copy_app(\"client\")", "def test_vars_generator(self):\n iterator = vars_generator()\n\n for char_number in range(ord('a'), ord('z') + 1):\n self.assertEqual(next(iterator), chr(char_number))\n self.assertEqual(next(iterator), 'aa')" ]
[ "0.68591744", "0.67279863", "0.6680693", "0.6607811", "0.65469337", "0.6510623", "0.642608", "0.64156944", "0.633385", "0.62405956", "0.6165352", "0.6113072", "0.59246886", "0.5924667", "0.5894547", "0.58901113", "0.588886", "0.58794427", "0.58783376", "0.58669263", "0.58664006", "0.58203924", "0.5810067", "0.5785507", "0.5725505", "0.57221836", "0.5681816", "0.5678653", "0.5675751", "0.5658942", "0.5646168", "0.56419003", "0.5636056", "0.5634291", "0.56313866", "0.5617503", "0.5612004", "0.5609945", "0.56087804", "0.557306", "0.55600744", "0.5559799", "0.55575126", "0.55573624", "0.5549722", "0.5547504", "0.5546446", "0.55421966", "0.55401564", "0.55378723", "0.5529253", "0.5528039", "0.5522514", "0.55147725", "0.54956084", "0.54923695", "0.54901916", "0.5489398", "0.5485426", "0.54848325", "0.5484552", "0.5481638", "0.54646176", "0.54646176", "0.54646176", "0.54646176", "0.54646176", "0.54646176", "0.5454835", "0.54487467", "0.54477227", "0.5444531", "0.5438027", "0.5435746", "0.54356337", "0.5433579", "0.54326713", "0.54297775", "0.5429658", "0.54289985", "0.54283375", "0.5423424", "0.5411942", "0.5411368", "0.54085785", "0.5408518", "0.5407928", "0.5404371", "0.54040945", "0.5399354", "0.5397473", "0.53940606", "0.5392619", "0.5380708", "0.5379468", "0.53767747", "0.5376527", "0.536044", "0.5359206", "0.53591686", "0.5356356" ]
0.0
-1
Test that writing happens correctly and a file is generated.
def test_self_write(self):
    self.assertFalse(os.path.exists(self.f1))
    self.assertFalse(os.path.exists(self.f2))
    self.sync.pickle_write()
    self.assertTrue(os.path.exists(self.f1))
    self.assertTrue(os.path.exists(self.f2))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_writer_with_file():\n outputfile = \"testfile.txt\"\n GCMT(write=outputfile)\n assert os.path.exists(outputfile)\n os.remove(outputfile)", "def test_write_file():\n filename = 'test'\n content = 'hello!'\n\n write_file(content, filename)\n assert read_file(filename) == 'hello!'", "def test_cannot_write_file(self):\n self.api.write_data('/some-fake/path/to-create-file/', 'some-string')", "def test_to_file(self):\n fd, fp = mkstemp()\n close(fd)\n st = SampleTemplate.create(self.metadata, self.new_study)\n st.to_file(fp)\n self._clean_up_files.append(fp)\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE)\n\n fd, fp = mkstemp()\n close(fd)\n st.to_file(fp, {'2.Sample1', '2.Sample3'})\n self._clean_up_files.append(fp)\n\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE_FEWER_SAMPLES)", "def test1_write():\n with open(FILE_DIR + FILE_NAME, mode='w', encoding='utf-8') as f:\n f.write(DATA)", "def test_write_file(self):\n test = Server()\n test.cur_dir = os.getcwd()\n inputs = [['write_file', 'test_file1.txt', 'Hello world'],\n ['write_file', 'test_file2.txt', 'Hello world'],\n ['write_file', 'test_file1.txt']]\n response = ['written successfully',\n 'file created and written successfully',\n 'contents erased successfully']\n res = []\n for val in inputs:\n res.append(test.write_file(val))\n self.assertListEqual(res, response)", "def test_005_write_file(self):\n __test = chess_storage.ChessStorage()\n __test_data = list(range(consts.TEST_LIST_LENGHT))\n __dir_game_saves = os.path.dirname(__file__)\n __dir_game_saves = os.path.join(__dir_game_saves, 'games')\n __dir_game_saves = os.path.join(__dir_game_saves, consts.TEST_FILENAME)\n # pylint: disable = protected-access\n __save_test = __test._ChessStorage__write_file(__dir_game_saves, __test_data)\n # pylint: enable = protected-access\n self.assertEqual(__save_test, consts.ERROR_CODES[\"SUCCESSFULL\"])", "def test_save_to_file(self):\n self.assertFalse(os.path.exists(\"file.json\"))", "def test_file_writer_node(self):\n writer = nodes.FileWriter(filepath='/filepath', safe_file=False)\n channel = FakeChannel(self.loop)\n writer.channel = channel\n msg1 = generate_msg(message_content=\"message_content\")\n with mock.patch(\"builtins.open\", mock.mock_open()) as mock_file:\n result = self.loop.run_until_complete(writer.handle(msg1))\n\n mock_file.assert_called_once_with('/filepath', 'w')\n handle = mock_file()\n handle.write.assert_called_once_with('message_content')\n\n writer2 = nodes.FileWriter(safe_file=False)\n writer.channel = channel\n msg2 = generate_msg(message_content=\"message_content2\")\n msg2.meta['filepath'] = '/filepath2'\n with mock.patch(\"builtins.open\", mock.mock_open()) as mock_file:\n result = self.loop.run_until_complete(writer2.handle(msg2))\n\n mock_file.assert_called_once_with('/filepath2', 'w')\n handle = mock_file()\n handle.write.assert_called_once_with('message_content2')", "def test_outfile():\n\n out_file = random_string() + '.txt'\n try:\n if os.path.isfile(out_file):\n os.remove(out_file)\n\n rv, out = getstatusoutput(f'{prg} -f {repeat} -o {out_file}')\n assert rv == 0\n expected = (f' 1: amigo_repeat.txt\\n'\n f'Wrote 5 gene IDs from 1 file to file \"{out_file}\"')\n assert out == expected\n assert os.path.isfile(out_file)\n exp_repeat = '\\n'.join(\n sorted(\"\"\"\n AT4G14690 AT5G41340 AT5G03720 AT5G12020 AT2G22360\n \"\"\".split()))\n assert open(out_file).read().strip() == exp_repeat.strip()\n\n finally:\n if 
os.path.isfile(out_file):\n os.remove(out_file)", "def test_write_to_file():\n from scraper import write_to_file\n encoding = 'utf-8'\n write_to_file(TEST_FILE, TEST_CONTENT, encoding)\n assert True", "def test_writing(self):\n with contextlib.closing(logfile.LogFile(self.name, self.dir)) as log:\n log.write(\"123\")\n log.write(\"456\")\n log.flush()\n log.write(\"7890\")\n\n with open(self.path) as f:\n self.assertEqual(f.read(), \"1234567890\")", "def test_write_file_to_disk_str(self):\r\n file_data = 'A' * 100\r\n write_file_to_disk(self.test_file3, file_data)\r\n self.file_contents_is_equal(self.test_file3, file_data)", "def testExampleFileGeneration(ref):\n outdir = ref.tmp_dir\n outpath = os.path.join(outdir, 'file_result.html')\n generate_file(outpath)\n ref.assertTextFileCorrect(outpath, 'file_result.html',\n ignore_substrings=['Copyright', 'Version'])", "def testWriteFiles(self):\n header_files = ['test.h']\n resource_files = ['test.rc']\n source_files = ['test.c']\n\n file_writer = writers.VS2010ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteFiles(source_files, header_files, resource_files)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertTrue(output_data.startswith(b' <ItemGroup>\\r\\n'))\n self.assertTrue(output_data.endswith(b' </ItemGroup>\\r\\n'))", "def testWriteFiles(self):\n header_files = ['test.h']\n resource_files = ['test.rc']\n source_files = ['test.c']\n\n file_writer = writers.VS2008ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteFiles(source_files, header_files, resource_files)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertTrue(output_data.startswith(b'\\t<Files>\\r\\n'))\n self.assertTrue(output_data.endswith(\n b'\\t</Files>\\r\\n'\n b'\\t<Globals>\\r\\n'\n b'\\t</Globals>\\r\\n'))", "def test_writing(self):\n with contextlib.closing(RiggedDailyLogFile(self.name, self.dir)) as log:\n log.write(\"123\")\n log.write(\"456\")\n log.flush()\n log.write(\"7890\")\n\n with open(self.path) as f:\n self.assertEqual(f.read(), \"1234567890\")", "def testWriteLine(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteLine('Line of text')\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = b'Line of text\\r\\n'\n self.assertEqual(output_data, expected_output_data)", "def test_write_file():\n with tempfile.NamedTemporaryFile(delete=False) as t:\n temp_fn = t.name\n try:\n z = XPIManager(temp_fn, mode='w')\n f, d = 'install.rdf', '注目のコレクション'.decode('utf-8')\n z.write(f, d)\n assert z.read(f) == d.encode('utf-8')\n finally:\n os.unlink(temp_fn)", "def test_malformed(self):\n fdesc, fname = tempfile.mkstemp()\n tfile = os.fdopen(fdesc, 'w')\n tfile.write(self.file_str2)\n tfile.close()\n assert_raises(Exception, grades.writers.GradesFile, fname)\n os.unlink(fname)", "def test_write_data(dbh):\n mock_path = '/tmp/test.json'\n if os.path.exists(mock_path):\n os.remove(mock_path)\n assert not os.path.isfile(mock_path)\n assert dbh.write_data(mock_path)\n assert os.path.isfile(mock_path)", "def test_WriteAndFlushFile():\n\n try:\n # Create a directory. 
Make sure to remove it at the end.\n dirname = tempfile.mkdtemp()\n filename = 'filename.txt'\n text1 = 'The quick brown fox\\n'\n text2 = 'The lazy dog'\n full_path = os.path.join(dirname, filename)\n\n # Open a file and write using both changed methods\n f = prefork.WriteAndFlushFile(full_path, 'w')\n f.write(text1)\n f.writelines(text2)\n f.close()\n\n # Read everything back\n f = open(full_path, 'r')\n data = f.readlines()\n f.close()\n\n assert data[0] == text1\n assert data[1] == text2\n\n finally:\n # Always remove it\n shutil.rmtree(dirname)", "def test_write_to_file(api):\n\ttry:\n\t\twith patch(\"__builtin__.open\", new_callable=mock_open()) as m_open:\n\t\t\twith patch(\"json.dump\") as m_dump:\n\t\t\t\ttest_assertions_for_json_file(m_dump, m_open, api)\n\texcept ImportError:\n\t\twith patch(\"builtins.open\", new_callable=mock_open()) as m_open:\n\t\t\twith patch(\"json.dump\") as m_dump:\n\t\t\t\ttest_assertions_for_json_file(m_dump, m_open, api)", "def test_outfile():\n\n out_file = random_filename()\n if os.path.isfile(out_file):\n os.remove(out_file)\n\n try:\n cmd = f'{prg} --cdhit {cdhit} --proteins {proteins} -o {out_file}'\n rv, out = getstatusoutput(cmd)\n assert rv == 0\n\n assert out == ('Wrote 309 of 220,520 unclustered '\n f'proteins to \"{out_file}\"')\n\n assert os.path.isfile(out_file)\n\n seqs = list(SeqIO.parse(out_file, 'fasta'))\n assert len(seqs) == 309\n\n finally:\n if os.path.isfile(out_file):\n os.remove(out_file)", "def test_file(tmpdir):\n file_path = tmpdir / 'test.txt'\n file_path = file_path.write_binary(b'This is some test data!')\n return file_path", "def test_atomic_write(self):\n with TemporaryDirectory() as tmp:\n fp = os.path.join(tmp, \"asdf.txt\")\n\n # perform an atomic write\n with atomic_write(fp, \"w\") as f:\n assert not os.path.exists(fp)\n tmpfile = f.name\n f.write(\"asdf\")\n\n # ensure tmp file has been deleted\n assert not os.path.exists(tmpfile)\n # ensure file to write to exists\n assert os.path.exists(fp)\n\n # ensure content of destination file is what we expect\n with open(fp) as f:\n self.assertEqual(f.read(), \"asdf\")", "def write_file(self):\n if self._write_file == None:\n return\n\n try:\n out = file(self._write_file, \"w\")\n except IOError, e:\n print e\n sys.exit(1)\n out.writelines(\"A cases\") \n out.close()", "def test_001(compiler, temp_builds_dir):\n filepath = temp_builds_dir.join(\"compiler_write_001\")\n\n content = \"\"\"Some sample latin text\"\"\"\n\n compiler.write_content(content, filepath.strpath)\n\n # Read file to compare\n with io.open(filepath.strpath, \"r\", encoding=\"utf-8\") as f:\n result = f.read()\n\n assert content == result", "def generate_test_txt(name, path):\n with open(path + '/test.txt', 'a') as file:\n file.write('data/test/' + name + '\\n')", "def test_custom_local_output_file_with_overwrite() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n output_file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Create a file in the temporary directory\n with open(output_file_location, \"wb\") as write_file:\n write_file.write(b\"foo\")\n\n # Instantiate an output file\n output_file = PyArrowFileIO().new_output(location=f\"{output_file_location}\")\n\n # Confirm that a FileExistsError is raised when overwrite=False\n with pytest.raises(FileExistsError):\n f = output_file.create(overwrite=False)\n f.write(b\"foo\")\n\n # Confirm that the file is overwritten with overwrite=True\n f = output_file.create(overwrite=True)\n f.write(b\"bar\")\n with open(output_file_location, 
\"rb\") as f:\n assert f.read() == b\"bar\"", "def test_write(self):\n temp_file = tempfile.mkstemp()[1]\n try:\n with open(temp_file, \"w+\") as fh:\n self.new_manifest.write(fh)\n tools.eq_(self.new_manifest, load_manifest(temp_file))\n finally:\n os.unlink(temp_file)", "def test_write_to_file(self):\n test_filename = \"test_write.cf\"\n cf1 = ConfigFile()\n cf1[\"key1\"] = \"val1\"\n cf1[\"key2\"] = \"val2\"\n cf1[\"key3\"] = \"val3\"\n cf1.write_to_file(filename=test_filename)\n\n cf2 = ConfigFile()\n cf2.load_from_file(filename=test_filename)\n\n self.assertTrue(cf1 == cf2)", "def test_madlib_file_write():\n madlib(input_values)\n file_text = ''\n with open('assets/updated_madlib_text', 'r') as file:\n for line in file:\n file_text += line\n assert file_text == output_text", "def test_custom_local_output_file() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Instantiate the output file\n absolute_file_location = os.path.abspath(file_location)\n output_file = PyArrowFileIO().new_output(location=f\"{absolute_file_location}\")\n\n # Create the output file and write to it\n f = output_file.create()\n f.write(b\"foo\")\n\n # Confirm that bytes were written\n with open(file_location, \"rb\") as f:\n assert f.read() == b\"foo\"\n\n assert len(output_file) == 3", "def test_write_overwrite_delete(self):\n\n expected = \"Hello, World! I'm domain2idna\"\n File(\"hi\").write(expected)\n\n with open(\"hi\") as file:\n actual = file.read()\n\n self.assertEqual(expected, actual)\n\n expected = \"Hello, World! Python is great, you should consider learning it!\"\n File(\"hi\").write(expected, overwrite=True)\n\n with open(\"hi\") as file:\n actual = file.read()\n\n self.assertEqual(expected, actual)\n\n expected = False\n File(\"hi\").delete()\n actual = PyFunceble.path.isfile(\"hi\")\n\n self.assertEqual(expected, actual)", "def test_write_race_results_to_file():\n number = random.randint(1, 3)\n f1.write_race_results_to_file(number)\n with open(f\"results_for_race_{number}.txt\", encoding=\"utf-8\") as opened_file:\n list_of_lines = opened_file.readlines()\n assert len(list_of_lines) == 13", "def test_to_file(self):\n fd, fp = mkstemp()\n close(fd)\n pt = PrepTemplate.create(self.metadata, self.new_raw_data,\n self.test_study, self.data_type)\n pt.to_file(fp)\n self._clean_up_files.append(fp)\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_PREP_TEMPLATE)", "def test_write(self):\n path = os.path.join(self.tmp_dir, 'foo.tfrecord')\n writer = tfrecords_writer.Writer('some spec', path)\n to_write = [\n (1, b'a'), (2, b'b'),\n (3, b'c'),\n (4, b'd'), (5, b'e'),\n (6, b'f'),\n (7, b'g'), (8, b'h'),\n ]\n for key, record in to_write:\n writer.write(key, record)\n with absltest.mock.patch.object(tfrecords_writer, '_get_number_shards',\n return_value=5):\n shards_length = writer.finalize()\n self.assertEqual(shards_length, [2, 1, 2, 1, 2])\n written_files, all_recs = _read_records(self.tmp_dir)\n self.assertEqual(written_files,\n ['foo.tfrecord-0000%s-of-00005' % i for i in range(5)])\n self.assertEqual(all_recs, [\n [b'f', b'e'], [b'b'], [b'a', b'g'], [b'h'], [b'c', b'd'],\n ])", "def test_file_creation(data, logging_file_name):\n create_instance(data, logging_file_name)\n log_file_name = create_file_path(logging_file_name)\n print(log_file_name)\n if data is None or len(data) == 0:\n assert not os.path.exists(log_file_name)\n else:\n assert os.path.exists(log_file_name)", "def __create_test_file(self):\n 
self.test_file = os.path.join(os.path.dirname(self.server_path), \"data\")\n with open(self.test_file, \"ab+\") as f:\n n_blocks = int(self.args.size) // self.max_block_size\n for i in range(n_blocks):\n f.write(bytearray(os.urandom(self.max_block_size)))\n remaining = int(self.args.size) % self.max_block_size\n if remaining > 0:\n f.write(bytearray(os.urandom(remaining)))\n self.assertEqual(int(self.args.size), os.path.getsize(self.test_file))", "def test_write(self):\r\n\r\n b = BufferedWriter(self.test_fp, buf_size=2)\r\n b.write(\"1\")\r\n content = open(self.test_fp, \"r\").readlines()\r\n self.assertEquals(content, [])\r\n\r\n # still nothing\r\n b.write(\"2\")\r\n content = open(self.test_fp, \"r\").readlines()\r\n self.assertEquals(content, [])\r\n\r\n # finally, buffer is flushed\r\n b.write(\"3\")\r\n content = open(self.test_fp, \"r\").readlines()\r\n self.assertEquals(content, [\"123\"])", "def test_write_to_bug_file_if_good(self):\n mock = Mock(return_value=3)\n\n @write_error_to_file\n def everything_works_without_exceptions():\n mock()\n\n everything_works_without_exceptions()\n self.assertFalse(os.path.isfile(LOGFILENAME))", "def test_write(self):\n with TemporaryDirectoryChanger():\n nhflux.NhfluxStream.writeBinary(self.nhf, \"NHFLUX2\")\n with open(SIMPLE_HEXZ_NHFLUX, \"rb\") as f1, open(\"NHFLUX2\", \"rb\") as f2:\n expectedData = f1.read()\n actualData = f2.read()\n for expected, actual in zip(expectedData, actualData):\n self.assertEqual(expected, actual)", "def testOpenClose(self):\n file_writer = writers.FileWriter()\n\n with test_lib.TempDirectory() as temp_directory:\n filename = os.path.join(temp_directory, 'testfile')\n file_writer.Open(filename)\n\n file_writer.Close()", "def test_basic_mech_write(self):\n\n unit = btmux.parse_from_file(os.path.join(BTMUX_SAMPLE_DIR, 'AS7-D'))\n fobj = StringIO()\n write_to_file(unit, fobj)\n #print fobj.getvalue()\n # TODO: Compare to a golden standard.", "def test_write_file_to_disk(self):\r\n file_data = u'ß' * 100\r\n write_file_to_disk(self.test_file2, file_data)\r\n self.file_contents_is_equal(self.test_file2, file_data)", "def test_file_exists(self):\n with TemporaryDirectory() as tmp:\n # define path to file\n fp = os.path.join(tmp, \"asdf.txt\")\n\n # write atomically to file\n with atomic_write(fp, \"w\") as f:\n f.write(\"asdf\")\n\n # ensure file exists\n assert os.path.exists(fp)\n\n # ensure atomic_write to same file raises an error as it already exists\n try:\n with atomic_write(fp, \"w\") as f:\n f.write(\"asdf\")\n except FileExistsError as e:\n self.assertIsInstance(e, FileExistsError)", "def test_002(compiler, temp_builds_dir):\n filepath = temp_builds_dir.join(\"compiler_write_002\")\n\n content = \"\"\"Some sample unicode text: フランス Furansu\"\"\"\n\n compiler.write_content(content, filepath.strpath)\n\n # Read file to compare\n with io.open(filepath.strpath, \"r\", encoding=\"utf-8\") as f:\n result = f.read()\n\n assert content == result", "def test_call_write_to_file(self):\r\n app = ReferenceRepSetPicker(params={'Algorithm': 'first',\r\n 'ChoiceF': first_id})\r\n app(self.tmp_seq_filepath,\r\n self.tmp_otu_filepath,\r\n self.ref_seq_filepath,\r\n result_path=self.result_filepath)\r\n with open(self.result_filepath) as f:\r\n actual = SequenceCollection.from_fasta_records(parse_fasta(f), DNA)\r\n expected = SequenceCollection.from_fasta_records(\r\n parse_fasta(rep_seqs_reference_result_file_exp.split('\\n')), DNA)\r\n # we don't care about order in the results\r\n self.assertEqual(set(actual), 
set(expected))", "def testWriteProjects(self):\n solution_project = resources.VSSolutionProject('name', 'file', 'guid')\n\n file_writer = writers.VSSolutionFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteProjects([solution_project])\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertEqual(output_data, b'')", "def test_validate_and_write_emit(req):\n handle = StringIO()\n req.get('http://fake/', text=u'This is a sequence file, honest.')\n r = requests.get('http://fake/')\n output = StringIO()\n config = core.Config()\n config.emit = output.write\n core._validate_and_write(r, handle, 'FAKE', config)\n\n assert output.getvalue() == u'.\\n'\n assert handle.getvalue() == u'This is a sequence file, honest.'", "def testInitialize(self):\n file_writer = writers.FileWriter()\n self.assertIsNotNone(file_writer)", "def test_save_node(self):\n\n with mock.patch(\"builtins.open\", mock.mock_open(read_data=\"data\")) as mock_file, \\\n mock.patch('pypeman.nodes.os.makedirs') as mock_makedirs:\n\n mock_makedirs.return_value = None\n\n n = nodes.Save(uri='file:///tmp/test/?filename=%(msg_year)s/%(msg_month)s/message%(msg_day)s-%(counter)s.txt')\n n.channel = FakeChannel(self.loop)\n\n m = generate_msg(timestamp=(1981, 12, 28, 13, 37))\n m.payload = \"content\"\n\n ret = self.loop.run_until_complete(n.handle(m))\n\n self.assertTrue(isinstance(ret, message.Message))\n\n # Asserts\n mock_makedirs.assert_called_once_with('/tmp/test/1981/12')\n mock_file.assert_called_once_with('/tmp/test/1981/12/message28-0.txt', 'w')\n handle = mock_file()\n handle.write.assert_called_once_with('content')", "def test_to_file(self):\n with TemporaryDirectory() as tmp:\n df_test = make_simple_dataframe()\n Base = BaseDataClass.from_object(df_test)\n fp_save = os.path.join(tmp, \"test_save.csv\")\n Base.to_file(fp_save)\n assert os.path.exists(fp_save)", "def testWriteBinaryData(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteBinaryData(b'Binary data')\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = b'Binary data'\n self.assertEqual(output_data, expected_output_data)", "def testWriteLines(self):\n file_writer = writers.FileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteLines([\n 'First line of text',\n 'Second line of text'])\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n expected_output_data = (\n b'First line of text\\r\\nSecond line of text\\r\\n')\n self.assertEqual(output_data, expected_output_data)", "def test_custom_file_exists() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n file_location = os.path.join(tmpdirname, \"foo.txt\")\n with open(file_location, \"wb\") as f:\n f.write(b\"foo\")\n\n nonexistent_file_location = os.path.join(tmpdirname, \"bar.txt\")\n\n # Confirm that the file initially exists\n assert os.path.exists(file_location)\n\n # Get an absolute path for an existing file and a nonexistent file\n absolute_file_location = os.path.abspath(file_location)\n non_existent_absolute_file_location = os.path.abspath(nonexistent_file_location)\n\n # Create InputFile instances\n input_file = PyArrowFileIO().new_input(location=f\"{absolute_file_location}\")\n non_existent_input_file = PyArrowFileIO().new_input(location=f\"{non_existent_absolute_file_location}\")\n\n # Test opening and reading the file\n assert input_file.exists()\n assert not 
non_existent_input_file.exists()\n\n # Create OutputFile instances\n file = PyArrowFileIO().new_output(location=f\"{absolute_file_location}\")\n non_existent_file = PyArrowFileIO().new_output(location=f\"{non_existent_absolute_file_location}\")\n\n # Test opening and reading the file\n assert file.exists()\n assert not non_existent_file.exists()", "def _test_python_writer(basename):\n converted, expected = _test_writer(basename, 'python')\n assert _diff(converted, expected) == ''", "def test_write_delete(self):\n\n expected = \"Hello, World! I'm domain2idna\"\n File(\"hi\").write(expected)\n\n with open(\"hi\") as file:\n actual = file.read()\n\n self.assertEqual(expected, actual)\n\n expected = False\n File(\"hi\").delete()\n actual = PyFunceble.path.isfile(\"hi\")\n\n self.assertEqual(expected, actual)", "def test_to_json_file(self):\n\n output_file = \"this_file_is_a_ghost\"\n File(output_file).delete()\n\n Dict(self.test_subject.copy()).to_json_file(output_file)\n\n expected = self.test_subject.copy()\n\n actual = Dict().from_json_file(output_file)\n\n self.assertEqual(expected, actual)\n\n File(output_file).delete()", "def testWriteSolutionProperties(self):\n file_writer = writers.VSSolutionFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer._WriteSolutionProperties()\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n expected_output_data = (\n b'\\tGlobalSection(SolutionProperties) = preSolution\\r\\n'\n b'\\t\\tHideSolutionNode = FALSE\\r\\n'\n b'\\tEndGlobalSection\\r\\n')\n self.assertEqual(output_data, expected_output_data)", "def test_write_tfrecord(self):\n path = os.path.join(self.tmp_dir, 'foo-train.tfrecord')\n shards_length, total_size = self._write(to_write=self.RECORDS_TO_WRITE)\n self.assertEqual(self.NUM_SHARDS, len(shards_length))\n self.assertEqual(\n shards_length, [len(shard) for shard in self.SHARDS_CONTENT]\n )\n self.assertEqual(total_size, 9)\n written_files, all_recs = _read_records(path)\n written_index_files, all_indices = _read_indices(path)\n self.assertEqual(\n written_files,\n [\n f'foo-train.tfrecord-{i:05d}-of-{self.NUM_SHARDS:05d}'\n for i in range(self.NUM_SHARDS)\n if shards_length[i]\n ],\n )\n self.assertEqual(all_recs, self.SHARDS_CONTENT)\n self.assertEmpty(written_index_files)\n self.assertEmpty(all_indices)", "def write_to_file(self, filename: str) -> None:", "def test_incorrect_input():\n content = 'hi'\n filename = {}\n\n with pytest.raises(TypeError):\n write_file(content, filename)\n\n content = {}\n filename = 'hi'\n\n with pytest.raises(TypeError):\n write_file(content, filename)", "def test_write_tfrecord(self):\n path = os.path.join(self.tmp_dir, 'foo-train.tfrecord')\n shards_length, total_size = self._write(to_write=self.RECORDS_TO_WRITE)\n self.assertEqual(self.NUM_SHARDS, len(shards_length))\n self.assertEqual(\n shards_length, [len(shard) for shard in self.SHARDS_CONTENT]\n )\n self.assertEqual(total_size, 10)\n written_files, all_recs = _read_records(path)\n written_index_files, all_indices = _read_indices(path)\n self.assertEqual(\n written_files,\n [\n f'foo-train.tfrecord-{i:05d}-of-{self.NUM_SHARDS:05d}'\n for i in range(self.NUM_SHARDS)\n if shards_length[i]\n ],\n )\n self.assertEqual(all_recs, self.SHARDS_CONTENT)\n self.assertEmpty(written_index_files)\n self.assertEmpty(all_indices)", "def create_file():\n with open(\"example.txt\", \"w\") as file:\n file.write(\"\")", "def test_make_file():\n with tempfile.TemporaryDirectory() as STATUS_DIR:\n 
Status.make_job_file(STATUS_DIR, 'generation', 'test1', TEST_1_ATTRS_1)\n status = Status.retrieve_job_status(STATUS_DIR, 'generation', 'test1')\n msg = 'Failed, status is \"{}\"'.format(status)\n assert status == 'R', msg", "def test_write_source(self):\n req = Request()\n for name in sample_data.keys():\n orig_fn = self._filepath(name)\n temp_fn = self._filepath(name + '-write-source')\n\n # Read the message\n resp = req.get(fromfile=orig_fn)\n\n # Write to a temporary JSON file\n resp.write_source(temp_fn)\n\n # Read the two files and compare JSON (ignores ordering)\n with open(orig_fn) as orig, open(temp_fn) as temp:\n assert json.load(orig) == json.load(temp)\n\n # Delete the temporary file\n os.remove(temp_fn)", "def test_file_ascii_safewrite(self):\n os.remove(self.ascii_path) #remove the existing text file for tests\n if os.path.exists(self.ascii_path):\n raise IOError(\"The ascii test file was not deleted. (test_IO.py.test_file_ascii_safewrite)\")\n else:\n safe_response = FileWriter(self.ascii_path).safe_write(self.ascii_string) # attempt safe_write when no preexisting file present\n ascii_text = FileReader(self.ascii_path).read()\n self.assertEqual(ascii_text, self.ascii_string) # assert that the correct text was written\n self.assertEqual(safe_response, True) # assert that returns True when file not present and writes\n\n if os.path.exists(self.ascii_path):\n self.assertEqual(FileWriter(self.ascii_path).safe_write(self.ascii_string), False) #confirm that returns False to calling function when there is a pre-existing file\n else:\n raise IOError(\"The ascii test file is not present (test_IO.py.test_file_ascii_safewrite)\")", "def test_create1(self):\n fname = TempfileManager.create_tempfile()\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertTrue(fname.startswith('tmp'))", "def test_write(self):\n map_to_write = os.path.join(tests.TEST_DATA_PATH, 'segmentations', 'test_write_map.map')\n written_maps = glob.glob(map_to_write)\n self.assertEqual(len(written_maps), 0)\n with open(map_to_write, 'w') as f:\n map_ = mapreader.get_data(self.map_file)\n map_.write(f)\n written_maps = glob.glob(map_to_write)\n self.assertEqual(len(written_maps), 1)\n map(os.remove, written_maps)", "def test_write_cdd_file(self):\n kwargs = {\n 'profile__id': 14879,\n 'profile__romanized_first_name': 'Jane',\n 'profile__romanized_last_name': 'Smith',\n 'profile__user__email': 'jane@example.com',\n 'profile__address': '1 Main St, Room B345',\n 'profile__city': 'Boston',\n 'profile__state_or_territory': 'US-MA',\n 'profile__country': 'US',\n 'profile__postal_code': '02115',\n 'profile__phone_number': '+1 617 293-3423',\n }\n\n with mute_signals(post_save):\n exam_profiles = [ExamProfileFactory.create(**kwargs)]\n exam_profiles[0].updated_on = FIXED_DATETIME\n\n self.cdd_writer.write(self.tsv_file, exam_profiles)\n\n assert self.tsv_rows[0] == (\n \"14879\\tJane\\tSmith\\tjane@example.com\\t\"\n \"1 Main St, Room B345\\t\\t\\t\" # triple tab is for blank address2 and address3\n \"Boston\\tMA\\t02115\\tUSA\\t\"\n \"6172933423\\t1\\t2016/05/15 15:02:55\"\n )", "def _file_writer(self, lines, filename):\n if self.MockRun:\n return\n\n if self.Verbose:\n print \"Writing file %s\" % filename\n\n updated_file = open(filename, 'w')\n updated_file.write(''.join(lines))\n updated_file.close()", "def write(self, fname):\n pass", "def test_16_0_saveToFile(self):\n\n 
Rectangle.save_to_file([self.r1, self.r2])\n self.assertTrue(os.path.isfile(\"Rectangle.json\"))", "def write(self, filename): # real signature unknown; restored from __doc__\n pass", "def write(self, filename):\n pass", "def write(self, filename):\n pass", "def testWriteDependencies(self):\n dependencies = []\n solution_projects_by_guid = {}\n\n file_writer = writers.VS2008ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteDependencies(dependencies, solution_projects_by_guid)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertEqual(output_data, b'')", "def test_write(self):\n with TemporaryDirectoryChanger():\n nhflux.NhfluxStreamVariant.writeBinary(self.nhf, \"NHFLUX2\")\n with open(SIMPLE_HEXZ_NHFLUX_VARIANT, \"rb\") as f1, open(\n \"NHFLUX2\", \"rb\"\n ) as f2:\n expectedData = f1.read()\n actualData = f2.read()\n for expected, actual in zip(expectedData, actualData):\n self.assertEqual(expected, actual)", "def testWriteDependencies(self):\n dependencies = []\n solution_projects_by_guid = {}\n\n file_writer = writers.VS2010ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer.WriteDependencies(dependencies, solution_projects_by_guid)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertEqual(output_data, b'')", "def test_write_championship_to_file():\n f1.write_championship_to_file()\n with open(\"championship_results.txt\") as new_file:\n list_of_lines = new_file.readlines()\n assert len(list_of_lines) == 13", "def test_change_dir_to_file(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_dir(dir0, \"foo\")\n self.sync_all()\n self.assertDirPresent(dir0, \"foo\")\n self.assertDirPresent(dir1, \"foo\")\n\n self.delete_dir(dir0, \"foo\")\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"bar\")\n self.assertFile(dir1, \"foo\", \"bar\")", "def test_create():\n\n with tempfile.TemporaryDirectory() as td:\n fp = os.path.join(td, 'outputs.h5')\n\n with Outputs(fp, 'w') as f:\n f.meta = meta\n f.time_index = time_index\n\n with h5py.File(fp, 'r') as f:\n test_meta = pd.DataFrame(f['meta'][...])\n test_ti = f['time_index'][...]\n assert test_meta.shape == (100, 2)\n assert len(test_ti) == 8760\n\n assert f.attrs['package'] == 'reV'\n assert f.attrs['version'] == __version__", "def test_error(file_path):\n assert check_file(file_path), \"Training file is not generated\"", "def test_append_or_createdf_create_file_0size():\n test_df = pd.DataFrame({\"test\": [\"testme\"]})\n with tempfile.NamedTemporaryFile() as temp_file, patch.object(\n test_df, \"to_csv\"\n ) as patch_tocsv:\n temp_file.seek(0)\n process_mutation.append_or_createdf(test_df, temp_file.name)\n patch_tocsv.assert_called_once_with(temp_file.name, sep=\"\\t\", index=False)", "def file_write(stuff, file_path):\n with open(file_path, \"wt\") as fo:\n fo.write(stuff)", "def test_GFD_export_create_file(self):\n filepath = '1.txt'\n gfd = flow_processing_input.GroundFlowData()\n gfd.detector_flow_data = createGFDDataset(1).dataset\n gfd.export_to_file(filepath)\n # Check if file was created at filepath\n self.assertTrue(os.path.exists(filepath))\n os.remove(filepath)", "def testWriteConfiguration(self):\n project_configuration = resources.VSProjectConfiguration()\n\n file_writer = writers.VS2008ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer._WriteConfiguration(project_configuration)\n\n file_writer._file.seek(0, 
os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertTrue(output_data.startswith(b'\\t\\t<Configuration\\r\\n'))\n self.assertTrue(output_data.endswith(b'\\t\\t</Configuration>\\r\\n'))", "def test_absolute_outdir(tmp_path):\n # Create destination directory.\n tempdir = tmp_path / \"outdir\"\n tempdir.mkdir(mode=0o700)\n assert tempdir.exists()\n assert tempdir.is_absolute()\n assert len(list(tempdir.glob(\"**/*.*\"))) == 0, \"Must be empty.\"\n # Create a new configuration file with an absolute output_directory.\n # We are cheating a little by writing it to the same directory\n # where the test files will be saved.\n config_file = tempdir / Path(\"rewritten.cfg\")\n contents = Path(\"tests/generate.cfg\").read_text(encoding=\"utf-8\")\n contents = contents.replace(\".gendir-suite-cfg\", str(tempdir))\n contents = contents.replace(\"print = filename, summary\", \"print = summary\")\n _ = config_file.write_text(contents, encoding=\"utf-8\")\n phmdoctest.main.generate_using(config_file=config_file)\n assert config_file.exists(), \"In output_directory and didn't get wiped.\"\n assert (Path(tempdir) / \"test_project.py\").exists()\n assert (Path(tempdir) / \"test_doc__directive1.py\").exists()\n assert (Path(tempdir) / \"test_doc__directive2.py\").exists()\n assert (Path(tempdir) / \"test_doc__directive3.py\").exists()\n assert (Path(tempdir) / \"test_doc__example1.py\").exists()\n assert (Path(tempdir) / \"test_doc__example2.py\").exists()\n assert (Path(tempdir) / \"test_doc__inline_example.py\").exists()\n assert (Path(tempdir) / \"test_tests__managenamespace.py\").exists()\n assert (Path(tempdir) / \"test_tests__one_code_block.py\").exists()\n assert (Path(tempdir) / \"test_tests__output_has_blank_lines.py\").exists()\n assert (Path(tempdir) / \"test_tests__setup_only.py\").exists()\n assert (Path(tempdir) / \"test_tests__twentysix_session_blocks.py\").exists()\n assert len(list(tempdir.glob(\"**/*.*\"))) == 13, \"12 test files and .cfg file.\"", "def test_create(self):\n path = self.tmp_py()\n # Creating a file that doesn't exist should succeed\n self.cls.create(path)\n self.assertTrue(os.path.exists(path))\n # Created file should be a valid script (If not, raises an error)\n self.cls.verify(path)\n # Can't create it again: it already exists\n self.assertRaises(exceptions.PathFoundError,self.cls.create,path)", "def generate_expected_one_file():\n fname = 'resources/simple_data.json'\n\n stress = np.linspace(0, 100)\n stress_time = np.linspace(0, 100)\n strain = np.linspace(0, 100)\n strain_time = np.linspace(0, 100)\n expected = pif.System(\n subSystems=None,\n properties=[\n pif.Property(name='stress',\n scalars=list(stress),\n conditions=pif.Value(\n name='time',\n scalars=list(stress_time))),\n\n pif.Property(name='strain',\n scalars=list(strain),\n conditions=pif.Value(\n name='time',\n scalars=list(strain_time)))\n ])\n with open(fname, 'w') as data:\n pif.dump(expected, data)\n\n return {\n 'file_name': fname,\n 'expected': expected\n }", "def testWriteOutIntDirConditions(self):\n configuration_name = 'Release'\n project_configurations = resources.VSConfigurations()\n\n file_writer = writers.VS2015ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer._WriteOutIntDirConditions(\n configuration_name, project_configurations)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertEqual(output_data, b'')", "def test_change_non_empty_dir_to_file(self):\n dir0, dir1 = self.make_temp_dirs(2)\n 
self.write_file(dir0, \"foo/bar\", \"baz\")\n self.sync_all()\n self.assertFile(dir0, \"foo/bar\", \"baz\")\n self.assertFile(dir1, \"foo/bar\", \"baz\")\n\n self.delete_file(dir0, \"foo/bar\")\n self.delete_dir(dir0, \"foo\")\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"bar\")\n self.assertFile(dir1, \"foo\", \"bar\")", "def write_tests(project_name, root_dir):\r\n test_path = get_file_path(root_dir, \"tests\", \"%s_tests.py\" % project_name) #Get the path for setup.py\r\n test_content = get_test_text(project_name)\r\n \r\n test_file = open(test_path, 'w')\r\n test_file.write(test_content)\r\n test_file.close()\r\n print_file(test_path)", "def test_create3(self):\n fname = TempfileManager.create_tempfile(suffix='bar')\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertTrue(fname.endswith('bar'))", "def makeTestFile(text):\n f = tempfile.NamedTemporaryFile()\n f.write(text)\n f.flush()\n return f", "def testWriteOutIntDirConditions(self):\n configuration_name = 'Release'\n project_configurations = resources.VSConfigurations()\n\n file_writer = writers.VS2010ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer._WriteOutIntDirConditions(\n configuration_name, project_configurations)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertEqual(output_data, b'')", "def testWriteOutIntDirConditions(self):\n configuration_name = 'Release'\n project_configurations = resources.VSConfigurations()\n\n file_writer = writers.VS2012ProjectFileWriter()\n\n file_writer._file = io.BytesIO()\n\n file_writer._WriteOutIntDirConditions(\n configuration_name, project_configurations)\n\n file_writer._file.seek(0, os.SEEK_SET)\n output_data = file_writer._file.read()\n\n self.assertEqual(output_data, b'')", "def test_as_file_false(self):\n with TemporaryDirectory() as tmp:\n # define path to file\n fp = os.path.join(tmp, \"asdf.txt\")\n\n # invoke atomic_write with param as_file set to False\n # this should return a temporary file path string\n with atomic_write(fp, as_file=False) as f:\n self.assertIsInstance(f, str)" ]
[ "0.7806734", "0.7739952", "0.7384342", "0.7323826", "0.7065146", "0.7057785", "0.703889", "0.70044935", "0.700189", "0.69939166", "0.69856334", "0.6979201", "0.69667256", "0.690036", "0.68829405", "0.68710196", "0.68689007", "0.68655556", "0.68140554", "0.67293036", "0.6689329", "0.66832584", "0.66656", "0.6656648", "0.66399837", "0.66390204", "0.6638663", "0.6608509", "0.6604127", "0.65606135", "0.6556688", "0.65501934", "0.6535316", "0.65338767", "0.65190053", "0.65040725", "0.650357", "0.6502254", "0.6500693", "0.6484637", "0.647024", "0.646348", "0.6450312", "0.6442386", "0.6435744", "0.6426033", "0.64036465", "0.63827187", "0.63750106", "0.6367626", "0.6334761", "0.6330952", "0.632556", "0.6324899", "0.6316496", "0.63147914", "0.63082385", "0.63081086", "0.62946695", "0.6294157", "0.6291409", "0.6289559", "0.6287552", "0.6286714", "0.62742877", "0.627173", "0.6267879", "0.62637043", "0.6254515", "0.6249439", "0.62447864", "0.62390566", "0.6235063", "0.62306315", "0.6227427", "0.6226868", "0.62262166", "0.62262166", "0.62251127", "0.62204975", "0.62184817", "0.62083817", "0.6206837", "0.62054205", "0.6204868", "0.62037873", "0.6203322", "0.62004477", "0.6191077", "0.61880624", "0.618756", "0.6171596", "0.6166202", "0.61601615", "0.6136732", "0.6135481", "0.61328155", "0.6124597", "0.6123813", "0.6122226" ]
0.6595651
29
MessagingCampaign - a model defined in Swagger
def __init__(self):
    self.swagger_types = {
        'id': 'str',
        'name': 'str',
        'date_created': 'datetime',
        'date_modified': 'datetime',
        'version': 'int',
        'division': 'DomainEntityRef',
        'campaign_status': 'str',
        'callable_time_set': 'DomainEntityRef',
        'contact_list': 'DomainEntityRef',
        'dnc_lists': 'list[DomainEntityRef]',
        'always_running': 'bool',
        'contact_sorts': 'list[ContactSort]',
        'messages_per_minute': 'int',
        'errors': 'list[RestErrorDetail]',
        'sms_config': 'SmsConfig',
        'self_uri': 'str'
    }

    self.attribute_map = {
        'id': 'id',
        'name': 'name',
        'date_created': 'dateCreated',
        'date_modified': 'dateModified',
        'version': 'version',
        'division': 'division',
        'campaign_status': 'campaignStatus',
        'callable_time_set': 'callableTimeSet',
        'contact_list': 'contactList',
        'dnc_lists': 'dncLists',
        'always_running': 'alwaysRunning',
        'contact_sorts': 'contactSorts',
        'messages_per_minute': 'messagesPerMinute',
        'errors': 'errors',
        'sms_config': 'smsConfig',
        'self_uri': 'selfUri'
    }

    self._id = None
    self._name = None
    self._date_created = None
    self._date_modified = None
    self._version = None
    self._division = None
    self._campaign_status = None
    self._callable_time_set = None
    self._contact_list = None
    self._dnc_lists = None
    self._always_running = None
    self._contact_sorts = None
    self._messages_per_minute = None
    self._errors = None
    self._sms_config = None
    self._self_uri = None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self):\n json_data = request.get_json()\n json_data[\"sender_id\"] = current_user.id\n try:\n new_campaign = self.schema.load(json_data)\n except ValidationError as err:\n return {\"message\": err.messages}, HTTPStatus.BAD_REQUEST\n if Campaign.query.filter_by(mailchimp_id=new_campaign.mailchimp_id).first() is not None:\n return {\"message\": \"Campaign already exists.\"}, HTTPStatus.CONFLICT\n db.session.add(new_campaign)\n db.session.commit()\n return self.schema.dump(new_campaign), HTTPStatus.CREATED", "def test_create_campaign(self):\n campaign = self.campaign\n\n self.assertTrue(isinstance(campaign, Campaign))\n self.assertEqual(campaign.name, \"Test Campaign\")", "def __init__(self):\n self.swagger_types = {\n 'id': 'str',\n 'start_date': 'datetime',\n 'length_minutes': 'int',\n 'activities': 'list[BuAgentScheduleActivity]',\n 'manually_edited': 'bool',\n 'schedule': 'BuScheduleReference'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'start_date': 'startDate',\n 'length_minutes': 'lengthMinutes',\n 'activities': 'activities',\n 'manually_edited': 'manuallyEdited',\n 'schedule': 'schedule'\n }\n\n self._id = None\n self._start_date = None\n self._length_minutes = None\n self._activities = None\n self._manually_edited = None\n self._schedule = None", "def create_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)", "def __init__(self, campaign, campaign_email, *args, **kwargs):\n super(TrackedEmailMessage, self).__init__(*args, **kwargs)\n\n self._set_campaign(campaign)\n self._set_campaign_email(campaign_email)", "def __init__(self):\n self.swagger_types = {\n 'id_template_notificacao': 'int',\n 'destinatarios': 'list[str]',\n 'anexos': 'list[AnexoNotificacaoEmailRequest]',\n 'parametros_conteudo': 'dict(str, object)'\n }\n\n self.attribute_map = {\n 'id_template_notificacao': 'idTemplateNotificacao',\n 'destinatarios': 'destinatarios',\n 'anexos': 'anexos',\n 'parametros_conteudo': 'parametrosConteudo'\n }\n\n self._id_template_notificacao = None\n self._destinatarios = None\n self._anexos = None\n self._parametros_conteudo = None", "def test_admin_sms_campaign_view_list(self):\n response = self.client.get('/admin/sms_module/smscampaign/')\n self.failUnlessEqual(response.status_code, 200)", "def __init__(self, email: str=None, is_bot: bool=None, avatar_url: str=None, avatar_version: int=None, full_name: str=None, is_admin: bool=None, is_owner: bool=None, is_billing_admin: bool=None, role: int=None, bot_type: int=None, user_id: int=None, bot_owner_id: int=None, is_active: bool=None, is_guest: bool=None, timezone: str=None, date_joined: str=None, delivery_email: str=None, profile_data: Dict[str, object]=None):\n self.openapi_types = {\n 'email': str,\n 'is_bot': bool,\n 'avatar_url': str,\n 'avatar_version': int,\n 'full_name': str,\n 'is_admin': bool,\n 'is_owner': bool,\n 'is_billing_admin': bool,\n 'role': int,\n 'bot_type': int,\n 'user_id': int,\n 'bot_owner_id': int,\n 'is_active': bool,\n 'is_guest': bool,\n 'timezone': str,\n 'date_joined': str,\n 'delivery_email': str,\n 'profile_data': Dict[str, object]\n }\n\n self.attribute_map = {\n 'email': 'email',\n 'is_bot': 'is_bot',\n 'avatar_url': 'avatar_url',\n 'avatar_version': 'avatar_version',\n 'full_name': 'full_name',\n 'is_admin': 'is_admin',\n 'is_owner': 'is_owner',\n 'is_billing_admin': 'is_billing_admin',\n 'role': 'role',\n 'bot_type': 'bot_type',\n 'user_id': 'user_id',\n 'bot_owner_id': 'bot_owner_id',\n 'is_active': 'is_active',\n 
'is_guest': 'is_guest',\n 'timezone': 'timezone',\n 'date_joined': 'date_joined',\n 'delivery_email': 'delivery_email',\n 'profile_data': 'profile_data'\n }\n\n self._email = email\n self._is_bot = is_bot\n self._avatar_url = avatar_url\n self._avatar_version = avatar_version\n self._full_name = full_name\n self._is_admin = is_admin\n self._is_owner = is_owner\n self._is_billing_admin = is_billing_admin\n self._role = role\n self._bot_type = bot_type\n self._user_id = user_id\n self._bot_owner_id = bot_owner_id\n self._is_active = is_active\n self._is_guest = is_guest\n self._timezone = timezone\n self._date_joined = date_joined\n self._delivery_email = delivery_email\n self._profile_data = profile_data", "def get_campaign(self, uuid):\n return Campaign.deserialize(self._get_single('campaigns', {'uuid': uuid}))", "def test_admin_sms_campaign_view_add(self):\n response = self.client.get('/admin/sms_module/smscampaign/add/')\n self.failUnlessEqual(response.status_code, 200)", "def __init__(self, request_url, client, options):\n super(ConversationRequest, self).__init__(request_url, client, options)", "def list_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def campaign(self, campaign):\n\n self._campaign = campaign", "def add_embedded_campaign(self, id, collection, campaign, confidence,\n analyst, date, description):\n if type(id) is not ObjectId:\n id = ObjectId(id)\n # TODO: Make sure the object does not already have the campaign\n # Return if it does. Add it if it doesn't\n obj = getattr(self.db, collection)\n result = obj.find({'_id': id, 'campaign.name': campaign})\n if result.count() > 0:\n return\n else:\n log.debug('Adding campaign to set: {}'.format(campaign))\n campaign_obj = {\n 'analyst': analyst,\n 'confidence': confidence,\n 'date': date,\n 'description': description,\n 'name': campaign\n }\n result = obj.update(\n {'_id': id},\n {'$push': {'campaign': campaign_obj}}\n )\n return result", "def __init__(self):\n self.swagger_types = {\n 'id': 'str',\n 'ticket_id': 'str',\n 'type': 'str',\n 'from_number': 'str',\n 'from_name': 'str',\n 'to_number': 'str',\n 'to_name': 'str',\n 'via_number': 'str',\n 'date_created': 'datetime',\n 'date_answered': 'datetime',\n 'date_finished': 'datetime'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'ticket_id': 'ticketId',\n 'type': 'type',\n 'from_number': 'fromNumber',\n 'from_name': 'fromName',\n 'to_number': 'toNumber',\n 'to_name': 'toName',\n 'via_number': 'viaNumber',\n 'date_created': 'dateCreated',\n 'date_answered': 'dateAnswered',\n 'date_finished': 'dateFinished'\n }\n\n self._id = None\n self._ticket_id = None\n self._type = None\n self._from_number = None\n self._from_name = None\n self._to_number = None\n self._to_name = None\n self._via_number = None\n self._date_created = None\n self._date_answered = None\n self._date_finished = None", "def get_campaign_command(client: Client, campaign_id: str) -> CommandResults | str:\n try:\n raw_response = client.get_campaign(campaign_id)\n except ValueError:\n return 'Campaign Id not found'\n\n campaign_general_fields = ['id', 'name', 'description', 'startDate', 'notable']\n campaign_fields = ['families', 'techniques', 'actors', 'brands', 'malware']\n\n outputs = {}\n outputs['campaignMembers'] = dict_safe_get(raw_response, ['campaignMembers'])\n outputs['info'] = {key: value for key, value in raw_response.items() if key in campaign_general_fields}\n outputs.update({key: value for key, value in raw_response.items() if key in 
campaign_fields})\n fields_readable_output = \"\"\n for field in campaign_fields:\n fields_readable_output += \"\\n\" + tableToMarkdown(field.capitalize(),\n dict_safe_get(outputs, [field]), headers=['id', 'name'],\n headerTransform=pascalToSpace\n )\n\n campaign_info_output = tableToMarkdown('Campaign Information',\n outputs['info'],\n headers=['id', 'name', 'description', 'startDate', 'notable'],\n headerTransform=pascalToSpace\n )\n campaign_members_output = tableToMarkdown('Campaign Members',\n outputs['campaignMembers'],\n headers=['id', 'threat', 'type'],\n headerTransform=pascalToSpace\n )\n\n readable_output = campaign_info_output + \"\\n\" + campaign_members_output + fields_readable_output\n\n return CommandResults(\n readable_output=readable_output,\n outputs_prefix='Proofpoint.Campaign',\n outputs=outputs,\n outputs_key_field='id',\n raw_response=raw_response\n )", "def __init__(self):\n self.swagger_types = {\n 'source_contact': 'AddressableEntityRef',\n 'target_contact': 'AddressableEntityRef',\n 'resulting_contact': 'AddressableEntityRef'\n }\n\n self.attribute_map = {\n 'source_contact': 'sourceContact',\n 'target_contact': 'targetContact',\n 'resulting_contact': 'resultingContact'\n }\n\n self._source_contact = None\n self._target_contact = None\n self._resulting_contact = None", "def __init__(self):\n self.swagger_types = {\n 'id_conta': 'int',\n 'id_produto': 'int',\n 'id_pessoa': 'int',\n 'id_parentesco': 'int',\n 'tipo_portador': 'str',\n 'nome_impresso': 'str',\n 'id_tipo_cartao': 'int',\n 'flag_ativo': 'int',\n 'data_cadastro_portador': 'str',\n 'data_cancelamento_portador': 'str'\n }\n\n self.attribute_map = {\n 'id_conta': 'idConta',\n 'id_produto': 'idProduto',\n 'id_pessoa': 'idPessoa',\n 'id_parentesco': 'idParentesco',\n 'tipo_portador': 'tipoPortador',\n 'nome_impresso': 'nomeImpresso',\n 'id_tipo_cartao': 'idTipoCartao',\n 'flag_ativo': 'flagAtivo',\n 'data_cadastro_portador': 'dataCadastroPortador',\n 'data_cancelamento_portador': 'dataCancelamentoPortador'\n }\n\n self._id_conta = None\n self._id_produto = None\n self._id_pessoa = None\n self._id_parentesco = None\n self._tipo_portador = None\n self._nome_impresso = None\n self._id_tipo_cartao = None\n self._flag_ativo = None\n self._data_cadastro_portador = None\n self._data_cancelamento_portador = None", "def testGetCampaign(self):\n if self.__class__.campaign1 is None:\n self.testSaveCampaign()\n self.assert_(isinstance(self.__class__.service.GetCampaign(\n self.__class__.campaign1['id']), tuple))", "def create(self, request, *args, **kwargs):\n self.serializer_class = ConversationDetailSerializer\n return super(ConversationViewSet, self).create(request, *args, **kwargs)", "def get_campaign(self, campaign_id: str) -> dict:\n return self.http_request(\"GET\", f'/campaign/{campaign_id}')", "def get(self):\n query = Campaign.query\n return paginate(Campaign.__tablename__, query, self.schema), HTTPStatus.OK", "def __init__(self):\n self.swagger_types = {\n 'id': 'str',\n 'domain': 'str',\n 'custom_domain': 'str',\n 'customer_email': 'str',\n 'customer_name': 'str',\n 'company': 'str',\n 'date_created': 'datetime',\n 'date_validity': 'datetime',\n 'status': 'str',\n 'account_id': 'str',\n 'cluster_id': 'str',\n 'task_id': 'str',\n 'version': 'str',\n 'is_latest': 'bool',\n 'product_id': 'str',\n 'variation_id': 'str'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'domain': 'domain',\n 'custom_domain': 'custom_domain',\n 'customer_email': 'customer_email',\n 'customer_name': 'customer_name',\n 'company': 
'company',\n 'date_created': 'date_created',\n 'date_validity': 'date_validity',\n 'status': 'status',\n 'account_id': 'account_id',\n 'cluster_id': 'cluster_id',\n 'task_id': 'task_id',\n 'version': 'version',\n 'is_latest': 'is_latest',\n 'product_id': 'product_id',\n 'variation_id': 'variation_id'\n }\n\n self._id = None\n self._domain = None\n self._custom_domain = None\n self._customer_email = None\n self._customer_name = None\n self._company = None\n self._date_created = None\n self._date_validity = None\n self._status = None\n self._account_id = None\n self._cluster_id = None\n self._task_id = None\n self._version = None\n self._is_latest = None\n self._product_id = None\n self._variation_id = None", "def __init__(self):\n self.swagger_types = {\n 'enabled': 'bool',\n 'auto_review': 'bool',\n 'allow_direct_trades': 'bool',\n 'min_hours_in_future': 'int',\n 'unequal_paid': 'str',\n 'one_sided': 'str',\n 'weekly_min_paid_violations': 'str',\n 'weekly_max_paid_violations': 'str',\n 'requires_matching_queues': 'bool',\n 'requires_matching_languages': 'bool',\n 'requires_matching_skills': 'bool',\n 'requires_matching_planning_groups': 'bool',\n 'activity_category_rules': 'list[ShiftTradeActivityRule]'\n }\n\n self.attribute_map = {\n 'enabled': 'enabled',\n 'auto_review': 'autoReview',\n 'allow_direct_trades': 'allowDirectTrades',\n 'min_hours_in_future': 'minHoursInFuture',\n 'unequal_paid': 'unequalPaid',\n 'one_sided': 'oneSided',\n 'weekly_min_paid_violations': 'weeklyMinPaidViolations',\n 'weekly_max_paid_violations': 'weeklyMaxPaidViolations',\n 'requires_matching_queues': 'requiresMatchingQueues',\n 'requires_matching_languages': 'requiresMatchingLanguages',\n 'requires_matching_skills': 'requiresMatchingSkills',\n 'requires_matching_planning_groups': 'requiresMatchingPlanningGroups',\n 'activity_category_rules': 'activityCategoryRules'\n }\n\n self._enabled = None\n self._auto_review = None\n self._allow_direct_trades = None\n self._min_hours_in_future = None\n self._unequal_paid = None\n self._one_sided = None\n self._weekly_min_paid_violations = None\n self._weekly_max_paid_violations = None\n self._requires_matching_queues = None\n self._requires_matching_languages = None\n self._requires_matching_skills = None\n self._requires_matching_planning_groups = None\n self._activity_category_rules = None", "def _createConferenceObject(self, request):\n # preload necessary data items\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n user_id = getUserId(user)\n\n if not request.name:\n raise endpoints.BadRequestException(\"Conference 'name' field required\")\n\n # copy ConferenceForm/ProtoRPC Message into dict\n data = {field.name: getattr(request, field.name) for field in request.all_fields()}\n del data['websafeKey']\n del data['organizerDisplayName']\n\n # add default values for those missing (both data model & outbound Message)\n for df in DEFAULTS:\n if data[df] in (None, []):\n data[df] = DEFAULTS[df]\n setattr(request, df, DEFAULTS[df])\n\n # convert dates from strings to Date objects; set month based on start_date\n if data['startDate']:\n data['startDate'] = datetime.strptime(data['startDate'][:10], \"%Y-%m-%d\").date()\n data['month'] = data['startDate'].month\n else:\n data['month'] = 0\n if data['endDate']:\n data['endDate'] = datetime.strptime(data['endDate'][:10], \"%Y-%m-%d\").date()\n\n # set seatsAvailable to be same as maxAttendees on creation\n if data[\"maxAttendees\"] > 0:\n 
data[\"seatsAvailable\"] = data[\"maxAttendees\"]\n # generate Profile Key based on user ID and Conference\n # ID based on Profile key get Conference key from ID\n p_key = ndb.Key(Profile, user_id)\n c_id = Conference.allocate_ids(size=1, parent=p_key)[0]\n c_key = ndb.Key(Conference, c_id, parent=p_key)\n data['key'] = c_key\n data['organizerUserId'] = request.organizerUserId = user_id\n\n # create Conference, send email to organizer confirming\n # creation of Conference & return (modified) ConferenceForm\n Conference(**data).put()\n taskqueue.add(params={'email': user.email(),\n 'conferenceInfo': repr(request)},\n url='/tasks/send_confirmation_email'\n )\n return request", "def create(self, request, *args, **kwargs):\n self.serializer_class = ConversationDetailSerializer\n return super(PublicChatViewSet, self).create(request, *args, **kwargs)", "def __init__(self):\n self.swagger_types = {\n 'ids': 'list[str]',\n 'consumer': 'str',\n 'entity_type': 'str',\n 'start_date': 'datetime',\n 'end_date': 'datetime',\n 'created_date': 'datetime',\n 'updated_date': 'datetime',\n 'scope': 'str',\n 'disabled': 'bool',\n 'id': 'str'\n }\n\n self.attribute_map = {\n 'ids': 'ids',\n 'consumer': 'consumer',\n 'entity_type': 'entityType',\n 'start_date': 'startDate',\n 'end_date': 'endDate',\n 'created_date': 'createdDate',\n 'updated_date': 'updatedDate',\n 'scope': 'scope',\n 'disabled': 'disabled',\n 'id': 'id'\n }\n\n self._ids = None\n self._consumer = None\n self._entity_type = None\n self._start_date = None\n self._end_date = None\n self._created_date = None\n self._updated_date = None\n self._scope = None\n self._disabled = None\n self._id = None", "def write_campaign(campaign_data):\n\n campaign = Campaign(**campaign_data)\n campaign.save()\n authorization.make_campaign_public(campaign)\n\n return campaign.id", "def campaign_id(request):\n\n user = None\n response = ApiJsonResponse()\n try:\n user = MyUser.objects.get(pk=request.user.pk)\n except ObjectDoesNotExist:\n return Response({\n \"msg\": _('MSG_USER_NOT_EXIST'),\n \"status\": 404\n }, status=404)\n try:\n company = Company.objects.get(owner=user)\n except:\n return Response({\n \"msg\": _('MSG_COMPANY_NOT_EXIST'),\n \"status\": 404\n }, status=404)\n try:\n promotions = Promotion.objects.filter(company=company)\n except ObjectDoesNotExist:\n response.set_data(\"[]\")\n response.set_result_code(200)\n response.set_result_msg(\"MSG_PROMOTIONS_NOT_FOUNDED\")\n return JsonResponse(response.get_dict())\n list_of_promotions = []\n for promotion in promotions:\n list_of_promotions.append({'name': promotion.campaign_name, 'id': promotion.pk})\n return Response({\n \"msg\": _('MSG_PROMOTION_FOUNDED'),\n \"list_of_promotions\": list_of_promotions,\n \"status\": 200\n }, status=200)", "def _createConferenceObject(self, request):\n # Preload necessary data items\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n user_id = user.email()\n if not request.name:\n raise endpoints.BadRequestException(\n \"Conference 'name' field required\")\n # Copy ConferenceForm/ProtoRPC Message into dict\n data = {\n field.name: getattr(request, field.name) for field in\n request.all_fields()\n }\n del data['websafeKey']\n del data['organizerDisplayName']\n # Add default values for those missing (both data model and\n # outbound Message)\n for df in CONF_DEFAULTS:\n if data[df] in (None, []):\n data[df] = CONF_DEFAULTS[df]\n setattr(request, df, CONF_DEFAULTS[df])\n # Convert dates from strings to 
Date objects; set month based\n # on start_date\n if data['startDate']:\n data['startDate'] = datetime.strptime(\n data['startDate'][:10], \"%Y-%m-%d\").date()\n data['month'] = data['startDate'].month\n else:\n data['month'] = 0\n if data['endDate']:\n data['endDate'] = datetime.strptime(\n data['endDate'][:10], \"%Y-%m-%d\").date()\n # Set seatsAvailable to be same as maxAttendees on creation\n if data[\"maxAttendees\"] > 0:\n data[\"seatsAvailable\"] = data[\"maxAttendees\"]\n # Get the user profile key, then set the conference's parent\n # to that value.\n # NOTE: The original code made a call to allocate_ids in order to\n # generate an ID for the conference. Since the profiles utilize\n # strings (email addresses) for their IDs, resulting in no risk\n # of colliding with NDB's auto-generated numeric IDs, I decided\n # to let NDB generate the conference ID automatically.\n # https://cloud.google.com/appengine/docs/python/ndb/entities?hl=en#numeric_keys\n p_key = ndb.Key(Profile, user_id)\n data['parent'] = p_key\n data['organizerUserId'] = request.organizerUserId = user_id\n # Create Conference, send email to organizer confirming\n # creation of Conference and return (modified) ConferenceForm\n Conference(**data).put()\n taskqueue.add(params={'email': user.email(),\n 'conferenceInfo': repr(request)},\n url='/tasks/send_confirmation_email'\n )\n return request", "def __init__(self):\n self.swagger_types = {\n 'id': 'str',\n 'name': 'str',\n 'channel_id': 'str',\n 'channel_secret': 'str',\n 'switcher_secret': 'str',\n 'service_code': 'str',\n 'self_uri': 'str'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'name': 'name',\n 'channel_id': 'channelId',\n 'channel_secret': 'channelSecret',\n 'switcher_secret': 'switcherSecret',\n 'service_code': 'serviceCode',\n 'self_uri': 'selfUri'\n }\n\n self._id = None\n self._name = None\n self._channel_id = None\n self._channel_secret = None\n self._switcher_secret = None\n self._service_code = None\n self._self_uri = None", "def get(self, campaign_id):\n campaign = Campaign.query.filter_by(mailchimp_id=campaign_id).first()\n if campaign is None:\n return {\"message\": \"Campaign could not be found.\"}, HTTPStatus.NOT_FOUND\n return self.schema.dump(campaign), HTTPStatus.OK", "def test_mesage_model(self):\n msg = Message", "def test_sms_campaign_view_add(self):\n request = self.factory.get('/sms_campaign/add/')\n request.user = self.user\n request.session = {}\n response = sms_campaign_add(request)\n self.assertEqual(response.status_code, 200)\n\n response = self.client.post('/sms_campaign/add/', data={\n \"name\": \"my sms campaign\",\n \"description\": \"xyz\",\n \"sms_gateway\": \"1\",\n }, follow=True)\n self.assertEqual(response.status_code, 200)\n\n request = self.factory.post('/sms_campaign/add/', {\n \"name\": \"my sms campaign 2\",\n \"description\": \"xyz\",\n \"sms_gateway\": \"1\",\n }, follow=True)\n request.user = self.user\n request.session = {}\n response = sms_campaign_add(request)\n self.assertEqual(response.status_code, 200)", "def __init__(self, **kwargs):\n self.swagger_types = {\n 'id': 'str',\n 'name': 'str',\n 'version': 'str',\n 'tagline': 'str',\n 'keywords': 'str',\n 'short_description': 'str',\n 'usage_information': 'str',\n 'long_description': 'str',\n 'license_model_description': 'str',\n 'system_requirements': 'str',\n 'time_released': 'datetime',\n 'release_notes': 'str',\n 'categories': 'list[str]',\n 'publisher': 'Publisher',\n 'languages': 'list[Item]',\n 'screenshots': 'list[Screenshot]',\n 'videos': 
'list[NamedLink]',\n 'support_contacts': 'list[SupportContact]',\n 'support_links': 'list[NamedLink]',\n 'documentation_links': 'list[DocumentationLink]',\n 'icon': 'UploadData',\n 'banner': 'UploadData',\n 'regions': 'list[Region]',\n 'package_type': 'str',\n 'default_package_version': 'str',\n 'links': 'list[Link]',\n 'is_featured': 'bool'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'name': 'name',\n 'version': 'version',\n 'tagline': 'tagline',\n 'keywords': 'keywords',\n 'short_description': 'shortDescription',\n 'usage_information': 'usageInformation',\n 'long_description': 'longDescription',\n 'license_model_description': 'licenseModelDescription',\n 'system_requirements': 'systemRequirements',\n 'time_released': 'timeReleased',\n 'release_notes': 'releaseNotes',\n 'categories': 'categories',\n 'publisher': 'publisher',\n 'languages': 'languages',\n 'screenshots': 'screenshots',\n 'videos': 'videos',\n 'support_contacts': 'supportContacts',\n 'support_links': 'supportLinks',\n 'documentation_links': 'documentationLinks',\n 'icon': 'icon',\n 'banner': 'banner',\n 'regions': 'regions',\n 'package_type': 'packageType',\n 'default_package_version': 'defaultPackageVersion',\n 'links': 'links',\n 'is_featured': 'isFeatured'\n }\n\n self._id = None\n self._name = None\n self._version = None\n self._tagline = None\n self._keywords = None\n self._short_description = None\n self._usage_information = None\n self._long_description = None\n self._license_model_description = None\n self._system_requirements = None\n self._time_released = None\n self._release_notes = None\n self._categories = None\n self._publisher = None\n self._languages = None\n self._screenshots = None\n self._videos = None\n self._support_contacts = None\n self._support_links = None\n self._documentation_links = None\n self._icon = None\n self._banner = None\n self._regions = None\n self._package_type = None\n self._default_package_version = None\n self._links = None\n self._is_featured = None", "def __init__(self, id=None, user_id=None, title=None, description=None, readonly=None, hidden=None, refresh_interval=None, refresh_interval_to_i=None, space=None, model=None, content_favorite_id=None, scheduled_plan=None, content_metadata_id=None, query_timezone=None, can=None):\n self.swagger_types = {\n 'id': 'str',\n 'user_id': 'int',\n 'title': 'str',\n 'description': 'str',\n 'readonly': 'bool',\n 'hidden': 'bool',\n 'refresh_interval': 'str',\n 'refresh_interval_to_i': 'int',\n 'space': 'SpaceBase',\n 'model': 'LookModel',\n 'content_favorite_id': 'int',\n 'scheduled_plan': 'ScheduledPlan',\n 'content_metadata_id': 'int',\n 'query_timezone': 'str',\n 'can': 'dict(str, bool)'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'user_id': 'user_id',\n 'title': 'title',\n 'description': 'description',\n 'readonly': 'readonly',\n 'hidden': 'hidden',\n 'refresh_interval': 'refresh_interval',\n 'refresh_interval_to_i': 'refresh_interval_to_i',\n 'space': 'space',\n 'model': 'model',\n 'content_favorite_id': 'content_favorite_id',\n 'scheduled_plan': 'scheduled_plan',\n 'content_metadata_id': 'content_metadata_id',\n 'query_timezone': 'query_timezone',\n 'can': 'can'\n }\n\n self._id = id\n self._user_id = user_id\n self._title = title\n self._description = description\n self._readonly = readonly\n self._hidden = hidden\n self._refresh_interval = refresh_interval\n self._refresh_interval_to_i = refresh_interval_to_i\n self._space = space\n self._model = model\n self._content_favorite_id = content_favorite_id\n self._scheduled_plan = 
scheduled_plan\n self._content_metadata_id = content_metadata_id\n self._query_timezone = query_timezone\n self._can = can", "async def send(\n self,\n content: undefined.UndefinedOr[typing.Any] = undefined.UNDEFINED,\n *,\n attachment: undefined.UndefinedOr[files.Resourceish] = undefined.UNDEFINED,\n attachments: undefined.UndefinedOr[typing.Sequence[files.Resourceish]] = undefined.UNDEFINED,\n component: undefined.UndefinedOr[special_endpoints.ComponentBuilder] = undefined.UNDEFINED,\n components: undefined.UndefinedOr[typing.Sequence[special_endpoints.ComponentBuilder]] = undefined.UNDEFINED,\n embed: undefined.UndefinedOr[embeds_.Embed] = undefined.UNDEFINED,\n embeds: undefined.UndefinedOr[typing.Sequence[embeds_.Embed]] = undefined.UNDEFINED,\n nonce: undefined.UndefinedOr[str] = undefined.UNDEFINED,\n tts: undefined.UndefinedOr[bool] = undefined.UNDEFINED,\n reply: undefined.UndefinedOr[snowflakes.SnowflakeishOr[messages.PartialMessage]] = undefined.UNDEFINED,\n mentions_everyone: undefined.UndefinedOr[bool] = undefined.UNDEFINED,\n mentions_reply: undefined.UndefinedOr[bool] = undefined.UNDEFINED,\n user_mentions: undefined.UndefinedOr[\n typing.Union[snowflakes.SnowflakeishSequence[PartialUser], bool]\n ] = undefined.UNDEFINED,\n role_mentions: undefined.UndefinedOr[\n typing.Union[snowflakes.SnowflakeishSequence[guilds.PartialRole], bool]\n ] = undefined.UNDEFINED,\n ) -> messages.Message: # noqa: E501 - Line too long\n channel_id = None\n if isinstance(self.app, traits.CacheAware):\n channel_id = self.app.cache.get_dm_channel_id(self.id)\n\n if channel_id is None:\n channel_id = (await self.fetch_dm_channel()).id\n\n return await self.app.rest.create_message(\n channel=channel_id,\n content=content,\n attachment=attachment,\n attachments=attachments,\n component=component,\n components=components,\n embed=embed,\n embeds=embeds,\n nonce=nonce,\n tts=tts,\n reply=reply,\n mentions_everyone=mentions_everyone,\n user_mentions=user_mentions,\n role_mentions=role_mentions,\n mentions_reply=mentions_reply,\n )", "def create_model(self, ApiId: str, Name: str, Schema: str, ContentType: str = None, Description: str = None) -> Dict:\n pass", "def adc_api_notification_workflow():\n json = request.get_json(force=True)\n return jsonify(adc.notification_workflow(json))", "def retrieve(self, request, *args, **kwargs):\n return super(ConversationViewSet, self).retrieve(request, *args, **kwargs)", "def test_create_new_campaign_by_admin_passes(self):\n response = self.client.post(\n self.endpoint_url,\n json={\n \"logo\": None,\n \"name\": NEW_CAMPAIGN_NAME,\n \"organisations\": [self.test_org.id],\n \"url\": None,\n },\n headers={\"Authorization\": self.session_token},\n )\n response_body = response.get_json()\n self.assertEqual(response.status_code, 201)\n self.assertEqual(response_body, {\"campaignId\": 2})", "def make_instance(self, include_optional):\n # model = ICA_SDK.models.create_subscription_request.CreateSubscriptionRequest() # noqa: E501\n if include_optional :\n return CreateSubscriptionRequest(\n type = 'a', \n actions = [\n '0'\n ], \n name = 'a', \n description = '0', \n filter_expression = '0', \n delivery_target = ICA_SDK.models.delivery_target.DeliveryTarget(\n aws_sns_topic = ICA_SDK.models.delivery_target_aws_sns_topic.DeliveryTargetAwsSnsTopic(\n topic_arn = '0', ), \n aws_sqs_queue = ICA_SDK.models.delivery_target_aws_sqs_queue.DeliveryTargetAwsSqsQueue(\n queue_url = '0', ), \n workflow_run_launch = 
ICA_SDK.models.delivery_target_workflow_run_launch.DeliveryTargetWorkflowRunLaunch(\n id = '0', \n version = '0', \n name = '0', \n input = ICA_SDK.models.input.input(), ), )\n )\n else :\n return CreateSubscriptionRequest(\n type = 'a',\n name = 'a',\n delivery_target = ICA_SDK.models.delivery_target.DeliveryTarget(\n aws_sns_topic = ICA_SDK.models.delivery_target_aws_sns_topic.DeliveryTargetAwsSnsTopic(\n topic_arn = '0', ), \n aws_sqs_queue = ICA_SDK.models.delivery_target_aws_sqs_queue.DeliveryTargetAwsSqsQueue(\n queue_url = '0', ), \n workflow_run_launch = ICA_SDK.models.delivery_target_workflow_run_launch.DeliveryTargetWorkflowRunLaunch(\n id = '0', \n version = '0', \n name = '0', \n input = ICA_SDK.models.input.input(), ), ),\n )", "def __init__(self, web_id=None, id=None, name=None, description=None, path=None, analysis_rule_plug_in_name=None, category_names=None, create_enabled=None, group_id=None, has_notification_template=None, has_target=None, output_time=None, target_name=None, time_rule_plug_in_name=None, links=None):\n self.swagger_types = {\n 'web_id': 'str',\n 'id': 'str',\n 'name': 'str',\n 'description': 'str',\n 'path': 'str',\n 'analysis_rule_plug_in_name': 'str',\n 'category_names': 'list[str]',\n 'create_enabled': 'bool',\n 'group_id': 'int',\n 'has_notification_template': 'bool',\n 'has_target': 'bool',\n 'output_time': 'str',\n 'target_name': 'str',\n 'time_rule_plug_in_name': 'str',\n 'links': 'InlineResponse2008Links'\n }\n\n self.attribute_map = {\n 'web_id': 'WebId',\n 'id': 'Id',\n 'name': 'Name',\n 'description': 'Description',\n 'path': 'Path',\n 'analysis_rule_plug_in_name': 'AnalysisRulePlugInName',\n 'category_names': 'CategoryNames',\n 'create_enabled': 'CreateEnabled',\n 'group_id': 'GroupId',\n 'has_notification_template': 'HasNotificationTemplate',\n 'has_target': 'HasTarget',\n 'output_time': 'OutputTime',\n 'target_name': 'TargetName',\n 'time_rule_plug_in_name': 'TimeRulePlugInName',\n 'links': 'Links'\n }\n\n self._web_id = web_id\n self._id = id\n self._name = name\n self._description = description\n self._path = path\n self._analysis_rule_plug_in_name = analysis_rule_plug_in_name\n self._category_names = category_names\n self._create_enabled = create_enabled\n self._group_id = group_id\n self._has_notification_template = has_notification_template\n self._has_target = has_target\n self._output_time = output_time\n self._target_name = target_name\n self._time_rule_plug_in_name = time_rule_plug_in_name\n self._links = links", "def __init__(self, request):\n self.request = request\n self.fields = Message(request.namespace)", "def test_create_campaign(self):\n acc1 = Account.objects.create(name='acc1', code='111-111')\n row = {'PROJ_NAME1': 'China Fund', 'PROJ_NO': 'CFD-111',\n 'LOCATION': 'CHINA', 'SUMMARY': 'Ssssss'}\n sync.create_campaign(acc1, row, 'China Fund', Account.COUNTRY)\n campaign = Campaign.objects.filter(name='China Fund').first()\n self.assertEqual(self.china.pk, campaign.country.pk)\n\n acc2 = Account.objects.create(name='acc2', code='222-222')\n row = {'PROJ_NAME1': 'Smith Memorial Fund', 'PROJ_NO': 'SPF-222',\n 'SUMMARY': 'Ssssss'}\n sync.create_campaign(acc2, row, 'Smith Memorial Fund',\n Account.MEMORIAL)\n campaign = Campaign.objects.filter(name='Smith Memorial Fund').first()\n self.assertEqual(None, campaign.country)\n self.assertEqual(\n {\"data\": [{\"type\": \"text\", \"data\": {\"text\": \"Ssssss\"}}]},\n json.loads(campaign.description))\n acc1.delete()\n acc2.delete()", "def __init__(self):\n self.swagger_types = {\n 
'owner_id': 'str',\n 'created_at': 'datetime',\n 'identifier': 'str',\n 'identifier_type': 'str',\n 'default_language': 'str',\n 'optional_identifier': 'str',\n 'id': 'str',\n 'v': 'float',\n 'id': 'str',\n 'case_records': 'list[str]'\n }\n\n self.attribute_map = {\n 'owner_id': '_ownerId',\n 'created_at': '_createdAt',\n 'identifier': 'identifier',\n 'identifier_type': 'identifierType',\n 'default_language': 'defaultLanguage',\n 'optional_identifier': 'optionalIdentifier',\n 'id': '_id',\n 'v': '__v',\n 'case_records': 'caseRecords'\n }\n\n self._owner_id = None\n self._created_at = None\n self._identifier = None\n self._identifier_type = None\n self._default_language = None\n self._optional_identifier = None\n self._id = None\n self._v = None\n self._id = None\n self._case_records = None", "def get_adcampaign(self, campaign_id, fields, batch=False):\n path = '%s' % campaign_id\n args = {'fields': fields}\n return self.make_request(path, 'GET', args, batch=batch)", "def rpc_campaign_message_new(self, campaign_id, email_id, target_email, company_name, first_name, last_name):\n\t\tsession = db_manager.Session()\n\t\tmessage = db_models.Message()\n\t\tmessage.id = email_id\n\t\tmessage.campaign_id = campaign_id\n\t\tmessage.target_email = target_email\n\t\tmessage.company_name = company_name\n\t\tmessage.first_name = first_name\n\t\tmessage.last_name = last_name\n\t\tsession.add(message)\n\t\tsession.commit()\n\t\tsession.close()\n\t\treturn", "def list_campaigns_extended(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def get_object(self):\n # read the URL data values into variables\n astronaut_pk = self.kwargs['astronaut_pk']\n message_pk = self.kwargs['message_pk']\n\n # find the SendMessage object, and return it\n st_cfh = SendMessage.objects.get(pk=message_pk)\n return st_cfh", "def testDeleteCampaign(self):\n if self.__class__.campaign2 is None:\n self.testSaveCampaign()\n self.assertEqual(self.__class__.service.DeleteCampaign(\n self.__class__.campaign2['id']), None)", "def __init__(self):\n self.swagger_types = {\n 'id': 'str',\n 'name': 'str',\n 'error_entity': 'DomainEntityRef',\n 'related_entity': 'DomainEntityRef',\n 'timestamp': 'datetime',\n 'level': 'str',\n 'category': 'str',\n 'correlation_id': 'str',\n 'event_message': 'EventMessage',\n 'self_uri': 'str'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'name': 'name',\n 'error_entity': 'errorEntity',\n 'related_entity': 'relatedEntity',\n 'timestamp': 'timestamp',\n 'level': 'level',\n 'category': 'category',\n 'correlation_id': 'correlationId',\n 'event_message': 'eventMessage',\n 'self_uri': 'selfUri'\n }\n\n self._id = None\n self._name = None\n self._error_entity = None\n self._related_entity = None\n self._timestamp = None\n self._level = None\n self._category = None\n self._correlation_id = None\n self._event_message = None\n self._self_uri = None", "def __init__(self):\n self.swagger_types = {\n 'id_conta': 'int',\n 'id_pessoa': 'int',\n 'id_cartao': 'int',\n 'id_bandeira': 'int',\n 'id_tipo_cartao': 'int',\n 'numero_cartao': 'str',\n 'nome_plastico': 'str',\n 'cvv2': 'str',\n 'data_geracao': 'str',\n 'data_validade': 'str',\n 'cpf': 'str',\n 'tipo_portador': 'str',\n 'trilha1': 'str',\n 'trilha2': 'str',\n 'trilha_cvv1': 'str',\n 'trilha_cvv2': 'str',\n 'flag_virtual': 'int',\n 'nome_bandeira': 'str',\n 'flag_titular': 'int',\n 'sequencial_cartao': 'int',\n 'id_status': 'int',\n 'descricao_status_cartao': 'str',\n 'data_status': 'str',\n 'id_estagio': 'int',\n 
'descricao_estagio': 'str',\n 'data_estagio': 'str',\n 'numero_bin': 'str',\n 'id_produto': 'int',\n 'descricao_produto': 'str',\n 'id_status_conta': 'int',\n 'descricao_status_conta': 'int',\n 'data_embossing': 'str',\n 'codigo_desbloqueio': 'str',\n 'nome_pessoa': 'str',\n 'tipo_pessoa': 'str',\n 'data_nascimento': 'str',\n 'id_endereco': 'int',\n 'id_tipo_endereco': 'int',\n 'descricao_tipo_endereco': 'str',\n 'cep': 'str',\n 'logradouro': 'str',\n 'numero_endereco': 'str',\n 'complemento_endereco': 'str',\n 'bairro': 'str',\n 'cidade': 'str',\n 'uf': 'str',\n 'pais': 'str',\n 'senha_criptografada': 'str',\n 'icvv': 'str',\n 'id_status_impressao': 'int'\n }\n\n self.attribute_map = {\n 'id_conta': 'idConta',\n 'id_pessoa': 'idPessoa',\n 'id_cartao': 'idCartao',\n 'id_bandeira': 'idBandeira',\n 'id_tipo_cartao': 'idTipoCartao',\n 'numero_cartao': 'numeroCartao',\n 'nome_plastico': 'nomePlastico',\n 'cvv2': 'cvv2',\n 'data_geracao': 'dataGeracao',\n 'data_validade': 'dataValidade',\n 'cpf': 'cpf',\n 'tipo_portador': 'tipoPortador',\n 'trilha1': 'trilha1',\n 'trilha2': 'trilha2',\n 'trilha_cvv1': 'trilhaCVV1',\n 'trilha_cvv2': 'trilhaCVV2',\n 'flag_virtual': 'flagVirtual',\n 'nome_bandeira': 'nomeBandeira',\n 'flag_titular': 'flagTitular',\n 'sequencial_cartao': 'sequencialCartao',\n 'id_status': 'idStatus',\n 'descricao_status_cartao': 'descricaoStatusCartao',\n 'data_status': 'dataStatus',\n 'id_estagio': 'idEstagio',\n 'descricao_estagio': 'descricaoEstagio',\n 'data_estagio': 'dataEstagio',\n 'numero_bin': 'numeroBin',\n 'id_produto': 'idProduto',\n 'descricao_produto': 'descricaoProduto',\n 'id_status_conta': 'idStatusConta',\n 'descricao_status_conta': 'descricaoStatusConta',\n 'data_embossing': 'dataEmbossing',\n 'codigo_desbloqueio': 'codigoDesbloqueio',\n 'nome_pessoa': 'nomePessoa',\n 'tipo_pessoa': 'tipoPessoa',\n 'data_nascimento': 'dataNascimento',\n 'id_endereco': 'idEndereco',\n 'id_tipo_endereco': 'idTipoEndereco',\n 'descricao_tipo_endereco': 'descricaoTipoEndereco',\n 'cep': 'cep',\n 'logradouro': 'logradouro',\n 'numero_endereco': 'numeroEndereco',\n 'complemento_endereco': 'complementoEndereco',\n 'bairro': 'bairro',\n 'cidade': 'cidade',\n 'uf': 'uf',\n 'pais': 'pais',\n 'senha_criptografada': 'senhaCriptografada',\n 'icvv': 'icvv',\n 'id_status_impressao': 'idStatusImpressao'\n }\n\n self._id_conta = None\n self._id_pessoa = None\n self._id_cartao = None\n self._id_bandeira = None\n self._id_tipo_cartao = None\n self._numero_cartao = None\n self._nome_plastico = None\n self._cvv2 = None\n self._data_geracao = None\n self._data_validade = None\n self._cpf = None\n self._tipo_portador = None\n self._trilha1 = None\n self._trilha2 = None\n self._trilha_cvv1 = None\n self._trilha_cvv2 = None\n self._flag_virtual = None\n self._nome_bandeira = None\n self._flag_titular = None\n self._sequencial_cartao = None\n self._id_status = None\n self._descricao_status_cartao = None\n self._data_status = None\n self._id_estagio = None\n self._descricao_estagio = None\n self._data_estagio = None\n self._numero_bin = None\n self._id_produto = None\n self._descricao_produto = None\n self._id_status_conta = None\n self._descricao_status_conta = None\n self._data_embossing = None\n self._codigo_desbloqueio = None\n self._nome_pessoa = None\n self._tipo_pessoa = None\n self._data_nascimento = None\n self._id_endereco = None\n self._id_tipo_endereco = None\n self._descricao_tipo_endereco = None\n self._cep = None\n self._logradouro = None\n self._numero_endereco = None\n 
self._complemento_endereco = None\n self._bairro = None\n self._cidade = None\n self._uf = None\n self._pais = None\n self._senha_criptografada = None\n self._icvv = None\n self._id_status_impressao = None", "def message(**payload):\n web_client = payload[\"web_client\"]\n\n # Getting information from the response\n data = payload[\"data\"]\n channel_id = data.get(\"channel\")\n text = data.get(\"text\")\n subtype = data.get(\"subtype\")\n ts = data['ts']\n user = data.get('username') if not data.get('user') else data.get('user')\n # Creating a Converstion object\n message = Message(ts, user, text)\n\n # Appending the converstion attributes to the logs\n conversation.append(message.toDict())\n\n if subtype == 'bot_message': return\n\n do_respond(web_client, channel_id, text)", "def test_get_campaign_by_id_passes(self):\n response = self.client.get(f\"{self.endpoint_url}{self.test_campaign.id}/\")\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_body, {\"id\": CAMPAIGN_ID, \"name\": CAMPAIGN_NAME})", "def edit_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)", "def __init__(self): # noqa: E501\n self.openapi_types = {\n }\n\n self.attribute_map = {\n }", "def __init__(self):\n self.swagger_types = {\n 'id': 'str',\n 'name': 'str',\n 'device_token': 'str',\n 'notification_id': 'str',\n 'make': 'str',\n 'model': 'str',\n 'accept_notifications': 'bool',\n 'type': 'str',\n 'session_hash': 'str',\n 'self_uri': 'str'\n }\n\n self.attribute_map = {\n 'id': 'id',\n 'name': 'name',\n 'device_token': 'deviceToken',\n 'notification_id': 'notificationId',\n 'make': 'make',\n 'model': 'model',\n 'accept_notifications': 'acceptNotifications',\n 'type': 'type',\n 'session_hash': 'sessionHash',\n 'self_uri': 'selfUri'\n }\n\n self._id = None\n self._name = None\n self._device_token = None\n self._notification_id = None\n self._make = None\n self._model = None\n self._accept_notifications = None\n self._type = None\n self._session_hash = None\n self._self_uri = None", "def update(self, conversation):\n self.content_type = \"application/json\"\n self.method = \"PATCH\"\n entity = Conversation(json.loads(self.send(conversation).content))\n self._initialize_collection_properties(entity)\n return entity", "def rpc_campaign_new(self, name):\n\t\tsession = db_manager.Session()\n\t\tcampaign = db_models.Campaign(name=name, user_id=self.basic_auth_user)\n\t\tsession.add(campaign)\n\t\tsession.commit()\n\t\treturn campaign.id", "def test_sms_campaign_list(self):\n response = self.client.get('/sms_campaign/')\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'frontend/sms_campaign/list.html')\n\n request = self.factory.get('/sms_campaign/')\n request.user = self.user\n request.session = {}\n response = sms_campaign_list(request)\n self.assertEqual(response.status_code, 200)", "def test_SMSCampaign(self):\n self.client.login(username='arch', password='admin')\n response = self.client.get(reverse('echo:sms_campaign'))\n self.assertEqual(response.status_code, 200)", "def get_campaign(self, campaignId, **kwargs) -> ApiResponse:\n return self._request(fill_query_params(kwargs.pop('path'), campaignId), params=kwargs)", "def __init__(self, message_properties: ConfigNodePropertyArray=None, message_box_size_limit: ConfigNodePropertyInteger=None, message_count_limit: ConfigNodePropertyInteger=None, notify_failure: ConfigNodePropertyBoolean=None, 
failure_message_from: ConfigNodePropertyString=None, failure_template_path: ConfigNodePropertyString=None, max_retries: ConfigNodePropertyInteger=None, min_wait_between_retries: ConfigNodePropertyInteger=None, count_update_pool_size: ConfigNodePropertyInteger=None, inbox_path: ConfigNodePropertyString=None, sentitems_path: ConfigNodePropertyString=None, support_attachments: ConfigNodePropertyBoolean=None, support_group_messaging: ConfigNodePropertyBoolean=None, max_total_recipients: ConfigNodePropertyInteger=None, batch_size: ConfigNodePropertyInteger=None, max_total_attachment_size: ConfigNodePropertyInteger=None, attachment_type_blacklist: ConfigNodePropertyArray=None, allowed_attachment_types: ConfigNodePropertyArray=None, service_selector: ConfigNodePropertyString=None, field_whitelist: ConfigNodePropertyArray=None): # noqa: E501\n self.openapi_types = {\n 'message_properties': ConfigNodePropertyArray,\n 'message_box_size_limit': ConfigNodePropertyInteger,\n 'message_count_limit': ConfigNodePropertyInteger,\n 'notify_failure': ConfigNodePropertyBoolean,\n 'failure_message_from': ConfigNodePropertyString,\n 'failure_template_path': ConfigNodePropertyString,\n 'max_retries': ConfigNodePropertyInteger,\n 'min_wait_between_retries': ConfigNodePropertyInteger,\n 'count_update_pool_size': ConfigNodePropertyInteger,\n 'inbox_path': ConfigNodePropertyString,\n 'sentitems_path': ConfigNodePropertyString,\n 'support_attachments': ConfigNodePropertyBoolean,\n 'support_group_messaging': ConfigNodePropertyBoolean,\n 'max_total_recipients': ConfigNodePropertyInteger,\n 'batch_size': ConfigNodePropertyInteger,\n 'max_total_attachment_size': ConfigNodePropertyInteger,\n 'attachment_type_blacklist': ConfigNodePropertyArray,\n 'allowed_attachment_types': ConfigNodePropertyArray,\n 'service_selector': ConfigNodePropertyString,\n 'field_whitelist': ConfigNodePropertyArray\n }\n\n self.attribute_map = {\n 'message_properties': 'message.properties',\n 'message_box_size_limit': 'messageBoxSizeLimit',\n 'message_count_limit': 'messageCountLimit',\n 'notify_failure': 'notifyFailure',\n 'failure_message_from': 'failureMessageFrom',\n 'failure_template_path': 'failureTemplatePath',\n 'max_retries': 'maxRetries',\n 'min_wait_between_retries': 'minWaitBetweenRetries',\n 'count_update_pool_size': 'countUpdatePoolSize',\n 'inbox_path': 'inbox.path',\n 'sentitems_path': 'sentitems.path',\n 'support_attachments': 'supportAttachments',\n 'support_group_messaging': 'supportGroupMessaging',\n 'max_total_recipients': 'maxTotalRecipients',\n 'batch_size': 'batchSize',\n 'max_total_attachment_size': 'maxTotalAttachmentSize',\n 'attachment_type_blacklist': 'attachmentTypeBlacklist',\n 'allowed_attachment_types': 'allowedAttachmentTypes',\n 'service_selector': 'serviceSelector',\n 'field_whitelist': 'fieldWhitelist'\n }\n\n self._message_properties = message_properties\n self._message_box_size_limit = message_box_size_limit\n self._message_count_limit = message_count_limit\n self._notify_failure = notify_failure\n self._failure_message_from = failure_message_from\n self._failure_template_path = failure_template_path\n self._max_retries = max_retries\n self._min_wait_between_retries = min_wait_between_retries\n self._count_update_pool_size = count_update_pool_size\n self._inbox_path = inbox_path\n self._sentitems_path = sentitems_path\n self._support_attachments = support_attachments\n self._support_group_messaging = support_group_messaging\n self._max_total_recipients = max_total_recipients\n self._batch_size = batch_size\n 
self._max_total_attachment_size = max_total_attachment_size\n self._attachment_type_blacklist = attachment_type_blacklist\n self._allowed_attachment_types = allowed_attachment_types\n self._service_selector = service_selector\n self._field_whitelist = field_whitelist", "def opt_model_create_rest_api():\n request_json = request.get_json()\n OptimModelRequestAPI(request_json).validate()\n return create_model_data(request_json)", "def createCampaignConfig(docContent, url=reqmgr_url):\n if isinstance(docContent, list) and len(docContent) > 1:\n print(\"ERROR: createCampaignConfig expects a single campaign configuration, not a list of them!\")\n return False\n elif isinstance(docContent, list):\n docContent = docContent[0]\n outcome = True\n headers = {\"Content-type\": \"application/json\", \"Accept\": \"application/json\"}\n conn = make_x509_conn(url)\n url = '/reqmgr2/data/campaignconfig/%s' % docContent['CampaignName']\n json_args = json.dumps(docContent)\n conn.request(\"POST\", url, json_args, headers=headers)\n resp = conn.getresponse()\n if resp.status >= 400:\n print(\"FAILED to create campaign: %s. Response status: %s, response reason: %s\"\n % (docContent['CampaignName'], resp.status, resp.reason))\n outcome = False\n conn.close()\n return outcome", "def test_get_existent_campaigns_returns_campaigns_list(self):\n test_campaign = return_canned_campaign()\n test_campaign.create()\n response = self.client.get(self.endpoint_url)\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(\n response_body, {\"campaigns\": [{\"id\": 1, \"name\": \"Test Campaign\"}]}\n )", "def post(self):\n data = request.json\n\n now = datetime.utcnow().isoformat()\n data['created_time'] = now\n\n # Our type is already registered within the DB, so generate a\n # model object that looks like what we'll be interacting with\n try:\n activity = Activity(data)\n except KeyError:\n raise BadRequest(\"payload validation failed: {}\".format(data))\n\n activity.save()\n log.debug(\"Wrote activity: \" + str(activity._to_dict()))\n return activity._to_dict(), 201", "def _set_campaign(self, campaign):\n if isinstance(campaign, str):\n campaign = TrackedCampaign.objects.create(name=campaign)\n\n campaign.save()\n\n self.campaign = campaign", "def __init__(self, conversation_participant_arn=None, conversation_participant_name=None, conversation_participant_uuid=None, email=None, joined_dts=None, language_iso_code=None, last_message_dts=None, left_dts=None, profile_image_url=None, sms_phone_number=None, status=None, timezone=None, unread_messages=None): # noqa: E501 # noqa: E501\n\n self._conversation_participant_arn = None\n self._conversation_participant_name = None\n self._conversation_participant_uuid = None\n self._email = None\n self._joined_dts = None\n self._language_iso_code = None\n self._last_message_dts = None\n self._left_dts = None\n self._profile_image_url = None\n self._sms_phone_number = None\n self._status = None\n self._timezone = None\n self._unread_messages = None\n self.discriminator = None\n\n if conversation_participant_arn is not None:\n self.conversation_participant_arn = conversation_participant_arn\n if conversation_participant_name is not None:\n self.conversation_participant_name = conversation_participant_name\n if conversation_participant_uuid is not None:\n self.conversation_participant_uuid = conversation_participant_uuid\n if email is not None:\n self.email = email\n if joined_dts is not None:\n self.joined_dts = joined_dts\n if 
language_iso_code is not None:\n self.language_iso_code = language_iso_code\n if last_message_dts is not None:\n self.last_message_dts = last_message_dts\n if left_dts is not None:\n self.left_dts = left_dts\n if profile_image_url is not None:\n self.profile_image_url = profile_image_url\n if sms_phone_number is not None:\n self.sms_phone_number = sms_phone_number\n if status is not None:\n self.status = status\n if timezone is not None:\n self.timezone = timezone\n if unread_messages is not None:\n self.unread_messages = unread_messages", "def list(self, request, *args, **kwargs):\n return super(ConversationViewSet, self).list(request, *args, **kwargs)", "def _extend_record(self, campaign, fields, pull_ads):\n campaign_out = campaign.api_get(fields=fields).export_all_data()\n if pull_ads:\n campaign_out[\"ads\"] = {\"data\": []}\n ids = [ad[\"id\"] for ad in campaign.get_ads()]\n for ad_id in ids:\n campaign_out[\"ads\"][\"data\"].append({\"id\": ad_id})\n return campaign_out", "def __init__(self, id=None, created_at=None, finalized_at=None, status=None, status_detail=None, user_id=None, runtime=None, query_runtime=None, render_runtime=None, result_format=None, look_id=None, dashboard_id=None, lookml_dashboard_id=None, query_id=None, width=None, height=None, dashboard_style=None, dashboard_filters=None, can=None): # noqa: E501 # noqa: E501\n\n self._id = None\n self._created_at = None\n self._finalized_at = None\n self._status = None\n self._status_detail = None\n self._user_id = None\n self._runtime = None\n self._query_runtime = None\n self._render_runtime = None\n self._result_format = None\n self._look_id = None\n self._dashboard_id = None\n self._lookml_dashboard_id = None\n self._query_id = None\n self._width = None\n self._height = None\n self._dashboard_style = None\n self._dashboard_filters = None\n self._can = None\n self.discriminator = None\n\n if id is not None:\n self.id = id\n if created_at is not None:\n self.created_at = created_at\n if finalized_at is not None:\n self.finalized_at = finalized_at\n if status is not None:\n self.status = status\n if status_detail is not None:\n self.status_detail = status_detail\n if user_id is not None:\n self.user_id = user_id\n if runtime is not None:\n self.runtime = runtime\n if query_runtime is not None:\n self.query_runtime = query_runtime\n if render_runtime is not None:\n self.render_runtime = render_runtime\n if result_format is not None:\n self.result_format = result_format\n if look_id is not None:\n self.look_id = look_id\n if dashboard_id is not None:\n self.dashboard_id = dashboard_id\n if lookml_dashboard_id is not None:\n self.lookml_dashboard_id = lookml_dashboard_id\n if query_id is not None:\n self.query_id = query_id\n if width is not None:\n self.width = width\n if height is not None:\n self.height = height\n if dashboard_style is not None:\n self.dashboard_style = dashboard_style\n if dashboard_filters is not None:\n self.dashboard_filters = dashboard_filters\n if can is not None:\n self.can = can", "def __message_content__(self) -> MessageContent:", "def __init__(self, service_area: object=None, funder: object=None, area_served: object=None, member_of: object=None, events: object=None, sub_organization: object=None, has_offer_catalog: object=None, global_location_number: str=None, reviews: object=None, members: object=None, aggregate_rating: object=None, duns: str=None, tax_id: str=None, award: str=None, makes_offer: object=None, contact_points: object=None, awards: str=None, seeks: object=None, member: object=None, 
founders: object=None, alumni: object=None, dissolution_date: datetime=None, address: object=None, logo: str=None, employees: object=None, telephone: str=None, email: str=None, department: object=None, contact_point: object=None, parent_organization: object=None, legal_name: str=None, founding_date: datetime=None, employee: object=None, number_of_employees: object=None, naics: str=None, has_pos: object=None, review: object=None, founding_location: object=None, owns: object=None, event: object=None, founder: object=None, publishing_principles: object=None, sponsor: object=None, isic_v4: str=None, location: object=None, brand: object=None, vat_id: str=None, lei_code: str=None, fax_number: str=None, same_as: str=None, url: str=None, image: object=None, additional_type: str=None, name: str=None, identifier: str=None, potential_action: object=None, main_entity_of_page: str=None, description: str=None, disambiguating_description: str=None, alternate_name: str=None): # noqa: E501\n self.swagger_types = {\n 'service_area': object,\n 'funder': object,\n 'area_served': object,\n 'member_of': object,\n 'events': object,\n 'sub_organization': object,\n 'has_offer_catalog': object,\n 'global_location_number': str,\n 'reviews': object,\n 'members': object,\n 'aggregate_rating': object,\n 'duns': str,\n 'tax_id': str,\n 'award': str,\n 'makes_offer': object,\n 'contact_points': object,\n 'awards': str,\n 'seeks': object,\n 'member': object,\n 'founders': object,\n 'alumni': object,\n 'dissolution_date': datetime,\n 'address': object,\n 'logo': str,\n 'employees': object,\n 'telephone': str,\n 'email': str,\n 'department': object,\n 'contact_point': object,\n 'parent_organization': object,\n 'legal_name': str,\n 'founding_date': datetime,\n 'employee': object,\n 'number_of_employees': object,\n 'naics': str,\n 'has_pos': object,\n 'review': object,\n 'founding_location': object,\n 'owns': object,\n 'event': object,\n 'founder': object,\n 'publishing_principles': object,\n 'sponsor': object,\n 'isic_v4': str,\n 'location': object,\n 'brand': object,\n 'vat_id': str,\n 'lei_code': str,\n 'fax_number': str,\n 'same_as': str,\n 'url': str,\n 'image': object,\n 'additional_type': str,\n 'name': str,\n 'identifier': str,\n 'potential_action': object,\n 'main_entity_of_page': str,\n 'description': str,\n 'disambiguating_description': str,\n 'alternate_name': str\n }\n\n self.attribute_map = {\n 'service_area': 'serviceArea',\n 'funder': 'funder',\n 'area_served': 'areaServed',\n 'member_of': 'memberOf',\n 'events': 'events',\n 'sub_organization': 'subOrganization',\n 'has_offer_catalog': 'hasOfferCatalog',\n 'global_location_number': 'globalLocationNumber',\n 'reviews': 'reviews',\n 'members': 'members',\n 'aggregate_rating': 'aggregateRating',\n 'duns': 'duns',\n 'tax_id': 'taxID',\n 'award': 'award',\n 'makes_offer': 'makesOffer',\n 'contact_points': 'contactPoints',\n 'awards': 'awards',\n 'seeks': 'seeks',\n 'member': 'member',\n 'founders': 'founders',\n 'alumni': 'alumni',\n 'dissolution_date': 'dissolutionDate',\n 'address': 'address',\n 'logo': 'logo',\n 'employees': 'employees',\n 'telephone': 'telephone',\n 'email': 'email',\n 'department': 'department',\n 'contact_point': 'contactPoint',\n 'parent_organization': 'parentOrganization',\n 'legal_name': 'legalName',\n 'founding_date': 'foundingDate',\n 'employee': 'employee',\n 'number_of_employees': 'numberOfEmployees',\n 'naics': 'naics',\n 'has_pos': 'hasPOS',\n 'review': 'review',\n 'founding_location': 'foundingLocation',\n 'owns': 'owns',\n 'event': 
'event',\n 'founder': 'founder',\n 'publishing_principles': 'publishingPrinciples',\n 'sponsor': 'sponsor',\n 'isic_v4': 'isicV4',\n 'location': 'location',\n 'brand': 'brand',\n 'vat_id': 'vatID',\n 'lei_code': 'leiCode',\n 'fax_number': 'faxNumber',\n 'same_as': 'sameAs',\n 'url': 'url',\n 'image': 'image',\n 'additional_type': 'additionalType',\n 'name': 'name',\n 'identifier': 'identifier',\n 'potential_action': 'potentialAction',\n 'main_entity_of_page': 'mainEntityOfPage',\n 'description': 'description',\n 'disambiguating_description': 'disambiguatingDescription',\n 'alternate_name': 'alternateName'\n }\n\n self._service_area = service_area\n self._funder = funder\n self._area_served = area_served\n self._member_of = member_of\n self._events = events\n self._sub_organization = sub_organization\n self._has_offer_catalog = has_offer_catalog\n self._global_location_number = global_location_number\n self._reviews = reviews\n self._members = members\n self._aggregate_rating = aggregate_rating\n self._duns = duns\n self._tax_id = tax_id\n self._award = award\n self._makes_offer = makes_offer\n self._contact_points = contact_points\n self._awards = awards\n self._seeks = seeks\n self._member = member\n self._founders = founders\n self._alumni = alumni\n self._dissolution_date = dissolution_date\n self._address = address\n self._logo = logo\n self._employees = employees\n self._telephone = telephone\n self._email = email\n self._department = department\n self._contact_point = contact_point\n self._parent_organization = parent_organization\n self._legal_name = legal_name\n self._founding_date = founding_date\n self._employee = employee\n self._number_of_employees = number_of_employees\n self._naics = naics\n self._has_pos = has_pos\n self._review = review\n self._founding_location = founding_location\n self._owns = owns\n self._event = event\n self._founder = founder\n self._publishing_principles = publishing_principles\n self._sponsor = sponsor\n self._isic_v4 = isic_v4\n self._location = location\n self._brand = brand\n self._vat_id = vat_id\n self._lei_code = lei_code\n self._fax_number = fax_number\n self._same_as = same_as\n self._url = url\n self._image = image\n self._additional_type = additional_type\n self._name = name\n self._identifier = identifier\n self._potential_action = potential_action\n self._main_entity_of_page = main_entity_of_page\n self._description = description\n self._disambiguating_description = disambiguating_description\n self._alternate_name = alternate_name", "def custom_openapi() -> Dict:\n if app.openapi_schema:\n return app.openapi_schema\n openapi_schema = get_openapi(\n title=\"The GenomicMedLab Cool Seq Tool\",\n version=__version__,\n description=\"Common Operations On Lots-of Sequences Tool.\",\n routes=app.routes\n )\n\n openapi_schema[\"info\"][\"contact\"] = {\n \"name\": \"Alex H. 
Wagner\",\n \"email\": \"Alex.Wagner@nationwidechildrens.org\",\n \"url\": \"https://www.nationwidechildrens.org/specialties/institute-for-genomic-medicine/research-labs/wagner-lab\" # noqa: E501\n }\n app.openapi_schema = openapi_schema\n return app.openapi_schema", "def build_model(self) -> DM:\n\n model = DM()\n model['artifact'] = DM()\n model['artifact']['web-link'] = DM()\n if self.url is not None:\n model['artifact']['web-link']['URL'] = self.url\n if self.label is not None:\n model['artifact']['web-link']['label'] = self.label\n if self.filename is not None:\n model['artifact']['web-link']['link-text'] = self.filename\n \n self._set_model(model)\n return model", "def __init__(self, message_id, event_type, generated, traits, raw):\n Model.__init__(self, message_id=message_id, event_type=event_type,\n generated=generated, traits=traits, raw=raw)", "def __init__(self,\r\n id=None,\r\n customer_id=None,\r\n consumer_id=None,\r\n consumer_ssn=None,\r\n requester_name=None,\r\n request_id=None,\r\n mtype=None,\r\n status=None,\r\n created_date=None,\r\n customer_type=None,\r\n title=None,\r\n start_date=None,\r\n end_date=None,\r\n days=None,\r\n seasoned=None,\r\n gse_enabled=None,\r\n consolidated_available_balance=None,\r\n portfolio_id=None,\r\n institutions=None,\r\n assets=None,\r\n errors=None,\r\n constraints=None,\r\n source=None,\r\n additional_properties = {}):\r\n\r\n # Initialize members of the class\r\n self.id = id\r\n self.customer_id = customer_id\r\n self.consumer_id = consumer_id\r\n self.consumer_ssn = consumer_ssn\r\n self.requester_name = requester_name\r\n self.request_id = request_id\r\n self.mtype = mtype\r\n self.status = status\r\n self.errors = errors\r\n self.created_date = created_date\r\n self.constraints = constraints\r\n self.source = source\r\n self.customer_type = customer_type\r\n self.title = title\r\n self.start_date = start_date\r\n self.end_date = end_date\r\n self.days = days\r\n self.seasoned = seasoned\r\n self.gse_enabled = gse_enabled\r\n self.consolidated_available_balance = consolidated_available_balance\r\n self.portfolio_id = portfolio_id\r\n self.institutions = institutions\r\n self.assets = assets\r\n\r\n # Add additional model properties to the instance\r\n self.additional_properties = additional_properties", "def __init__(self):\n self.swagger_types = {\n 'detail_type': 'str',\n 'name': 'str',\n 'store_data': 'object',\n 'discovered': 'datetime',\n 'extraction_failure': 'bool',\n 'in_trash': 'bool',\n 'is_extracted': 'bool',\n 'meta_available': 'bool',\n 'size': 'int',\n 'start_time': 'datetime',\n 'end_time': 'datetime',\n 'duration': 'float',\n 'messages': 'int',\n 'tags': 'list[Tag]'\n }\n\n self.attribute_map = {\n 'detail_type': 'detail_type',\n 'name': 'name',\n 'store_data': 'store_data',\n 'discovered': 'discovered',\n 'extraction_failure': 'extraction_failure',\n 'in_trash': 'in_trash',\n 'is_extracted': 'is_extracted',\n 'meta_available': 'meta_available',\n 'size': 'size',\n 'start_time': 'start_time',\n 'end_time': 'end_time',\n 'duration': 'duration',\n 'messages': 'messages',\n 'tags': 'tags'\n }\n\n self._detail_type = None\n self._name = None\n self._store_data = None\n self._discovered = None\n self._extraction_failure = None\n self._in_trash = None\n self._is_extracted = None\n self._meta_available = None\n self._size = None\n self._start_time = None\n self._end_time = None\n self._duration = None\n self._messages = None\n self._tags = None", "def create(self, client_id, subject, name, from_name, from_email, reply_to, 
html_url,\n text_url, list_ids, segment_ids):\n body = {\n \"Subject\": subject,\n \"Name\": name,\n \"FromName\": from_name,\n \"FromEmail\": from_email,\n \"ReplyTo\": reply_to,\n \"HtmlUrl\": html_url,\n \"TextUrl\": text_url,\n \"ListIDs\": list_ids,\n \"SegmentIDs\": segment_ids}\n response = self._post(\"/campaigns/%s.json\" %\n client_id, json.dumps(body))\n self.campaign_id = json_to_py(response)\n return self.campaign_id", "def __init__(self, _configuration=None, **kwargs): # noqa: E501 # noqa: E501\n if _configuration is None:\n _configuration = Configuration()\n self._configuration = _configuration\n\n self._envelope_id = None\n self._hmac = None\n self._id = None\n self._mentions = None\n self._read = None\n self._sent_by_email = None\n self._sent_by_full_name = None\n self._sent_by_image_id = None\n self._sent_by_initials = None\n self._sent_by_recipient_id = None\n self._sent_by_user_id = None\n self._signing_group_id = None\n self._signing_group_name = None\n self._subject = None\n self._tab_id = None\n self._text = None\n self._thread_id = None\n self._thread_originator_id = None\n self._timestamp = None\n self._time_stamp_formatted = None\n self._visible_to = None\n self.discriminator = None\n\n setattr(self, \"_{}\".format('envelope_id'), kwargs.get('envelope_id', None))\n setattr(self, \"_{}\".format('hmac'), kwargs.get('hmac', None))\n setattr(self, \"_{}\".format('id'), kwargs.get('id', None))\n setattr(self, \"_{}\".format('mentions'), kwargs.get('mentions', None))\n setattr(self, \"_{}\".format('read'), kwargs.get('read', None))\n setattr(self, \"_{}\".format('sent_by_email'), kwargs.get('sent_by_email', None))\n setattr(self, \"_{}\".format('sent_by_full_name'), kwargs.get('sent_by_full_name', None))\n setattr(self, \"_{}\".format('sent_by_image_id'), kwargs.get('sent_by_image_id', None))\n setattr(self, \"_{}\".format('sent_by_initials'), kwargs.get('sent_by_initials', None))\n setattr(self, \"_{}\".format('sent_by_recipient_id'), kwargs.get('sent_by_recipient_id', None))\n setattr(self, \"_{}\".format('sent_by_user_id'), kwargs.get('sent_by_user_id', None))\n setattr(self, \"_{}\".format('signing_group_id'), kwargs.get('signing_group_id', None))\n setattr(self, \"_{}\".format('signing_group_name'), kwargs.get('signing_group_name', None))\n setattr(self, \"_{}\".format('subject'), kwargs.get('subject', None))\n setattr(self, \"_{}\".format('tab_id'), kwargs.get('tab_id', None))\n setattr(self, \"_{}\".format('text'), kwargs.get('text', None))\n setattr(self, \"_{}\".format('thread_id'), kwargs.get('thread_id', None))\n setattr(self, \"_{}\".format('thread_originator_id'), kwargs.get('thread_originator_id', None))\n setattr(self, \"_{}\".format('timestamp'), kwargs.get('timestamp', None))\n setattr(self, \"_{}\".format('time_stamp_formatted'), kwargs.get('time_stamp_formatted', None))\n setattr(self, \"_{}\".format('visible_to'), kwargs.get('visible_to', None))", "def get_campaign_extended(self, campaignId, **kwargs) -> ApiResponse:\n return self._request(fill_query_params(kwargs.pop('path'), campaignId), params=kwargs)", "def model(self):", "def model(self):", "def model(self):", "def model(self):", "def model(self):", "def adc_api_notification_resource():\n json = request.get_json(force=True)\n return jsonify(adc.notification_resource(json))", "def delete_campaign(self, campaignId, **kwargs) -> ApiResponse:\n return self._request(fill_query_params(kwargs.pop('path'), campaignId), params=kwargs)", "def testCopyCampaigns(self):\n if self.__class__.campaign1 is 
None:\n self.testSaveCampaign()\n requests = [{\n 'campaignId': self.__class__.campaign1['id']\n }]\n self.assert_(isinstance(self.__class__.service.CopyCampaigns(requests),\n tuple))", "def model(self) -> Type[Model]:", "def adc_api_workflow_create():\n workflow_json = request.get_json(force=True)\n\n return jsonify(adc.workflow_create(workflow_json=workflow_json))", "def create_campaign(client, customer_id, budget_resource_name):\n campaign_service = client.get_service(\"CampaignService\")\n campaign_operation = client.get_type(\"CampaignOperation\")\n campaign = campaign_operation.create\n campaign.name = f\"Interplanetary Cruise App #{uuid4()}\"\n campaign.campaign_budget = budget_resource_name\n # Recommendation: Set the campaign to PAUSED when creating it to\n # prevent the ads from immediately serving. Set to ENABLED once you've\n # added targeting and the ads are ready to serve.\n campaign.status = client.enums.CampaignStatusEnum.PAUSED\n # All App campaigns have an advertising_channel_type of\n # MULTI_CHANNEL to reflect the fact that ads from these campaigns are\n # eligible to appear on multiple channels.\n campaign.advertising_channel_type = (\n client.enums.AdvertisingChannelTypeEnum.MULTI_CHANNEL\n )\n campaign.advertising_channel_sub_type = (\n client.enums.AdvertisingChannelSubTypeEnum.APP_CAMPAIGN\n )\n # Sets the target CPA to $1 / app install.\n #\n # campaign_bidding_strategy is a 'oneof' message so setting target_cpa\n # is mutually exclusive with other bidding strategies such as\n # manual_cpc, commission, maximize_conversions, etc.\n # See https://developers.google.com/google-ads/api/reference/rpc\n # under current version / resources / Campaign\n campaign.target_cpa.target_cpa_micros = 1000000\n # Sets the App Campaign Settings.\n campaign.app_campaign_setting.app_id = \"com.google.android.apps.adwords\"\n campaign.app_campaign_setting.app_store = (\n client.enums.AppCampaignAppStoreEnum.GOOGLE_APP_STORE\n )\n # Optimize this campaign for getting new users for your app.\n campaign.app_campaign_setting.bidding_strategy_goal_type = (\n client.enums.AppCampaignBiddingStrategyGoalTypeEnum.OPTIMIZE_INSTALLS_TARGET_INSTALL_COST\n )\n # Optional fields\n campaign.start_date = (datetime.now() + timedelta(1)).strftime(\"%Y%m%d\")\n campaign.end_date = (datetime.now() + timedelta(365)).strftime(\"%Y%m%d\")\n # Optional: If you select the\n # OPTIMIZE_IN_APP_CONVERSIONS_TARGET_INSTALL_COST goal type, then also\n # specify your in-app conversion types so the Google Ads API can focus\n # your campaign on people who are most likely to complete the\n # corresponding in-app actions.\n #\n # campaign.selective_optimization.conversion_actions.extend(\n # [\"INSERT_CONVERSION_ACTION_RESOURCE_NAME_HERE\"]\n # )\n\n # Submits the campaign operation and print the results.\n campaign_response = campaign_service.mutate_campaigns(\n customer_id=customer_id, operations=[campaign_operation]\n )\n resource_name = campaign_response.results[0].resource_name\n print(f'Created App campaign with resource name: \"{resource_name}\".')\n return resource_name", "def initialise_conversation_model(self):\n self.conversation = model.conversation.ConversationSystem()\n #\n # Set all as alive\n for name in 'abcde':\n self.conversation.addKnowledge(['{0}-alive'.format(name)])\n #\n # And set the requires\n self.conversation.convertPresentToRequires('{0}-alive')", "def __init__(self, jsondict=None, strict=True):\n \n self.source = None\n \"\"\" Associated documentation about the medication.\n Type 
`FHIRReference` (represented as `dict` in JSON). \"\"\"\n \n self.type = None\n \"\"\" The category of medication document.\n Type `CodeableConcept` (represented as `dict` in JSON). \"\"\"\n \n super(MedicationKnowledgeMonograph, self).__init__(jsondict=jsondict, strict=strict)", "def get_adcampaign_detail(self, account_id, campaign_id, date_preset):\n campaign_fields = [\n 'name', 'campaign_status', 'daily_budget', 'lifetime_budget',\n 'start_time', 'end_time']\n campaign_data_columns = [\n 'campaign_name', 'reach', 'frequency', 'clicks',\n 'actions', 'total_actions', 'ctr', 'spend']\n adgroup_data_columns = [\n 'campaign_id', 'campaign_name', 'adgroup_id', 'adgroup_name',\n 'reach', 'frequency', 'clicks', 'ctr', 'actions', 'cpm', 'cpc',\n 'spend']\n demographic_data_columns = [\n 'campaign_id', 'reach', 'frequency', 'clicks', 'actions', 'spend',\n 'cpc', 'cpm', 'ctr', 'cost_per_total_action', 'age', 'gender']\n placement_data_columns = [\n 'campaign_id', 'reach', 'frequency', 'clicks', 'actions', 'spend',\n 'cpc', 'cpm', 'ctr', 'cost_per_total_action', 'placement']\n campaign_filters = [{\n 'field': 'campaign_id', 'type': 'in', 'value': [campaign_id]}]\n batch = [\n self.get_adaccount(account_id, ['currency'], batch=True),\n self.get_adcampaign(campaign_id, campaign_fields, batch=True),\n self.get_adreport_stats(\n account_id, date_preset, 'all_days', campaign_data_columns,\n campaign_filters, ['action_type'], True),\n self.get_adreport_stats(\n account_id, date_preset, 1, campaign_data_columns,\n campaign_filters, None, True),\n self.get_adreport_stats(\n account_id, date_preset, 'all_days', adgroup_data_columns,\n campaign_filters, None, True),\n self.get_adreport_stats(\n account_id, date_preset, 'all_days', demographic_data_columns,\n campaign_filters, None, True),\n self.get_adreport_stats(\n account_id, date_preset, 'all_days', placement_data_columns,\n campaign_filters, None, True),\n ]\n return self.make_batch_request(batch)", "def __init__(self):\n self.swagger_types = {\n 'app_id': 'int',\n 'app_sw_rev': 'str',\n 'avg_hops': 'float',\n 'avg_latency': 'int',\n 'charge': 'int',\n 'estimated_latency_to_mote': 'int',\n 'hw_model': 'int',\n 'hw_rev': 'int',\n 'id': 'int',\n 'join_sys_time': 'datetime',\n 'last_voltage': 'int',\n 'lost_packet_count': 'int',\n 'mac_address': 'str',\n 'max_current': 'int',\n 'max_num_links': 'int',\n 'max_num_neighbors': 'int',\n 'need_neighbor': 'bool',\n 'num_good_neighbors': 'int',\n 'num_joins': 'int',\n 'num_links': 'int',\n 'num_neighbors': 'int',\n 'num_parents': 'int',\n 'power_cost_rx_link': 'int',\n 'power_cost_tx_link': 'int',\n 'reliability': 'float',\n 'rx_packet_count': 'int',\n 'stack_sw_rev': 'str',\n 'state': 'str',\n 'state_reason': 'str',\n 'state_sys_time': 'datetime',\n 'used_current': 'int'\n }\n\n self.attribute_map = {\n 'app_id': 'appId',\n 'app_sw_rev': 'appSwRev',\n 'avg_hops': 'avgHops',\n 'avg_latency': 'avgLatency',\n 'charge': 'charge',\n 'estimated_latency_to_mote': 'estimatedLatencyToMote',\n 'hw_model': 'hwModel',\n 'hw_rev': 'hwRev',\n 'id': 'id',\n 'join_sys_time': 'joinSysTime',\n 'last_voltage': 'lastVoltage',\n 'lost_packet_count': 'lostPacketCount',\n 'mac_address': 'macAddress',\n 'max_current': 'maxCurrent',\n 'max_num_links': 'maxNumLinks',\n 'max_num_neighbors': 'maxNumNeighbors',\n 'need_neighbor': 'needNeighbor',\n 'num_good_neighbors': 'numGoodNeighbors',\n 'num_joins': 'numJoins',\n 'num_links': 'numLinks',\n 'num_neighbors': 'numNeighbors',\n 'num_parents': 'numParents',\n 'power_cost_rx_link': 
'powerCostRxLink',\n 'power_cost_tx_link': 'powerCostTxLink',\n 'reliability': 'reliability',\n 'rx_packet_count': 'rxPacketCount',\n 'stack_sw_rev': 'stackSwRev',\n 'state': 'state',\n 'state_reason': 'stateReason',\n 'state_sys_time': 'stateSysTime',\n 'used_current': 'usedCurrent'\n }\n\n self._app_id = None\n self._app_sw_rev = None\n self._avg_hops = None\n self._avg_latency = None\n self._charge = None\n self._estimated_latency_to_mote = None\n self._hw_model = None\n self._hw_rev = None\n self._id = None\n self._join_sys_time = None\n self._last_voltage = None\n self._lost_packet_count = None\n self._mac_address = None\n self._max_current = None\n self._max_num_links = None\n self._max_num_neighbors = None\n self._need_neighbor = None\n self._num_good_neighbors = None\n self._num_joins = None\n self._num_links = None\n self._num_neighbors = None\n self._num_parents = None\n self._power_cost_rx_link = None\n self._power_cost_tx_link = None\n self._reliability = None\n self._rx_packet_count = None\n self._stack_sw_rev = None\n self._state = None\n self._state_reason = None\n self._state_sys_time = None\n self._used_current = None", "def get(self):\n self.method = \"GET\"\n entity = Conversation(json.loads(self.send().content))\n self._initialize_collection_properties(entity)\n return entity", "def __init__(self):\n self.swagger_types = {\n 'annotations': 'dict(str, str)',\n 'end_time': 'int',\n 'hosts': 'list[str]',\n 'is_ephemeral': 'bool',\n 'is_user_event': 'bool',\n 'name': 'str',\n 'start_time': 'int',\n 'summarized_events': 'int',\n 'table': 'str',\n 'tags': 'list[str]'\n }\n\n self.attribute_map = {\n 'annotations': 'annotations',\n 'end_time': 'endTime',\n 'hosts': 'hosts',\n 'is_ephemeral': 'isEphemeral',\n 'is_user_event': 'isUserEvent',\n 'name': 'name',\n 'start_time': 'startTime',\n 'summarized_events': 'summarizedEvents',\n 'table': 'table',\n 'tags': 'tags'\n }\n\n self._annotations = None\n self._end_time = None\n self._hosts = None\n self._is_ephemeral = False\n self._is_user_event = False\n self._name = None\n self._start_time = None\n self._summarized_events = None\n self._table = None\n self._tags = None" ]
[ "0.5676597", "0.56284505", "0.528872", "0.5195491", "0.5169349", "0.5091118", "0.50815505", "0.5049281", "0.50430477", "0.5014071", "0.4998264", "0.4988041", "0.49813396", "0.49767828", "0.49386248", "0.49370554", "0.49350393", "0.49330032", "0.49310178", "0.49302855", "0.4923266", "0.4905545", "0.49011555", "0.48553076", "0.48452422", "0.48412606", "0.4836652", "0.48364946", "0.48327047", "0.48281068", "0.48185217", "0.481833", "0.48149776", "0.48085245", "0.48064584", "0.47919622", "0.4788631", "0.4776196", "0.475918", "0.4755572", "0.47519496", "0.47471717", "0.47467837", "0.47415343", "0.47120592", "0.47027287", "0.46902576", "0.46842352", "0.46827564", "0.46820635", "0.4675833", "0.46719107", "0.4659449", "0.4644796", "0.46437496", "0.46340245", "0.46279", "0.46276605", "0.46060178", "0.46053395", "0.4603814", "0.45989758", "0.45902377", "0.45877907", "0.45840037", "0.45739165", "0.45723483", "0.45721573", "0.45697105", "0.45535085", "0.45136565", "0.4504902", "0.4503746", "0.4496809", "0.44967136", "0.4493494", "0.44931486", "0.44897595", "0.44880053", "0.44849864", "0.4478205", "0.44689026", "0.44688284", "0.4463554", "0.4463554", "0.4463554", "0.4463554", "0.4463554", "0.44613865", "0.44579628", "0.44507438", "0.4448314", "0.444536", "0.4444878", "0.4431439", "0.4422682", "0.44166192", "0.441473", "0.44046804", "0.44017106" ]
document_score: 0.5792911
document_rank: 0
Gets the id of this MessagingCampaign. The globally unique identifier for the object.
def id(self): return self._id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self.__id", "def get_id(self):\n return self.__id", "def get_id(self):\n\n\t\treturn self.__id", "def getID(self):\n return self._id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def getId(self):\n\n return self.__id", "def getID(self):\n return self.__id", "def getid(self):\n return self.__id", "def GetID(self):\n return self.id", "def unique_id(self):\n return self._id", "def unique_id(self):\n return self._id", "def _get_id(self):\n return self.id", "def get_id(self) -> int:\n return self.id", "def get_id(self) -> int:\n return self.id", "def get_id(self) -> str: # noqa\n if self._id is None:\n self._id = str(uuid4())\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> str:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id", "def id(self) -> int:\n return self._id" ]
[ "0.77193385", "0.77193385", "0.77193385", "0.77193385", "0.77193385", "0.77193385", "0.765815", "0.765815", "0.7566762", "0.7515753", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.75107026", "0.74921083", "0.748478", "0.74667084", "0.74519056", "0.7438217", "0.7438217", "0.74175125", "0.7356128", "0.7356128", "0.7333958", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.731778", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586", "0.73088586" ]
document_score: 0.0
document_rank: -1
Sets the id of this MessagingCampaign. The globally unique identifier for the object.
def id(self, id): self._id = id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_id(self, id):\n self.__id = id", "def set_id(self, id_):\n\n self.id_ = id_", "def set_id(self, id_=None):\n if id_ is None:\n self.id = id(self)\n else:\n self.id = id_", "def set_id(self, id):\n self.data['id'] = id", "def setID(self, id):\n self._id = id\n return self.callRemote('setID', id)", "def SetId(self, id):\n self.id = int(id)", "def id(self, id: str):\n \n self._id = id", "def id(self, id: str):\n self._id = id", "def campaign_id(self, campaign_id):\n\n self._campaign_id = campaign_id", "def id(self, id: str):\n\n self._id = id", "def id(self, id: str):\n\n self._id = id", "def id(self, id: str):\n\n self._id = id", "def id(self, id):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\")\n\n self._id = id", "def id(self, id):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\")\n\n self._id = id", "def id(self, id):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\")\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def id(self, id: int):\n\n self._id = id", "def set_id(self, id):\n self.id = id\n print(\"self id = \" + str(self.id))", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id):\n self._id = id", "def id(self, id: str):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\") # noqa: E501\n\n self._id = id", "def id(self, id: str):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\") # noqa: E501\n\n self._id = id", "def id(self, id: str):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\") # noqa: E501\n\n self._id = id", "def id(self, id: str):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\") # noqa: E501\n\n self._id = id", "def id(self, id):\n if self._configuration.client_side_validation and id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\") # noqa: E501\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def 
id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id", "def id(self, id):\n\n self._id = id" ]
[ "0.7718702", "0.7609532", "0.757609", "0.74520427", "0.7333251", "0.73325944", "0.72652215", "0.7247922", "0.720335", "0.7196361", "0.7196361", "0.7196361", "0.71722925", "0.71722925", "0.71722925", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.71394897", "0.712948", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7111163", "0.7082416", "0.7082416", "0.7082416", "0.7082416", "0.7071507", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563", "0.70613563" ]
document_score: 0.72002715
document_rank: 13
Gets the name of this MessagingCampaign.
def name(self): return self._name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def campaign_name(self):\n\n return self._campaign_name", "def campaign_name(self):\n\n return self._campaign_name", "def campaign_name(self):\n\n return self._campaign_name", "def get_name(self) -> str:\n\n return self.name_", "def get_name(self) -> str:\n return self._name", "def get_name(self) -> str:\n return self._name", "def get_name(self):\n\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def get_name(self):\n return self._name", "def getName(self) -> str:\n return self._name", "def get_name(self):\n\n return self.name", "def get_name(self):\n\n return self.name", "def getName(self):\n\n return self._name", "def get_name(self) -> str:\n return self.name", "def get_name(self) -> str:\n return self.name", "def get_name(self) -> str:\n return self.name", "def getName(self):\n return self._get_name( )", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def getName(self):\n return self._name", "def get_name(self):\r\n return self._name", "def get_name(self):\r\n return self._name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self.name", "def get_name(self):\n return self._sName", "def get_name(self):\n return self._g.get_name()", "def get_name(self):\n return self.load_name(self.subject)", "def get_name(self):\n return self.__name", "def get_name(self):\n return self.__name", "def get_name(self):\n return self.__name", "def contact_name(self) -> str:\n return pulumi.get(self, \"contact_name\")", "def get_name(self):\n return self.attributes[\"name\"]", "def getName(self):\n\n return self.name", "def getName(self):\n\n return self.name", "def name(self):\n\n return self._get_field(\"name\")", "def get_name(self) -> str:\r\n return self.name", "def get_name(self):\n\n\t\treturn self.__name", "def name(self) -> str:\n\n return self._name", "def name(self) -> str:\n\n return self._name", "def name(self) -> str:\n\n return self._name", "def get_name(self):\n return self.name # return the name", "def get_name(self):\n return self.name # return the name", "def get_name(self):\n \n # Return the player's name\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", 
"def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name", "def name(self) -> str:\n return self._name" ]
[ "0.83846724", "0.83846724", "0.83846724", "0.7315201", "0.7272899", "0.7272899", "0.72664964", "0.717051", "0.717051", "0.717051", "0.717051", "0.717051", "0.717051", "0.717051", "0.717051", "0.717051", "0.717051", "0.7166163", "0.7131615", "0.7131615", "0.711222", "0.7105935", "0.7105935", "0.7105935", "0.708141", "0.7031734", "0.7031734", "0.7031734", "0.7031734", "0.7031734", "0.7031734", "0.7031734", "0.7031734", "0.70126617", "0.70126617", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6992271", "0.6988211", "0.6981938", "0.6981586", "0.6940574", "0.6940574", "0.6940574", "0.69396627", "0.6935922", "0.69317603", "0.69317603", "0.6921139", "0.69185907", "0.6885337", "0.6883092", "0.6883092", "0.6883092", "0.6872107", "0.6872107", "0.6853813", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744", "0.6844744" ]
document_score: 0.0
document_rank: -1
Sets the name of this MessagingCampaign.
def name(self, name): self._name = name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetName(self, name):\n self.name = name", "def set_name(self, name: str):\n self._name = name", "def set_name(self, name):\n self._name = name", "def set_name(self, name):\n self.name = name", "def set_name(self, name):\n self.name = name", "def set_name(self, name):\n self.name = name", "def set_name(self, name):\n self.name = name", "def set_name(self, name):\n self.name = name", "def set_name(self, _name):\n self.name = _name", "def set_name(self, name):\n assert isinstance(name, str), 'Name must be string'\n self._name = name", "def set_name(self, name):\n self.__name = name", "def set_name(self, name):\n self.__name = name", "def set_name(self, _name):\n self.name = _name\n return self.name", "def set_name(self, name):\r\n self.__name = name", "def set_name(self, name):\n\t\tself.name_ = name", "def name(self, name):\n\n self._set_field(\"name\", name)", "def set_name(self, name):\n self.name = name # overwrite the existing name with the input name", "def set_name(self, name):\n self.name = name # overwrite the existing name with the input name", "def setname(self, name):\n self.__name = name", "def set_name(self, name):\n return self.set_meta('name', name)", "def set_name(self, a_name):\n self.set_parameter('name', a_name)\n return self", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n\n self._name = name", "def set_name(self, name):\n self.options['name'] = name", "def setName(self, name):\n self._name = name", "def setName(self, name):\n self._name = name", "def set_name(self, name):\n self.settings[\"name\"] = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n \n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n \n self._name = name", "def setName(self, name):\n self.name = str(name)", "def contact_name(self, contact_name):\n\n self._contact_name = contact_name", "def contact_name(self, contact_name):\n\n self._contact_name = contact_name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not 
be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name: str):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def setName(self, name): \n\n self._name = name", "def name(self, name: str) -> None:\n self._name = name", "def set_name(self, PersonName):\r\n self.name = PersonName", "def name(self, name: str):\n\n self._name = name", "def name(self, name: str):\n\n self._name = name", "def name(self, name: str):\n\n self._name = name", "def name(self, name: str):\n\n self._name = name", "def name(self, name: str):\n\n self._name = name", "def name(self, name: str):\n\n self._name = name", "def name(self, name: str):\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n if name is not None and len(name) > 100:\n raise ValueError(\"Invalid value for `name`, length must be less than or equal to `100`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = 
name", "def name(self, name):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def set_name(self, name):\n\n\t\tif name is not None and not isinstance(name, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: name EXPECTED TYPE: str', None, None)\n\t\t\n\t\tself.__name = name\n\t\tself.__key_modified['name'] = 1", "def setName(self, name):\n self.name = name", "def setName(self, name):\n self.name = name", "def setName(self, name):\n self.name = name", "def setName(self, name):\n self.name = name", "def name(self, name: str):\r\n self._name = name", "def name(self, name: str):\r\n self._name = name", "def name(self, name: str):\r\n self._name = name", "def name(self, name: str):\r\n self._name = name", "def name(self, name):\n # if name is None:\n # raise ValueError(\"Invalid value for `name`, must not be `None`\") # noqa: E501\n\n self._name = name", "def __set_name(self, name):\r\n\t\tself.__name = name\r\n\t\tself._window.chat_panel.place_name = name\r\n\t\tself.encode_message(action=\"NO\", selected_name=name)", "def set_name(self,name):\n if not isinstance(name,(str)):\n raise TypeError('name must be string')\n else:\n self._name = name", "def name(self, name):\n\n self._set_field(\"name\", name.get_json())", "def name(self, name: \"str\"):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n self._attrs[\"name\"] = name", "def name(self, name: \"str\"):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n self._attrs[\"name\"] = name", "def name(self, name: \"str\"):\n if name is None:\n raise ValueError(\"Invalid value for `name`, must not be `None`\")\n self._attrs[\"name\"] = name", "def set_name(self, room_name):\n self.name = room_name", "def name(self, name):\n if name is not None and len(name) > 255:\n raise ValueError(\"Invalid value for `name`, length must be less than or equal to `255`\") # noqa: E501\n if name is not None and len(name) < 1:\n raise ValueError(\"Invalid value for `name`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._name = name", "def setName(self, name):\n self.name = name\n return self", "def name(self, name):\n\n self._name = name", "def name(self, name):\n\n self._name = name", "def name(self, name):\n\n self._name = name", "def name(self, name):\n\n self._name = name", "def name(self, name):\n\n self._name = name", "def name(self, name):\n\n self._name = name", "def name(self, name):\n\n self._name = name" ]
[ "0.735443", "0.7340858", "0.7322299", "0.7251668", "0.7251668", "0.7251668", "0.7251668", "0.7251668", "0.72128856", "0.7197441", "0.7195748", "0.7195748", "0.7178476", "0.70947015", "0.7060306", "0.6969277", "0.6966887", "0.6966887", "0.69516367", "0.6950923", "0.6910362", "0.6900081", "0.6853406", "0.68480337", "0.68480337", "0.6847385", "0.68309647", "0.68309647", "0.68309647", "0.68309647", "0.67987615", "0.67987615", "0.67930067", "0.6787156", "0.6787156", "0.67793226", "0.67793226", "0.67793226", "0.67793226", "0.67793226", "0.67793226", "0.67793226", "0.67793226", "0.67793226", "0.6755088", "0.67468995", "0.67058223", "0.6696997", "0.6696997", "0.6696997", "0.6696997", "0.6696997", "0.6696997", "0.6696997", "0.6676699", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6676548", "0.6675322", "0.66730154", "0.66730154", "0.66730154", "0.66730154", "0.6667567", "0.6667567", "0.6667567", "0.6667567", "0.6664902", "0.6655968", "0.66336304", "0.6609496", "0.6607314", "0.6607314", "0.6607314", "0.6592069", "0.65899515", "0.65898174", "0.6578905", "0.6578905", "0.6578905", "0.6578905", "0.6578905", "0.6578905", "0.6578905" ]
document_score: 0.66005486
document_rank: 90
Gets the date_created of this MessagingCampaign.
def date_created(self): return self._date_created
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def date_created(self) -> datetime:\n return self._date_created", "def created_date(self):\n return self._created_date", "def created_date(self):\n return self._created_date", "def GetDateCreated(self):\n return str(self.datecreated)", "def date_created(self) -> str:\n return pulumi.get(self, \"date_created\")", "def created_date_time(self) -> str:\n return pulumi.get(self, \"created_date_time\")", "def created_time(self) -> datetime.datetime:\n return self.__created_time", "def created_time(self) -> datetime.datetime:\n return self.__created_time", "def get_account_created_date(self):\n return self.account_created_date", "def created(self) -> datetime:\n return datetime.strptime(self.data['created_at'],\n '%Y-%m-%dT%H:%M:%S.%fZ')", "def date(self):\n return DateTime(self.created)", "def created_at(self):\n return self._domain.created_at", "def created_at(self) -> datetime.datetime:\n return self._created_at", "def created(self):\n return datetime.utcfromtimestamp(self.create_ts)", "def get_created_at(self, instance):\n return instance.created_at.strftime(\"%B %d, %Y\")", "def get_created_at(self, instance):\n return instance.created_at.strftime(\"%B %d, %Y\")", "def get_created_at(self, instance):\n return instance.created_at.strftime(\"%B %d, %Y\")", "def get_creation_time(self):\n return self.get_attr('date_created')", "def created_at(self):\n return self._created_at", "def created_at(self):\n return self._created_at", "def created_at(self):\n return self._created_at", "def created_at(self):\n return self._created_at", "def created_timestamp(self):\n return self._created_timestamp", "def created_at(self):\n return self.getattr('created_at')", "def DateCreated(self, default=None):\n return self.data.get('metadata', {}).get('_created', default)", "def created_at(self):\n return string_to_datetime(self._dict.get('created_at'))", "def created_on(self):\n return self.get_time(\"created_on\")", "def created_at(self) -> str:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> str:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> str:\n return pulumi.get(self, \"created_at\")", "def CreatedAt(self):\n return self._created_at", "def created_at(self):\n return self.data[\"attributes\"][\"createdAt\"]", "def time_created(self):\n return self._time_created", "def time_created(self):\n return self._time_created", "def time_created(self):\n return self._time_created", "def time_created(self):\n return self._time_created", "def created_at(self) -> \"datetime\":\n return self._attrs.get(\"createdAt\")", "def created_at(self) -> \"datetime\":\n return self._attrs.get(\"createdAt\")", "def created_at(self) -> \"datetime\":\n return self._attrs.get(\"createdAt\")", "def get_account_created_date_formatted(self):\n return self.account_created_date_formatted", "def created_at(self) -> datetime:\n return util.to_datetime(self.doc.get('createdAt'))", "def created_at(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_at\")", "def getCreated(self):\n return self.__created", "def created_on(self):\n return 
self._created_on", "def created_on(self):\n return self._created_on", "def created_on(self):\n return self._created_on", "def created_on(self):\n return self._created_on", "def created_date_time(self):\n if \"createdDateTime\" in self._prop_dict:\n return datetime.strptime(self._prop_dict[\"createdDateTime\"].replace(\"Z\", \"\"), \"%Y-%m-%dT%H:%M:%S.%f\")\n else:\n return None", "def created_at(self) -> \"str\":\n return self._attrs.get(\"createdAt\")", "def creation_date(self) -> str:\n return pulumi.get(self, \"creation_date\")", "def creation_date(self) -> str:\n return pulumi.get(self, \"creation_date\")", "def creation_date(self) -> str:\n return pulumi.get(self, \"creation_date\")", "def getCreatedDate(self):\n return _libsbml.ModelHistory_getCreatedDate(self)", "def created(self) -> str:\n return pulumi.get(self, \"created\")", "def created(self) -> str:\n return pulumi.get(self, \"created\")", "def created(self) -> str:\n return pulumi.get(self, \"created\")", "def created_at(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"created_at\")", "def getCreationDate(self):\n return self._creationDate", "def date_created(self):\n date = self._json['author-profile']['date-created']\n if date is not None:\n return (int(date['@year']), int(date['@month']), int(date['@day']))\n else:\n return (None, None, None)", "def date(self):\n return self.status.created_at", "def creationTime(self):\n \n if not self.logMessage is None :\n return self.logMessage[\"date\"]", "def created_at(self) -> Optional[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[str]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[str]:\n return pulumi.get(self, \"created_at\")", "def created_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_time\")", "def created_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_time\")", "def Created(self):\n return self._get_attr('Created')", "def get_creation_time(self):\n return self.creation_time", "def created_at(self):\n created_at = self.joined_at\n if created_at is None:\n created_at = DISCORD_EPOCH_START\n \n return created_at", "def created_at(self) -> datetime:\n return utils.snowflake_time(self.id)", "def get_inbound_statement_details_created_date(self):\n return self.get_text_from_element(self.inbound_statements_details_created_date_locator, False)", "def created(self) -> datetime.datetime:\n # REMARK: On Unix systems getctime() returns the time of most recent\n # metadata change, but not the creation.\n # https://stackoverflow.com/questions/237079/how-do-i-get-file-creation-and-modification-date-times\n # https://docs.python.org/3/library/os.html#os.stat_result\n if platform.system() == \"Windows\":\n timestamp = os.path.getctime(self._manifest_path)\n\n else:\n stat = os.stat(self._manifest_path)\n try:\n timestamp = stat.st_birthtime\n except AttributeError:\n timestamp = stat.st_mtime\n\n return datetime.datetime.fromtimestamp(timestamp)", "def created_on(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"created_on\")", "def prepare_actor_created_date(self, object):\n if object.actor_created is not None:\n return object.actor_created.date()\n else:\n return ''", "def creation_date_time(self) -> 
Optional[str]:\n return pulumi.get(self, \"creation_date_time\")", "def create_time(self):\n return self._create_time", "def create_time(self):\n return self._create_time", "def create_time(self):\n return self._create_time", "def created_at(self):\n return self.viztrail.created_at", "def creation_datetime(self) -> datetime:\n return utc_to_local(self._db_data.creation_datetime)", "def time_created(self) -> str:\n return pulumi.get(self, \"time_created\")", "def time_created(self) -> str:\n return pulumi.get(self, \"time_created\")", "def time_created(self) -> str:\n return pulumi.get(self, \"time_created\")", "def created_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_at\")", "def created_at(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"created_at\")" ]
[ "0.797184", "0.78968954", "0.78968954", "0.75586146", "0.7542879", "0.7494075", "0.73534364", "0.73534364", "0.73233813", "0.7312706", "0.7265856", "0.7256938", "0.7255421", "0.7170623", "0.7149717", "0.7149717", "0.7149717", "0.71311", "0.7127949", "0.7127949", "0.7127949", "0.7127949", "0.71051383", "0.70877093", "0.70616263", "0.70580035", "0.70540905", "0.7028556", "0.7028556", "0.7028556", "0.7015403", "0.69555753", "0.69373775", "0.69373775", "0.69373775", "0.69373775", "0.69168466", "0.69168466", "0.69168466", "0.6893304", "0.6859956", "0.68445516", "0.68445516", "0.68445516", "0.68445516", "0.68445516", "0.68445516", "0.6833425", "0.68016446", "0.68016446", "0.68016446", "0.68016446", "0.67978436", "0.6762151", "0.6745998", "0.6745998", "0.6745998", "0.673481", "0.6715385", "0.6715385", "0.6715385", "0.66998094", "0.66844386", "0.66507596", "0.6631651", "0.6628301", "0.66272414", "0.66272414", "0.66272414", "0.66272414", "0.66272414", "0.66272414", "0.65629715", "0.65629715", "0.65507984", "0.6548559", "0.6547699", "0.6547305", "0.6527047", "0.64931834", "0.6491831", "0.6454655", "0.64117444", "0.63434076", "0.63434076", "0.63434076", "0.6324402", "0.6322916", "0.63082546", "0.63082546", "0.63082546", "0.6279078", "0.6279078", "0.6279078", "0.6279078", "0.6279078", "0.6279078", "0.62711966" ]
document_score: 0.7941877
document_rank: 3
Sets the date_created of this MessagingCampaign.
def date_created(self, date_created): self._date_created = date_created
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def date_created(self, date_created: datetime):\n\n self._date_created = date_created", "def date_created(self, date_created):\n\n self._date_created = date_created", "def date_created(self, date_created):\n\n self._date_created = date_created", "def date_created(self, date_created):\n\n self._date_created = date_created", "def date_created(self, date_created):\n self._date_created = date_created", "def date_created(self, date_created):\n self._date_created = date_created", "def datecreated(self, datecreated):\n\n self._datecreated = datecreated", "def created_date(self, created_date):\n\n self._created_date = created_date", "def created_date(self, created_date):\n\n self._created_date = created_date", "def created_date(self, created_date):\n\n self._created_date = created_date", "def created_date(self, created_date):\n\n self._created_date = created_date", "def created_date(self, created_date):\n\n self._created_date = created_date", "def created_date(self, created_date):\n self._created_date = created_date", "def SetDateCreated(self, date):\n self.datecreated = str(date)", "def created_date(self, created_date):\n if created_date is None:\n raise ValueError(\"Invalid value for `created_date`, must not be `None`\") # noqa: E501\n\n self._created_date = created_date", "def set_account_created_date(self, account_created_date):\n self.account_created_date = account_created_date", "def created(self, created):\n if created is None:\n raise ValueError(\"Invalid value for `created`, must not be `None`\")\n\n self._created = created", "def create_date(self, create_date):\n\n self._create_date = create_date", "def create_date(self, create_date):\n\n self._create_date = create_date", "def create_date(self, create_date):\n\n self._create_date = create_date", "def set_created(self, dt):\n self.created = dt_to_iso(dt)", "def creation_date(self, creation_date):\n\n self._creation_date = creation_date", "def creation_date(self, creation_date):\n\n self._creation_date = creation_date", "def created_timestamp(self, created_timestamp):\n self._created_timestamp = created_timestamp", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def created(self, created):\n\n self._created = created", "def set_account_created_date_formatted(self, account_created_date_formatted):\n self.account_created_date_formatted = account_created_date_formatted", "def setCreatedDate(self, *args):\n return _libsbml.ModelHistory_setCreatedDate(self, *args)", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def 
created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n\n self._created_at = created_at", "def created_at(self, created_at):\n self._created_at = created_at", "def set_created_date(self, doc, created):\n if not self.created_date_set:\n self.created_date_set = True\n date = utils.datetime_from_iso_format(created)\n if date is not None:\n doc.creation_info.created = date\n return True\n else:\n raise SPDXValueError('CreationInfo::Date')\n else:\n raise CardinalityError('CreationInfo::Created')", "def time_created(self, time_created):\n self._time_created = time_created", "def time_created(self, time_created):\n self._time_created = time_created", "def time_created(self, time_created):\n self._time_created = time_created", "def time_created(self, time_created):\n self._time_created = time_created", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created_on(self, created_on):\n\n self._created_on = created_on", "def created(self, created):\n if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501\n raise ValueError(\"Invalid value for `created`, must not be `None`\") # noqa: E501\n\n self._created = created", "def created_date_utc(self, created_date_utc):\n\n self._created_date_utc = created_date_utc", "def set_creation_date(self, creation_date):\n\t\t\n\t\tif (creation_date.__class__ != str or creation_date ==\"\") and (creation_date.__class__ != time.struct_time or len(creation_date) != 9 ):\n\t\t\traise InvalidParameterError(\"creation_date\", \"creation_date is not in a proper format\")\n\t\ttry:\n\t\t\tif creation_date.__class__ == str:\n\t\t\t\ttmp_cd = time.strptime(creation_date, '%S %M %H %d %m %Y')\n\t\t\telif creation_date.__class__ == time.struct_time:\n\t\t\t\ttmp_cd = creation_date\n\t\t\tself.__creation_date = datetime(tmp_cd[0], tmp_cd[1], tmp_cd[2], tmp_cd[3], tmp_cd[4], tmp_cd[5])\n\t\texcept:\n\t\t\traise InvalidDate, \"date is not valid creation_date is not in a proper format\"", "def created_at(self, created_at: \"datetime\"):\n self._attrs[\"createdAt\"] = created_at", "def created_at(self, created_at: \"datetime\"):\n self._attrs[\"createdAt\"] = created_at", "def created_at(self, created_at: \"datetime\"):\n self._attrs[\"createdAt\"] = created_at", "def created_user(self, created_user):\n self._created_user = created_user", "def created(self, created):\n if (\n self.local_vars_configuration.client_side_validation and created is None\n ): # noqa: E501\n raise ValueError(\n \"Invalid value 
for `created`, must not be `None`\"\n ) # noqa: E501\n\n self._created = created", "def date_created(self) -> datetime:\n return self._date_created", "def created_by(self, created_by):\n\n self._created_by = created_by", "def created_by(self, created_by):\n\n self._created_by = created_by", "def created_by(self, created_by):\n\n self._created_by = created_by", "def created_by(self, created_by):\n\n self._created_by = created_by", "def created_by(self, created_by):\n\n self._created_by = created_by", "def created_by(self, created_by):\n\n self._created_by = created_by", "def create(self):\n self.created_date = timezone.now()\n self.save()", "def save(self, *args, **kwargs):\n if not self.created_date:\n self.created_date = datetime.utcnow()\n \n self.modified_date = datetime.utcnow()\n \n super(Message, self).save(*args, **kwargs)", "def created_at(self, created_at):\n if self.local_vars_configuration.client_side_validation and created_at is None: # noqa: E501\n raise ValueError(\"Invalid value for `created_at`, must not be `None`\") # noqa: E501\n\n self._created_at = created_at", "def created_at(self, created_at):\n if created_at is None:\n raise ValueError(\"Invalid value for `created_at`, must not be `None`\") # noqa: E501\n\n self._created_at = created_at", "def created_at(self, created_at):\n if created_at is None:\n raise ValueError(\"Invalid value for `created_at`, must not be `None`\") # noqa: E501\n\n self._created_at = created_at", "def created_at(self, created_at: \"str\"):\n self._attrs[\"createdAt\"] = created_at", "def created_date(self):\n return self._created_date", "def created_date(self):\n return self._created_date", "def date_created(self):\n return self._date_created", "def date_created(self):\n return self._date_created", "def date_created(self):\n return self._date_created", "def create_posted_on_property(self):\n self.posted_on = self.added_on.date", "def set_datetime(self, date):\n self.date = date", "def created_by_id(self, created_by_id):\n\n self._created_by_id = created_by_id", "def created_at_gt(self, created_at_gt):\n\n self._created_at_gt = created_at_gt", "def created_by_id(self, created_by_id):\n self._created_by_id = created_by_id" ]
[ "0.80591285", "0.79904276", "0.79904276", "0.79904276", "0.7909916", "0.7909916", "0.7818758", "0.7676886", "0.7676886", "0.7676886", "0.7676886", "0.7676886", "0.7632258", "0.759261", "0.7258268", "0.69510114", "0.6808352", "0.67060393", "0.67060393", "0.67060393", "0.664208", "0.6581187", "0.6581187", "0.65647835", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.65502775", "0.6490255", "0.63863945", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.6352084", "0.63414496", "0.63194835", "0.63026327", "0.63026327", "0.63026327", "0.63026327", "0.6248938", "0.6248938", "0.6248938", "0.6248938", "0.6248938", "0.6248938", "0.6248938", "0.61464065", "0.6109187", "0.6105798", "0.60755086", "0.60755086", "0.60755086", "0.6070914", "0.60608894", "0.60546595", "0.6023894", "0.6023894", "0.6023894", "0.6023894", "0.6023894", "0.6023894", "0.6010024", "0.59796417", "0.59598935", "0.5956074", "0.5956074", "0.5936894", "0.5815544", "0.5815544", "0.5798427", "0.5798427", "0.5798427", "0.57285875", "0.57251036", "0.5654983", "0.5579014", "0.557512" ]
0.80568826
1
Gets the date_modified of this MessagingCampaign.
def date_modified(self): return self._date_modified
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def last_modified_at(self) -> str:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> str:\n return pulumi.get(self, \"last_modified_at\")", "def get_last_modified_date(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetLastModifiedDate', self.handle)", "def modified(self) -> datetime.datetime:\n timestamp = os.path.getmtime(self._manifest_path)\n\n return datetime.datetime.fromtimestamp(timestamp)", "def get_inbound_statement_details_last_modified_date(self):\n return self.get_text_from_element(self.inbound_statements_details_last_modified_date_locator, False)", "def modified_timestamp(self) -> str:\n return pulumi.get(self, \"modified_timestamp\")", "def modified_timestamp(self) -> str:\n return pulumi.get(self, \"modified_timestamp\")", "def get_last_modified_date(self):\n\t\treturn call_sdk_function('PrlFsEntry_GetLastModifiedDate', self.handle)", "def last_modified(self):\n return os.path.getmtime(self.filename)", "def last_modified_at(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_time(self) -> str:\n return pulumi.get(self, \"last_modified_time\")", "def last_modified_time(self) -> str:\n return pulumi.get(self, \"last_modified_time\")", "def time_modified(self) -> str:\n return pulumi.get(self, \"time_modified\")", "def time_modified(self) -> str:\n return pulumi.get(self, \"time_modified\")", "def last_modified(self) -> str:\n\t\tif not self._closed:\n\t\t\ttimestamp = self.ds.last_modified()\n\t\t\treturn timestamp\n\t\treturn None", "def last_modified_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_modified_time\")", "def last_modified_at(self):\n return self.viztrail.last_modified_at", "def last_modified_date_time(self):\n if \"lastModifiedDateTime\" in self._prop_dict:\n return datetime.strptime(self._prop_dict[\"lastModifiedDateTime\"].replace(\"Z\", \"\"), \"%Y-%m-%dT%H:%M:%S.%f\")\n else:\n return None", "def time_last_modified(self):\n return self.properties.get(\"TimeLastModified\", None)", "def last_modified(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_modified\")", "def modification_time(self) -> str:\n return pulumi.get(self, \"modification_time\")", "def getModifiedDate(self, *args):\n return _libsbml.ModelHistory_getModifiedDate(self, *args)", "def last_modified_at(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"last_modified_at\")", "def updated_date(self):\n return self._updated_date", "def updated_date(self):\n return self._updated_date", "def get_mtime(self):\n return os.path.getmtime(self.get_path())", "def modified(self) -> datetime:\n # TODO: Should this be overridden for LocalDirectoryAsset?\n return datetime.fromtimestamp(self.filepath.stat().st_mtime).astimezone()", "def last_modified(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified\")", "def last_modified(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified\")", "def modified(self):\n return self.properties.get(\"Modified\", datetime.min)", "def was_modified(self):\n\n return 
self.__modified", "def get_outbound_statement_summary_last_modified_date(self):\n return self.get_text_from_element(self.outbound_statements_summary_last_modified_date_date_locator, False)", "def last_modified_dts(self):\n return self._last_modified_dts", "def was_modified(self):\n return self.modified", "def last_modified_time(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"last_modified_time\")", "def get_last_modified() -> str:\n service = get_authenticated_service(\"drive\", \"v3\")\n response = (\n service.files().get(fileId=SPREADSHEET_ID, fields=\"modifiedTime\").execute()\n )\n return response[\"modifiedTime\"]", "def get_last_updated_at(self):\n return self.last_updated", "def mtime(self) -> str:\n return self._mtime", "def modified(self):\n return self._modified", "def modified(self):\n return self._modified", "def modified(self):\n return self._modified", "def modified(self):\n return self._modified", "def last_modified(self) -> str:\n\t\tif self.name == \"\":\n\t\t\tif \"last_modified\" in self.ds._file[\"/matrix\"].attrs:\n\t\t\t\treturn self.ds._file[\"/matrix\"].attrs[\"last_modified\"]\n\t\t\telif self.ds._file.mode == 'r+':\n\t\t\t\tself.ds._file[\"/matrix\"].attrs[\"last_modified\"] = timestamp()\n\t\t\t\tself.ds._file.flush()\n\t\t\t\treturn self.ds._file[\"/matrix\"].attrs[\"last_modified\"]\n\n\t\tif self.name != \"\":\n\t\t\tif \"last_modified\" in self.ds._file[\"/layers/\" + self.name].attrs:\n\t\t\t\treturn self.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"]\n\t\t\telif self.ds._file.mode == 'r+':\n\t\t\t\tself.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"] = timestamp()\n\t\t\t\tself.ds._file.flush()\n\t\t\t\treturn self.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"]\n\n\t\treturn timestamp()", "def prepare_actor_modified_date(self, object):\n if object.actor_modified is not None:\n return object.actor_modified.date()\n else:\n return ''", "def get_mtime(self):\n if settings.DEBUG:\n return os.path.getmtime(self.get_path())\n return staticfiles_storage.modified_time(self.get_name())", "def _get_date_modified(path):\n return str(datetime.datetime.fromtimestamp(os.path.getmtime(path)))", "def last_updated_time(self) -> datetime.datetime:\n return self.__last_updated_time", "def last_updated_time(self) -> datetime.datetime:\n return self.__last_updated_time", "def get_last_modified_value(self):\n return self.get_text_from_element(self.last_modified_value_locator)", "def get_destinations_grid_last_modified_date(self):\n return self.get_specific_column_value_from_grid(self.destinations_grid_div_id, self.destinations_grid_row_count, self.last_modified_column_name)", "def modified(self):\n return self.__modified", "def modified(self):\n return self.__modified", "def last_modified_by(self):\n return self._last_modified_by", "def get_mtime(self):\n if not os.path.exists(self.file_name):\n return None\n st = os.stat(self.file_name)\n return st.st_mtime", "def last_updated(self):\n return self._last_updated", "def get_mtime(self):\n storage = getattr(self._file, \"storage\", None)\n if storage:\n return storage.modified_time(self._file.name)\n return super(FileAsset, self).get_mtime()", "def get_mod_time(self):\n if self.file_meta[:2] == b'bp':\n file_meta_plist = ccl_bplist.load(BytesIO(self.file_meta))\n raw_date_time = file_meta_plist['$objects'][1]['LastModified']\n converted_time = datetime.datetime.fromtimestamp(raw_date_time)\n converted_time = converted_time.timetuple()\n return converted_time\n else:\n 
file_meta_plist = plistlib.loads(self.file_meta)\n return file_meta_plist['modified'].timetuple()", "def mtime(self):\r\n return self.info().mtime", "def get_modified_time(self, name):\n full_path = self.path(name)\n return self.__volume.getmtime(full_path)", "def last_updated(self):\n try:\n date_ = parse(self._data.get('last_updated'))\n except (ValueError, TypeError):\n date_ = None\n return date_", "def get_rates_grid_last_modified_date(self):\n return self.get_specific_column_value_from_grid(self.rates_grid_div_id, self.rates_grid_row_count, self.last_modified_column_name)", "def get_price_list_modified_date_column_value(self):\n return self.get_specific_column_value_from_grid(self.vendor_price_list_grid_div_id, self.price_lists_grid_row_count, self.modified_date_column_name)", "def get_dialed_digits_grid_last_modified_date(self):\n return self.get_specific_column_value_from_grid(self.dialed_digits_grid_div_id, self.dialed_digits_grid_row_count, self.modified_date_column_name)", "def mtime(self):\n\n return os.stat(self.filename).st_mtime", "def updated(self) -> datetime:\n return self._updated", "def updated_at(self):\n return self._updated_at", "def updated_at(self):\n return self._updated_at", "def updated_at(self):\n return self._updated_at", "def last_modified_by(self) -> str:\n return pulumi.get(self, \"last_modified_by\")", "def last_updated(self) -> str:\n return self._last_updated", "def lastmod(self, obj):\n return obj.modified", "def last_edited(self):\n return self._last_edited", "def getmtime(self):\n if self.exists():\n return os.path.getmtime(self.path)\n return 0", "def mtime(self) -> float:\n return self.stat().mtime", "def get_modified_time(self, name):\n return self.cache.get(name).time", "def date_modified(self, date_modified):\n \n self._date_modified = date_modified", "def DateUpdated(self, default=None):\n return self.data.get('metadata', {}).get('_updated', default)", "def updated_datetime(self) -> datetime:\n return utc_to_local(self._db_data.updated_datetime)", "def get_mtime(self):\n return max(asset.get_mtime() for asset in self._assets)", "def updated_at(self) -> \"datetime\":\n return self._attrs.get(\"updatedAt\")", "def updated_at(self) -> \"datetime\":\n return self._attrs.get(\"updatedAt\")", "def updated_at(self) -> \"datetime\":\n return self._attrs.get(\"updatedAt\")", "def getmtime(self, path):\n return os.path.getmtime(path)", "def get_file_modified_date(filepath):\n return datetime.datetime.fromtimestamp(os.path.getmtime(filepath))", "def getLastModifiedTime(self): #$NON-NLS-1$\r", "def last_updated_time(self) -> str:\n return pulumi.get(self, \"last_updated_time\")", "def last_modified():\n return \"Last modified: %s\" % time.ctime(os.path.getmtime(FILE_NAME))", "def updated_at(self) -> str:\n return pulumi.get(self, \"updated_at\")", "def last_modified_by(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_by\")", "def last_modified_by(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_by\")", "def last_modified_by(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_by\")", "def last_modified_by(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_by\")", "def last_modified_by(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_by\")", "def last_modified_by(self) -> Optional[str]:\n return pulumi.get(self, \"last_modified_by\")", "def get_file_modification_time(self, filename):\n mtime = None\n if os.path.exists(filename):\n mtime = 
datetime.datetime.fromtimestamp(os.path.getmtime(filename))\n return mtime" ]
[ "0.72402114", "0.72402114", "0.71875286", "0.7092818", "0.70100117", "0.6977944", "0.6977944", "0.69701886", "0.69357944", "0.69300026", "0.69300026", "0.69300026", "0.69300026", "0.69300026", "0.69300026", "0.6924368", "0.6924368", "0.6866872", "0.6866872", "0.68586713", "0.6826732", "0.6785116", "0.6763307", "0.6762151", "0.6754415", "0.6688579", "0.66802037", "0.66784215", "0.6675155", "0.6675155", "0.6657474", "0.6651359", "0.6607662", "0.6607662", "0.6538242", "0.6517688", "0.65170443", "0.65164644", "0.6473249", "0.6431642", "0.64156353", "0.6375394", "0.636482", "0.633194", "0.633194", "0.633194", "0.633194", "0.632083", "0.62887335", "0.6285671", "0.62797785", "0.6249755", "0.6249755", "0.6244424", "0.6242369", "0.62188894", "0.62188894", "0.62174547", "0.62154114", "0.620232", "0.61916625", "0.6190963", "0.6161271", "0.61356664", "0.6128718", "0.6114805", "0.61084527", "0.610764", "0.6088944", "0.6078668", "0.6077818", "0.6077818", "0.6077818", "0.6069218", "0.60643363", "0.60642844", "0.6026172", "0.60134476", "0.5996704", "0.59962237", "0.59812766", "0.59781426", "0.5976783", "0.59046644", "0.59030133", "0.59030133", "0.59030133", "0.589151", "0.58799785", "0.5862056", "0.5860276", "0.58515394", "0.5842514", "0.5833662", "0.5833662", "0.5833662", "0.5833662", "0.5833662", "0.5833662", "0.5829807" ]
0.81347257
0
Sets the date_modified of this MessagingCampaign.
def date_modified(self, date_modified): self._date_modified = date_modified
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def date_modified(self, date_modified):\n\n self._date_modified = date_modified", "def modified_date(self, modified_date):\n\n self._modified_date = modified_date", "def modified_date(self, modified_date):\n\n self._modified_date = modified_date", "def modified_at(self, modified_at):\n\n self._modified_at = modified_at", "def modified_at(self, modified_at):\n\n self._modified_at = modified_at", "def setModifiedDate(self, *args):\n return _libsbml.ModelHistory_setModifiedDate(self, *args)", "def set_modified(self, dt):\n self.modified = dt_to_iso(dt)", "def set_modified(self, dt):\n self.modified = dt_to_iso(dt)", "def modified(self, modified):\n\n self._modified = modified", "def last_modified(self, last_modified):\n\n self._last_modified = last_modified", "def set_modification_date(self, modification_date):\n\t\t\n\t\tif (modification_date.__class__ != str or modification_date ==\"\") and (modification_date.__class__ != time.struct_time or len(modification_date) != 9 ):\n\t\t\traise InvalidParameterError(\"modification_date\", \"modification_date is not in a proper format\")\n\t\ttry:\n\t\t\tif modification_date.__class__ == str:\n\t\t\t\ttmp_md = time.strptime(modification_date, '%S %M %H %d %m %Y')\n\t\t\telif modification_date.__class__ == time.struct_time:\n\t\t\t\ttmp_md = modification_date\n\t\t\tself.__modification_date = datetime(tmp_md[0], tmp_md[1], tmp_md[2], tmp_md[3], tmp_md[4], tmp_md[5])\t\n\t\texcept:\n\t\t\traise InvalidDate, \"date is not valid modification_date is not in a proper format\"", "def last_modified_on(self, last_modified_on):\n\n self._last_modified_on = last_modified_on", "def updated_date(self, updated_date):\n\n self._updated_date = updated_date", "def updated_date(self, updated_date):\n\n self._updated_date = updated_date", "def last_modified_dts(self, last_modified_dts):\n\n self._last_modified_dts = last_modified_dts", "def date_modified(self):\n return self._date_modified", "def updated_date(self, updated_date):\n self._updated_date = updated_date", "def set_modified_since(self, data):\n self.add_payload('modifiedSince', data)", "def last_modification(self, last_modification):\n\n self._last_modification = last_modification", "def set_modified(obj, *args):\n dt = datetime.datetime(*args, tzinfo=pytz.utc)\n zope.dublincore.interfaces.IZopeDublinCore(obj).modified = dt\n return dt", "def set_datetime(self, date):\n self.date = date", "def last_modified_by(self, last_modified_by):\n\n self._last_modified_by = last_modified_by", "def last_modified_by(self, last_modified_by):\n\n self._last_modified_by = last_modified_by", "def isSetModifiedDate(self):\n return _libsbml.ModelHistory_isSetModifiedDate(self)", "def change_modified_date(sbml):\n history = sbml.getModel().getModelHistory()\n if history:\n history.setModifiedDate(libsbml.Date(w3c_time()))\n # remove all but final modified date\n while history.getListModifiedDates().getSize() > 1:\n history.getListModifiedDates().remove(0)", "def modified_attribute_id(self, modified_attribute_id):\n\n 
self._modified_attribute_id = modified_attribute_id", "def save(self, *args, **kwargs):\n if not self.created_date:\n self.created_date = datetime.utcnow()\n \n self.modified_date = datetime.utcnow()\n \n super(Message, self).save(*args, **kwargs)", "def mod_date(self, mod_date):\n\n self._mod_date = mod_date", "def modified_author(self, modified_author):\n\n self._modified_author = modified_author", "def updated_date(self, updated_date):\n if updated_date is None:\n raise ValueError(\"Invalid value for `updated_date`, must not be `None`\") # noqa: E501\n\n self._updated_date = updated_date", "def modified_object(obj, event):\n now = datetime.now(tz=_zone)\n obj.modification_date = now", "def last_updated(self, last_updated):\n\n self._last_updated = last_updated", "def last_updated(self, last_updated):\n\n self._last_updated = last_updated", "def last_updated(self, last_updated):\n\n self._last_updated = last_updated", "def last_updated(self, last_updated):\n\n self._last_updated = last_updated", "def modified(self) -> datetime.datetime:\n timestamp = os.path.getmtime(self._manifest_path)\n\n return datetime.datetime.fromtimestamp(timestamp)", "def save(self, *args, **kwargs):\n self.modified_at = datetime.datetime.utcnow()\n return super().save(*args, **kwargs)", "def save(self, *args, **kwargs):\n self.modified_at = datetime.datetime.utcnow()\n return super().save(*args, **kwargs)", "def set_modified(self, value):\n self.modified = value\n self.save_button.setEnabled(value)", "def set_modified(self, value):\n self.modified = value\n self.save_button.setEnabled(value)", "def prepare_actor_modified_date(self, object):\n if object.actor_modified is not None:\n return object.actor_modified.date()\n else:\n return ''", "def _update_modified_since(self, timestamp):\n pass", "def updated(self, updated: datetime):\n\n self._updated = updated", "def set_uploaded_date_long(self, uploaded_date_long):\n self.uploaded_date_long = uploaded_date_long", "def modify(self, modify):\n if modify is None:\n raise ValueError(\"Invalid value for `modify`, must not be `None`\") # noqa: E501\n\n self._modify = modify", "def updated_date(self, updated_date):\n if updated_date is None:\n raise ValueError(\"Invalid value for `updated_date`, must not be `None`\") # noqa: E501\n if updated_date is not None and len(updated_date) < 1:\n raise ValueError(\"Invalid value for `updated_date`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._updated_date = updated_date", "def datefinished(self, datefinished):\n\n self._datefinished = datefinished", "def last_updated(self, last_updated: str):\n\n self._last_updated = last_updated", "def last_modified_by(self, last_modified_by):\n if last_modified_by is not None and len(last_modified_by) > 100:\n raise ValueError(\"Invalid value for `last_modified_by`, length must be less than or equal to `100`\")\n\n self._last_modified_by = last_modified_by", "def datecreated(self, datecreated):\n\n self._datecreated = datecreated", "def date_finished(self, date_finished):\n self._date_finished = date_finished", "def revision_date(self, revision_date):\n\n self._revision_date = revision_date", "def update_date(self, update_date):\n\n self._update_date = update_date", "def update_date(self, update_date):\n\n self._update_date = update_date", "def set_date(self, date):\n self.date = date\n return", "def set_key_modified(self, key, modification):\n\n\t\tif key is not None and not isinstance(key, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED 
TYPE: str', None, None)\n\t\t\n\t\tif modification is not None and not isinstance(modification, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None)\n\t\t\n\t\tself.__key_modified[key] = modification", "def set_key_modified(self, key, modification):\n\n\t\tif key is not None and not isinstance(key, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)\n\t\t\n\t\tif modification is not None and not isinstance(modification, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None)\n\t\t\n\t\tself.__key_modified[key] = modification", "def set_key_modified(self, key, modification):\n\n\t\tif key is not None and not isinstance(key, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)\n\t\t\n\t\tif modification is not None and not isinstance(modification, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None)\n\t\t\n\t\tself.__key_modified[key] = modification", "def setLastModified(when):", "def addModifiedDate(self, *args):\n return _libsbml.ModelHistory_addModifiedDate(self, *args)", "def set_date(self, date):\n self.date = date", "def date_created(self, date_created):\n \n self._date_created = date_created", "def date_created(self, date_created):\n\n self._date_created = date_created", "def date_created(self, date_created):\n\n self._date_created = date_created", "def date_created(self, date_created):\n\n self._date_created = date_created", "def getModifiedDate(self, *args):\n return _libsbml.ModelHistory_getModifiedDate(self, *args)", "def set_last_played_timestamp(self, date: str):\n self.has_been_played = True\n self.last_played_timestamp = (get_timestamp_from_date(date))", "def _datetime(self, _datetime):\n\n self.__datetime = _datetime", "def date_created(self, date_created):\n self._date_created = date_created", "def date_created(self, date_created):\n self._date_created = date_created", "def get_last_modified_date(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetLastModifiedDate', self.handle)", "def SetDateCreated(self, date):\n self.datecreated = str(date)", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def updated_at(self, updated_at):\n\n self._updated_at = updated_at", "def receipt_date(self, receipt_date):\n\n self._receipt_date = receipt_date", "def receipt_date(self, receipt_date):\n\n self._receipt_date = receipt_date", "def updated_at(self, updated_at: \"datetime\"):\n self._attrs[\"updatedAt\"] = updated_at", "def updated_at(self, updated_at: \"datetime\"):\n self._attrs[\"updatedAt\"] = updated_at", "def updated_at(self, updated_at: \"datetime\"):\n self._attrs[\"updatedAt\"] = updated_at", "def date_created(self, date_created: datetime):\n\n self._date_created = date_created", "def date_time(self, date_time):\n\n self._date_time = 
date_time", "def updated_on(self, updated_on):\n\n self._updated_on = updated_on", "def svn_info_t_last_changed_date_set(svn_info_t_self, apr_time_t_last_changed_date): # real signature unknown; restored from __doc__\n pass", "def update_from_changeset(self, changeset, update_sender=None, update_recipient=None):\n modified_dt = iso8601.parse_date(changeset[\"modified\"]).replace(tzinfo=None)\n self.modified = modified_dt\n\n self.set_state(changeset[\"state\"])\n\n app.logger.info(\"Updated {} from changeset\".format(self))", "def save(self, *args, **kwargs):\n self.modify_ts = datetime.now()\n super(ModelBase, self).save(*args, **kwargs)", "def hasBeenModified(self):\n return _libsbml.Date_hasBeenModified(self)" ]
[ "0.7839285", "0.7839285", "0.7839285", "0.7839285", "0.7839285", "0.7839285", "0.7839285", "0.75868607", "0.75868607", "0.64068264", "0.64068264", "0.63872856", "0.6339021", "0.6339021", "0.6242144", "0.61531746", "0.6107971", "0.6012136", "0.5762818", "0.5762818", "0.575947", "0.574503", "0.5740442", "0.5457705", "0.5421706", "0.54139584", "0.53219634", "0.5305048", "0.5305048", "0.52821285", "0.5266588", "0.52590424", "0.5242473", "0.52197677", "0.5203363", "0.5179876", "0.51304674", "0.5088369", "0.5088369", "0.5088369", "0.5088369", "0.50825304", "0.50688595", "0.50688595", "0.5059808", "0.5059808", "0.50413316", "0.5036368", "0.5014765", "0.4994091", "0.49556696", "0.4951449", "0.4925051", "0.49183717", "0.49115962", "0.49083635", "0.49073753", "0.48948222", "0.48855355", "0.48855355", "0.48820218", "0.48679125", "0.48679125", "0.48679125", "0.48513758", "0.48510158", "0.4842066", "0.48400623", "0.48197824", "0.48197824", "0.48197824", "0.47865555", "0.47828418", "0.47786278", "0.47661614", "0.47661614", "0.47554907", "0.4737057", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47322828", "0.47185293", "0.47185293", "0.46942568", "0.46942568", "0.46942568", "0.46858734", "0.46844566", "0.46694508", "0.46617714", "0.46545643", "0.46314427", "0.4622019" ]
0.7893535
0
Gets the version of this MessagingCampaign. Required for updates, must match the version number of the most recent update
def version(self): return self._version
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_version(self):\n return self.bot_data_file[\"version\"]", "def get_version(self):\n return self.__make_api_call('get/version')", "def get_version(self):\n return self._version", "def get_version(self):\n return self._version", "def version(self):\n self._get_latest_content()\n return self._data.get('version', None)", "def get_version(self):\n return self.version", "def get(self):\n return self._version", "def get_version(self):\n return self.api_version", "def getVersion(self):\n return self.get('Version', type=\"numeric\")", "def Version(self):\n if self.force_auto_sync:\n self.get('Version')\n return self._Version", "def getversion(self):\n return self.__version", "def get_version(self):\n return version.__version__", "def get_version(self):\n return version.__version__", "def get_version(self):\n return self.http_call(\"get\", url=f\"{self.base_url}/version\").json()", "def version(self):\n return self._get(\"version\")", "def version(self):\n\n return self.manifest[\"version\"]", "def get_version(self):\n data = self._get('app_version')\n return data['version']", "def version(self):\n if not self._version:\n self._version = self._get_version()\n\n return self._version", "def version(self):\n return self._client.getVersion()", "def get_version(self):\r\n\r\n return self.versions[0].number", "def getVersion(self):\n return _libsbml.SBase_getVersion(self)", "def get_version(self) -> str:\n return versioning.get_version()", "def get_version(self):\n\n r = self._create_operation_request(self, method=\"GET\")\n root_info = send_session_request(self._session, r).json()\n return root_info[\"currentVersion\"]", "def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['version']\n except Exception as e:\n pass\n return ''", "def get_version(self):\n url = '{}/v2/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['version']\n except Exception as e:\n pass\n return ''", "def version(self):\n\n return self._version", "def version(self):\n _, body = self.request('/', 'GET')\n return body.get('version', None)", "def getPackageVersion(self):\n return _libsbml.SBasePlugin_getPackageVersion(self)", "def get_version(self):\n pass", "def query_version(self):\n return self.connection.cursor().execute('SELECT version()').fetchone()[0]", "def GetVersion(self):\n return self._SendRequest(HTTP_GET, \"/version\", None, None)", "def getVersion(self):\n return _libsbml.SBasePlugin_getVersion(self)", "def getPackageVersion(self):\n return _libsbml.SBase_getPackageVersion(self)", "def version(self):\n return self.get_current_version()", "def Version(self):\n return self._version", "def version(self):\n if not hasattr(self, '_version'):\n self._version = self._get_package_version()\n return self._version", "def version(self):\n return self.proto.details.appDetails.versionString", "def get_version(self, params):\n return self.version", "def get_version(self):\n res = requests.get(self.base_url + '/version')\n\n return res", "def getPackageVersion(self):\n return _libsbml.CompBase_getPackageVersion(self)", "def get_version(self):\n return self._harvester_version", "async def version(self) -> str:\n response = await self._request(\"status\")\n return response[\"version\"]", "def get_version(self):\n self._send_command(self._adapter.get_version())", "def version(self):\n return self.__version", "def version(self):\n return self.__version", "def version(self):\n if \"version\" in 
self._prop_dict:\n return self._prop_dict[\"version\"]\n else:\n return None", "def _get_version(self):\n if _cbc_version is None:\n return _extract_version('')\n return _cbc_version", "async def get_version(self) -> Version:\n response = await self._http_requests.get(build_url(Paths.VERSION))\n return Version(**response.json())", "def version(self) -> 'outputs.VersionResponse':\n return pulumi.get(self, \"version\")", "def version(self) -> int:\n return self._version", "def version(self) -> str:\n\n return self._version", "def GetVersion(self):\n if self._addonVersion:\n return self._addonVersion\n elif self.IsValid():\n return AddonVersion(self._info.GetDisplayVersion())\n else:\n return AddonVersion()", "def version(self):\r\n return self.version_guid", "def changed_version(self):\r\n try:\r\n return CampaignChange.objects.get(campaign__pk=self.pk)\r\n except CampaignChange.DoesNotExist:\r\n return None", "def version(self) -> str:\n return self._version", "def version(self) -> str:\n return self._version", "def version(self) -> str:\n return self._version", "def version(self) -> str:\n return self._version", "def version(self) -> str:\n return self._version", "def api_version(self):\n\n return self._api_version", "def getVersion(self, *args):\n return _libsbml.SBMLExtension_getVersion(self, *args)", "def getVersion(self, *args):\n return _libsbml.FbcExtension_getVersion(self, *args)", "def getPackageVersion(self, *args):\n return _libsbml.CompExtension_getPackageVersion(self, *args)", "def getVersion(self, *args):\n return _libsbml.CompExtension_getVersion(self, *args)", "def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['orionld version']\n except Exception as e:\n pass\n return ''", "def get_version(self):\n args = {\"access_token\": self.access_token}\n try:\n response = self.session.request(\n \"GET\",\n FACEBOOK_GRAPH_URL + self.version + \"/me\",\n params=args,\n timeout=self.timeout,\n proxies=self.proxies,\n )\n except requests.HTTPError as e:\n response = json.loads(e.read())\n raise GraphAPIError(response)\n\n try:\n headers = response.headers\n version = headers[\"facebook-api-version\"].replace(\"v\", \"\")\n return str(version)\n except Exception:\n raise GraphAPIError(\"API version number not available\")", "def build_api_version(self):\n return self._build_api_version", "def api_version(self):\n\n\t\treturn self._api_version", "def getPackageVersion(self):\n return _libsbml.ASTBasePlugin_getPackageVersion(self)", "def version(self):\n\n if self.running() is True:\n return APIConsumer.get(\"/version\").content\n else:\n return None", "def getPackageVersion(self, *args):\n return _libsbml.FbcExtension_getPackageVersion(self, *args)", "def latest_version_number(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"latest_version_number\")", "def get_version(self):\n return 0", "def getObjectVersion(self):\n return _libsbml.SBase_getObjectVersion(self)", "def get_version(self) -> Dict[str, str]:\n return self.http.get(self.config.paths.version)", "def version(self):\n if not hasattr(self, \"_version_string\"):\n return None\n return semantic_version.Version(self._version_string)", "def get_version(self):\n\t\treturn call_sdk_function('PrlApi_GetVersion')", "def get_version(self):\r\n return self._arm.get_version()", "def version(self) -> Dict[str, str]:\n return self.get_version()", "def getPackageVersion(self, *args):\n return _libsbml.SBMLExtension_getPackageVersion(self, *args)", 
"def getPackageVersion(self):\n return _libsbml.FbcPkgNamespaces_getPackageVersion(self)", "def latest_version(self):\n state = self.coordinator.data\n\n try:\n # fake a new update\n # return \"foobar\"\n return dict_get(state, \"firmware_update_info.base.version\")\n except KeyError:\n return None", "def version(self):\n if self.installed and not self._version:\n output = self.call(self._version_args, require_success=False)\n match = re.search(self._version_regexp, output)\n if not match:\n raise RuntimeError(\n \"Unable to find version number for '{0}' in output from\"\n \" '{0} {1}':\\n{2}\".format(self.name,\n ' '.join(self._version_args),\n output))\n self._version = StrictVersion(match.group(1))\n return self._version", "def version(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"version\")", "def get_application_version(self):\n return self.connector.request('GET', '/app/version')", "def get_product_version(self):\n return self.get_attr('product_version')", "def version(self) -> int:\r\n\r\n return self.__version", "def version(self) -> int:\r\n\r\n return self.__version", "def getPackageVersion(self):\n return _libsbml.MultiPkgNamespaces_getPackageVersion(self)" ]
[ "0.74936455", "0.7484783", "0.7445432", "0.7445432", "0.74160135", "0.7414787", "0.72851366", "0.7277261", "0.72265685", "0.7201598", "0.71837974", "0.7182243", "0.7182243", "0.71736085", "0.7157434", "0.7147735", "0.7075582", "0.7058257", "0.70546836", "0.7049833", "0.7038163", "0.70172703", "0.69857126", "0.6983983", "0.69804174", "0.696574", "0.6891064", "0.68752176", "0.6853718", "0.68496454", "0.6843804", "0.68410164", "0.6838963", "0.682704", "0.67885447", "0.678794", "0.6783728", "0.6777276", "0.67760634", "0.6768426", "0.67380005", "0.67345107", "0.6731835", "0.6724773", "0.6724773", "0.6723173", "0.6691962", "0.6688214", "0.66801125", "0.6673328", "0.66641355", "0.6641726", "0.6631898", "0.66264904", "0.66017145", "0.66017145", "0.66017145", "0.66017145", "0.66017145", "0.6593877", "0.6582174", "0.6576688", "0.65360004", "0.6532835", "0.6528595", "0.6525894", "0.6516299", "0.6493112", "0.6485115", "0.64785147", "0.6478297", "0.6471454", "0.64597", "0.64571667", "0.64571023", "0.64441866", "0.64438504", "0.6440626", "0.64369094", "0.6436032", "0.64318997", "0.6412534", "0.6408829", "0.6408426", "0.64082295", "0.6404794", "0.64038885", "0.64038885", "0.6394897" ]
0.6938133
37
Sets the version of this MessagingCampaign. Required for updates, must match the version number of the most recent update
def version(self, version): self._version = version
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def version(self, version):\n self._version = version", "def version(self, version):\n self._version = version", "def set_version(self, version: str) -> None:\n if self.current_version == version:\n return\n self.current_version = version\n self._del_cached_property(\"version\")", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version):\n\n self._version = version", "def version(self, version: str):\n\n self._version = version", "def version(self, version: str):\n\n self._version = version", "def version(self, version: int):\n\n self._version = version", "def version(self, version):\n if version is None:\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n\n self._version = version", "def version(self, version):\n if version is None:\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n\n self._version = version", "def version(self, version):\n if version is None:\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n\n self._version = version", "def version(self, version):\n self._version = utils.VersionParser().parse(version)", "def version(self, version: str):\n if version is None:\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n\n self._version = version", "def version(self, version):\n if version is None:\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n if version is not None and len(version) < 1:\n raise ValueError(\"Invalid value for `version`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._version = version", "def version(self, version):\n if self.local_vars_configuration.client_side_validation and version is None: # noqa: E501\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n\n self._version = version", "def SetVersion(self, addonVersion):\n self._addonVersion = addonVersion", "def builder_version(self, builder_version):\n\n self._builder_version = builder_version", "def version(self, version):\n if self.local_vars_configuration.client_side_validation and version is None: # noqa: E501\n raise ValueError(\"Invalid value for `version`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n version is not None and len(version) > 64):\n raise ValueError(\"Invalid value for `version`, length must be less than or equal to `64`\") # noqa: 
E501\n if (self.local_vars_configuration.client_side_validation and\n version is not None and len(version) < 1):\n raise ValueError(\"Invalid value for `version`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._version = version", "def set_toVersion(self):\n if not self.data.get('toVersion') or LooseVersion(self.data.get('toVersion', '99.99.99')) >= TO_VERSION_5_9_9:\n if self.verbose:\n click.echo('Setting toVersion field')\n self.data['toVersion'] = TO_VERSION_5_9_9", "def version(self, newVersion=None):\n pass", "def setVersion(self, version) :\n if version is not None :\n try :\n self.version = [int(p) for p in version.split(\".\")]\n except AttributeError :\n if len(version) == 2 : # 2-tuple\n self.version = version\n else :\n try :\n self.version = [int(p) for p in str(float(version)).split(\".\")]\n except :\n self.version = [int(p) for p in IPP_VERSION.split(\".\")]", "def version(self, newVersion=None):\n if newVersion != None:\n self._setValue('version', newVersion)\n return self._getValue('version')", "def _set_version(self, version):\n with self.db.atomic():\n JambiModel.delete().execute()\n JambiModel.create(ref=str(version))\n self.logger.debug('Set jambi version to {}'.format(version))", "def setVersion(self, *args):\n\n self._version = '.'.join( [str(arg) for arg in args] )", "def switch_to_version(self, version):\n self.current_version = version\n self.save()", "def hxdp_build_version(self, hxdp_build_version):\n\n self._hxdp_build_version = hxdp_build_version", "def meta_version(self, meta_version):\n\n self._meta_version = meta_version", "def update_version(self, version):\n self.version = CPE.escape_for_cpe23_fs(version)", "def set_php_version(self, version: str) -> Session:\n data = {\n \"version\": version\n }\n\n return self.configure(data)", "def set_version(self, version):\n\n def update_version(version, filepath):\n with open(filepath, \"r\") as stream:\n contents = stream.read()\n\n new_contents = _fix_contents_version(contents, version)\n assert contents != new_contents\n with open(filepath, \"w\") as stream:\n stream.write(new_contents)\n\n update_version(version, os.path.join(\".\", \"package.json\"))\n update_version(version, os.path.join(\".\", \"src\", \"setup.py\"))\n update_version(\n version, os.path.join(\".\", \"src\", \"robocorp_code\", \"__init__.py\")\n )", "def setPackageVersion(self, *args):\n return _libsbml.ISBMLExtensionNamespaces_setPackageVersion(self, *args)", "def carrier_settings_version(self, carrier_settings_version):\n\n self._carrier_settings_version = carrier_settings_version", "def version_name(self, version_name):\n\n self._version_name = version_name", "def setProgramVersion(self, *args):\n return _libsbml.SBMLWriter_setProgramVersion(self, *args)", "def __set__(self, instance, value):\n # make sure value follows \"major,minor,build\" convention\n if not is_version_valid(value):\n raise InvalidVersionFormat(\"Version: {0} is invalid\".format(value))\n\n super().__set__(instance, value)", "def set_version(self, version, dataset_name=None):\n if dataset_name is None:\n self._version = version\n return self._version\n\n # resolve dataset name\n dataset = self.__getitem__(dataset_name)\n if dataset is None:\n raise KeyError(\"Dataset %s does not exist\" % dataset_name)\n dataset.attrs[\"version\"] = version\n return version", "def maybe_update_application_version(self, value):\n if (\n value\n and value.command_class == COMMAND_CLASS_VERSION\n and value.label == \"Application Version\"\n ):\n 
self._application_version = value.data", "def set_version(self, bundle, ctx, filename, version):", "def min_tls_version(self, value):\n self._set_attr('min-tls-version', value)", "def protocol_version(self, protocol_version):\n\n self._protocol_version = protocol_version", "async def update_version(self, version: int):\n async with open(self.__file_name, mode=\"r\") as auth_file:\n tag_data = json.loads(await auth_file.read())\n await auth_file.close()\n async with open(self.__file_name, mode=\"w\") as auth:\n tag_data[\"version\"] = version\n await auth.write(json.dumps(tag_data, indent=2, sort_keys=True))\n await auth.close()\n self.__version = version", "def set_version(self, protocol_version):\n self.version = protocol_version\n self.version_bytes = str(protocol_version).encode(\"latin1\")\n self.version_header = self.version_bytes + PROTOCOL_3x_HEADER\n if protocol_version == 3.2: # 3.2 behaves like 3.3 with type_0d\n # self.version = 3.3\n self.dev_type = \"type_0d\"\n elif protocol_version == 3.4:\n self.dev_type = \"v3.4\"", "def set_read_version(self, version):\n self.capi.fdb_transaction_set_read_version(self.tpointer, version)", "def serialization_version(self, serialization_version):\n\n self._serialization_version = serialization_version", "def build_api_version(self, build_api_version):\n\n self._build_api_version = build_api_version", "def node_version(self, node_version):\n\n self._node_version = node_version", "def set_version(v):\n old = get_version()\n sys.stderr.write('%s -> %s\\n' % (old, v))\n with open(INIT, 'r+') as f:\n text = f.read()\n text = pattern.sub(\"__version__ = %r\" % v, text)\n f.seek(0)\n f.truncate()\n f.write(text)", "async def version(self):\n # [p]version\n\n await self.bot.say(\"Current version: \" + CoreAPI.get_version())", "def version(self):\n self.version_list[-1] = self.revision\n version = '.'.join(self.version_list)\n return version", "def version(self) -> int:\n return self._version", "def set_doc_version(self, doc, value):\n if not self.doc_version_set:\n self.doc_version_set = True\n m = self.VERS_STR_REGEX.match(value)\n if m is None:\n raise SPDXValueError('Document::Version')\n else:\n doc.version = version.Version(major=int(m.group(1)),\n minor=int(m.group(2)))\n return True\n else:\n raise CardinalityError('Document::Version')", "def version(self) -> int:\r\n\r\n return self.__version", "def version(self) -> int:\r\n\r\n return self.__version", "def versionMessage(self):\n\n message = MumbleControlProtocol.Version()\n message.release=\"1.2.5\"\n message.version=66053\n message.os=platform.system()\n message.os_version=\"evebot1.0.2\"\n return message", "def _update_version(self) -> None:\n # Implement in child class.\n raise NotImplementedError", "def update_version(self, version):\n self._metadata['version'] = version\n\n if self._type == '.json':\n with open(self._filename, 'w') as f:\n f.write(json.dumps(self._metadata, indent=2))\n\n dof_filename = os.path.join(self.path, self.name + '.dof')\n if os.path.isfile(dof_filename):\n dof_file = DOFFile(dof_filename)\n dof_file.update_version(version)", "async def set(\n self,\n itx: discord.Interaction,\n /,\n version: app_commands.Transform[str, bible_lookup],\n ) -> None:\n\n if TYPE_CHECKING:\n assert itx.guild is not None\n\n version = version.lower()\n\n async with Session.begin() as session:\n existing = await BibleVersion.get_by_command(session, version)\n await existing.set_for_guild(session, itx.guild)\n\n await utils.send_embed(\n itx,\n description=self.localizer.format(\n 
'set.response',\n data={'version': version},\n locale=itx.locale,\n ),\n ephemeral=True,\n )", "def version(self, **kwargs):\n return version.version(self._host, self._session, **kwargs)", "def product_version(self, product_version):\n\n self._product_version = product_version", "def version(self):\n return self.__version", "def version(self):\n return self.__version", "def SetGuardRailVersion(self, *args, **kwargs):\n # type: (*Any, **Any) -> None\n payload = {}\n for i in range(len(args)):\n payload[\"Arg%s\" % (i + 1)] = args[i]\n for item in kwargs.items():\n payload[item[0]] = item[1]\n return self._execute(\n \"setGuardRailVersion\", payload=payload, response_object=None\n )", "def version(self, app, args):\n app.put('\\n\\n%s\\n' % _version_str)", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n return self._version", "def version(self):\n\n return self._version", "def get_version(self):\n self._send_command(self._adapter.get_version())", "def version(self):\n return self.proto.details.appDetails.versionString", "def version(self):\n return self._get(\"version\")", "def set_pkg_vers(self, doc, version):\n self.assert_package_exists()\n if not self.package_vers_set:\n self.package_vers_set = True\n doc.package.version = version\n return True\n else:\n raise CardinalityError('Package::Version')", "def version(self):\n pass", "def version(self):\n pass" ]
[ "0.72409934", "0.72409934", "0.71715456", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.71619844", "0.7151149", "0.7151149", "0.7107088", "0.679211", "0.679211", "0.679211", "0.6780163", "0.67522746", "0.6737506", "0.66697395", "0.6560099", "0.6548404", "0.651609", "0.6482288", "0.6478975", "0.6358396", "0.6351294", "0.6348627", "0.63338023", "0.6220054", "0.6217451", "0.6196121", "0.614083", "0.6100281", "0.60885197", "0.6078275", "0.6073389", "0.6039914", "0.60247815", "0.6018501", "0.60137296", "0.59584576", "0.59486914", "0.59372807", "0.59284616", "0.59180963", "0.59142053", "0.590902", "0.58488095", "0.58386004", "0.5812095", "0.5789124", "0.57878387", "0.57621664", "0.5751394", "0.57446456", "0.57327116", "0.57327116", "0.5723672", "0.5714959", "0.57030076", "0.5702423", "0.5687114", "0.5685051", "0.5677979", "0.5677979", "0.56599504", "0.56333023", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.5618625", "0.56039965", "0.55955327", "0.5588474", "0.5579414", "0.5575127", "0.556309", "0.556309" ]
0.7293403
0
Gets the division of this MessagingCampaign. The division this entity belongs to.
def division(self): return self._division
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subdivision(self) -> Optional[str]:\n return pulumi.get(self, \"subdivision\")", "def subdivision(self) -> Optional[str]:\n return pulumi.get(self, \"subdivision\")", "def get_group(self):\n return self._group", "def getGroup(self):\n\t\treturn self.Group", "def get_domain(self):\n return self._domain", "def getDivider(self):\n return _libsbml.CompModelPlugin_getDivider(self)", "def format_division(self, data):\n return data", "def group(self):\n return self.properties.get('Group', None)", "def get_domain(self):\n return self.domain", "def boundary(self):\n return self._boundary", "def district(self) -> str:\n return pulumi.get(self, \"district\")", "def get_divide(self, ):\n return self.get_parameter('divide')", "def comm_group(self):\n return self._gcomm", "def get(self):\n self._group = self._client.get(\n url=self._client.get_full_url(\n self.get_path(\n 'single', realm=self._realm_name, group_id=self._group_id\n )\n )\n )\n self._group_id = self._group[\"id\"]\n return self._group", "def group(self):\n return self._group", "def group(self):\n return self._group", "def group(self):\n return self._group", "def get_divergence_hor(self):\n for focus_mode in self.focus_modes:\n if focus_mode['modeName'] == self.active_focus_mode:\n return focus_mode['diverg'][0]", "def get_partition(self):\n return self._partition", "def folder(self):\n return self._folder", "def getDomain(self):\n return self.domain", "def get_group(self) -> Optional[str]:\n return self.group", "def get_division(self, name: str):\n genome = self.genomes[safe(name)]\n division = str(genome[\"division\"]).lower().replace(\"ensembl\", \"\")\n if division == \"bacteria\":\n raise NotImplementedError(\"Bacteria from Ensembl not supported.\")\n\n is_vertebrate = division == \"vertebrates\"\n return division, is_vertebrate", "def message_group_id(self) -> Optional[str]:\n return pulumi.get(self, \"message_group_id\")", "def chunk(self):\n # easy enough\n return self.dcpl.getChunk(rank=len(self.shape))", "def domain(self):\n\n return self._domain", "def get_dimension(self):\n return self._dimension", "def domain(self):\n return self._domain", "def domain(self):\n return self._domain", "def domain(self):\n return self._domain", "def divide(self):\n return self._do_calc(self.divider)", "def divide(self):\n return self._do_calc(self.divider)", "def divide(self):\n return self._do_calc(self.divider)", "def division(self):\n try:\n division = self.matrix1 / self.matrix2\n except Exception as e:\n return \"Error: {}\".format(e)\n\n return division", "def GroupId(self):\n\t\treturn self._get_attribute('groupId')", "def split(self):\n return self._clip_metadata.get(\"split\")", "def getPartition(self):\n\t\treturn self.partition", "def domain(self):\n # type: () -> string_types\n return self._domain", "def group(self) -> str:\n return pulumi.get(self, \"group\")", "def parent_folder(self):\n return self.properties.get(\"ParentFolder\",\n Folder(self.context, ResourcePath(\"ParentFolder\", self.resource_path)))", "def faceDiv(self):\n if getattr(self, '_faceDiv', None) is None:\n n = self.vnC\n # Compute faceDivergence operator on faces\n if(self.dim == 1):\n D = ddx(n[0])\n elif(self.dim == 2):\n D1 = sp.kron(speye(n[1]), ddx(n[0]))\n D2 = sp.kron(ddx(n[1]), speye(n[0]))\n D = sp.hstack((D1, D2), format=\"csr\")\n elif(self.dim == 3):\n D1 = kron3(speye(n[2]), speye(n[1]), ddx(n[0]))\n D2 = kron3(speye(n[2]), ddx(n[1]), speye(n[0]))\n D3 = kron3(ddx(n[2]), speye(n[1]), speye(n[0]))\n D = sp.hstack((D1, D2, D3), format=\"csr\")\n # 
Compute areas of cell faces & volumes\n S = self.area\n V = self.vol\n self._faceDiv = sdiag(1/V)*D*sdiag(S)\n return self._faceDiv", "def security_group_id_for_domain_boundary(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"security_group_id_for_domain_boundary\")", "def split_id(self):\n return self._split_id", "def getSegment(self):\n return self.segment", "def parent_organization(self) -> object:\n return self._parent_organization", "def get_campaign(self, uuid):\n return Campaign.deserialize(self._get_single('campaigns', {'uuid': uuid}))", "def id(self):\n return self._group", "def get_segment(self):\n return self.segment", "def get_segment(self):\n return self.segment", "def get_commentaire(self):\n return self.commentaire", "def domain(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"domain\")", "def get_comm(self):\n return self.comm", "def get_group(self):\n\t\treturn self.variables.get('group')", "def _get_portal_domain(self):\n return self._sm_client.describe_workteam(WorkteamName=self._workteam_name)[\n \"Workteam\"\n ][\"SubDomain\"]", "def get(self):\n return self._partition", "def get_parent(self):\n parent_id = self.client._perform_json(\"GET\", \"/project-folders/%s\" % self.project_folder_id).get(\"parentId\", None)\n if parent_id is None:\n return None\n else:\n return DSSProjectFolder(self.client, parent_id)", "def domain(self) -> str:\n return pulumi.get(self, \"domain\")", "def domain(self) -> str:\n return pulumi.get(self, \"domain\")", "def parent(self):\n return getattr(self, \"parent_%s\" % self.discriminator)", "def getCountryClubId(self):\n return self.countryClubId", "def domain(self) -> str:\n return self._domain", "def group(cls):\n return relationship.many_to_one(cls, 'group')", "def subFolder(self):\r\n return self.__folder", "def group_id(self):\n return self._group_id", "def group_id(self):\n return self._group_id", "def group_id(self):\n return self._group_id", "def getGroupFolder(self):\n if platform.system()==\"Windows\":\n groupFolder = os.path.join(\"\\\\\\\\ursa\",\"AQOGroupFolder\")\n if platform.system()==\"Linux\":\n groupFolder = os.path.join(\"/media\",\"ursa\",\"AQOGroupFolder\")\n return groupFolder", "def getCompartment(self):\n return _libsbml.Reaction_getCompartment(self)", "def entity_group(self):\n return self.key().entity_group()", "def localdiscriminator(self) :\n\t\ttry :\n\t\t\treturn self._localdiscriminator\n\t\texcept Exception as e:\n\t\t\traise e", "def get_dimension(self):\n return", "def domain(self):\n return self['domain']", "def department(self) -> object:\n return self._department", "def get_centre(self):\n return self.c", "def group(self) -> str:\n return self._db_data.group", "def dimension(self):\n return self._dimension", "def dimension(self):\n return self._dimension", "def dimension(self):\n return self._dimension", "def dimension(self):\n return self._dimension", "def get_gift_conversation(self):\r\n return self.gift_conversation", "def get_div(self):\n if self.FDISTR == 1:\n return (self.HDIV1, self.HDIV2, self.VDIV1, self.VDIV2)\n elif self.FDISTR == 5:\n return (self.CONE_MIN, self.CONE_MAX)\n else:\n return None", "def group(self) -> Optional[str]:\n return pulumi.get(self, \"group\")", "def group_id(self):\n # type: () -> string_types\n return self._group_id", "def scenario_group_id(self) -> str:\n return self.__scenario_group_id", "def GetEntity(self):\n return self.__entity", "def get_bounding_box(self):\n return self._domain.get_bounding_box()", "def div(self, other):\n\n return 
self._get(\"div\", other, self.__class__)", "def get_original_domain(self):\n return self.record_id.split('/', 1)[0]", "def elections_division(self):\n return self.get_queryset().get_or_create(\n name='Elections Division',\n classification='executive',\n parent=self.secretary_of_state(),\n )[0]", "def group_identifier(self):\n return self._group_identifier", "def contact_folders(self):\n if \"contactFolders\" in self._prop_dict:\n return ContactFoldersCollectionPage(self._prop_dict[\"contactFolders\"])\n else:\n return None", "def group_id(self) -> str:\n return pulumi.get(self, \"group_id\")", "def group_id(self) -> str:\n return pulumi.get(self, \"group_id\")", "def faceDivx(self):\n if getattr(self, '_faceDivx', None) is None:\n # The number of cell centers in each direction\n n = self.vnC\n # Compute faceDivergence operator on faces\n if(self.dim == 1):\n D1 = ddx(n[0])\n elif(self.dim == 2):\n D1 = sp.kron(speye(n[1]), ddx(n[0]))\n elif(self.dim == 3):\n D1 = kron3(speye(n[2]), speye(n[1]), ddx(n[0]))\n # Compute areas of cell faces & volumes\n S = self.r(self.area, 'F', 'Fx', 'V')\n V = self.vol\n self._faceDivx = sdiag(1/V)*D1*sdiag(S)\n\n return self._faceDivx", "def get_group_name(self):\n return self.groupname", "def boundary(self):\n return self.substrates.boundary", "def room(self) -> Room:\n return self.__room", "def log_group(self) -> \"ILogGroup\":\n return self._values.get('log_group')", "def log_group(self) -> \"ILogGroup\":\n return self._values.get('log_group')", "def log_group(self) -> \"ILogGroup\":\n return self._values.get('log_group')" ]
[ "0.5810358", "0.5810358", "0.51984763", "0.5110629", "0.50223154", "0.5020974", "0.50027514", "0.49567127", "0.49472067", "0.49352798", "0.49319625", "0.49162114", "0.4908078", "0.4903234", "0.488874", "0.488874", "0.488874", "0.4876585", "0.4860735", "0.48574632", "0.48448205", "0.48382932", "0.48056766", "0.47636947", "0.4753338", "0.47446758", "0.47381356", "0.4734144", "0.4734144", "0.4734144", "0.4731469", "0.4731469", "0.4731469", "0.47260442", "0.47190174", "0.4715549", "0.4701066", "0.469539", "0.46942627", "0.46698248", "0.46654597", "0.46632037", "0.46585664", "0.46585092", "0.4656139", "0.46444583", "0.46370968", "0.4626443", "0.4626443", "0.46257377", "0.4620327", "0.4610055", "0.46058634", "0.46036875", "0.46019062", "0.45994645", "0.4585413", "0.4585413", "0.4572298", "0.45716926", "0.4571318", "0.45513836", "0.45444277", "0.45411354", "0.45411354", "0.45411354", "0.454023", "0.4536396", "0.4533582", "0.45291945", "0.45235202", "0.45210734", "0.45028925", "0.45028532", "0.44954994", "0.44939288", "0.44939288", "0.44939288", "0.44939288", "0.44873953", "0.44772953", "0.44671583", "0.44643375", "0.44637522", "0.44627586", "0.4452327", "0.44505674", "0.4448768", "0.4439222", "0.44374734", "0.4432527", "0.44170377", "0.44170377", "0.4414511", "0.44102582", "0.4403922", "0.44000146", "0.43938154", "0.43938154", "0.43938154" ]
0.6648795
0
Sets the division of this MessagingCampaign. The division this entity belongs to.
def division(self, division): self._division = division
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def division(self, division):\n\n self._division = division", "def set_divide(self, a_divide):\n self.set_parameter('divide', a_divide)\n return self", "def SetBoundaryCriterion(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_ShapeDivideContinuity_SetBoundaryCriterion(self, *args)", "def validate_division(self, div_field):\n if not div_field.data or div_field.data == '':\n raise ValidationError('All users must belong to a division')\n return True", "def setSplit(self,split):\n self.split=split", "def set_part(self, connection_part):\n self.part = connection_part", "def campaign(self, campaign):\n\n self._campaign = campaign", "def _set_campaign(self, campaign):\n if isinstance(campaign, str):\n campaign = TrackedCampaign.objects.create(name=campaign)\n\n campaign.save()\n\n self.campaign = campaign", "def dividend(self, dividend):\n\n self._dividend = dividend", "def district(self, district):\n\n self._district = district", "def district(self, district):\n\n self._district = district", "def set_divisions(self, nx=1, ny=1):\n\n self.nx = nx\n self.ny = ny", "def division(self):\n return self._division", "def create_division(self, division_title):\n request = post(url=self.base_url + 'api/services/etender/division/CreateDivision',\n headers=self.headers,\n data=json.dumps({\"title\": division_title}))\n self.division = json.loads(request.content).get('result')\n print('Created division:', self.division)\n return self.division", "def set_dimension(self, dimension):\n assert self.dimension == dimension, \"dimensions do not match\"\n self._dimension = dimension", "def setDomainRange(self, domain, range):\n self.domain = domain.cloneSpace()\n self.range = range.cloneSpace()\n return", "def set_space_guid(self, space_guid):\n res = self._cc.spaces(space_guid).get()\n self._space = res.resource\n\n res = self._cc.request(self._space.organization_url).get()\n self._org = res.resource\n return self", "def set_group(self, group):\n self._group = group", "def set_group(self, group: str) -> None:\n self.group = group", "def set_split(self,split='train'):\r\n \r\n self._target_data = self.processed_data[split]\r\n self.split_ = split", "def group(self, group):\n self._group = group", "def set_conversation(self, conversation):\r\n self.conversation = conversation", "def add_subdivision(self, parent, condition, client_id=None):\n\n biddable_ad_group_criterion=set_elements_to_none(campaign_service.factory.create('BiddableAdGroupCriterion'))\n product_partition=set_elements_to_none(campaign_service.factory.create('ProductPartition'))\n # If the root node is a unit, it would not have a parent\n product_partition.ParentCriterionId=parent.ad_group_criterion.Id if parent is not None and parent.ad_group_criterion is not None else None\n product_partition.Condition=condition\n product_partition.PartitionType='Subdivision'\n biddable_ad_group_criterion.Criterion=product_partition\n biddable_ad_group_criterion.CriterionBid=None\n biddable_ad_group_criterion.AdGroupId=self._ad_group_id\n biddable_ad_group_criterion.Status=None\n if hasattr(biddable_ad_group_criterion, 'EditorialStatus'):\n biddable_ad_group_criterion.EditorialStatus=None\n biddable_ad_group_criterion.Id=self._reference_id\n self._reference_id=self._reference_id\n self._reference_id-=1\n\n partition_action=BulkAdGroupProductPartition()\n partition_action.client_id=client_id\n partition_action.ad_group_criterion=biddable_ad_group_criterion\n self._partition_actions.append(partition_action)\n\n return partition_action", "def 
_setPartedPartition(self, partition):\n log_method_call(self, self.name)\n\n if partition is not None and not isinstance(partition, parted.Partition):\n raise ValueError(\"partition must be None or a parted.Partition instance\")\n\n log.debug(\"device %s new partedPartition %s\", self.name, partition)\n self._partedPartition = partition\n self.updateName()", "def SetProtobufMessageField(self, group_message, field, field_value):\n if field.label == field.LABEL_REPEATED:\n self.SetProtoRepeatedField(group_message, field, field_value)\n elif field.type == field.TYPE_MESSAGE:\n self.SetProtoMessageField(group_message, field, field_value)\n elif not self.SetProtoField(group_message, field, field_value):\n raise Exception('Unknown field type %s' % field.type)", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def group(self, group):\n\n self._group = group", "def department(self, department):\n\n self._department = department", "def department(self, department):\n\n self._department = department", "def set_definition(self, definition):\n return self.client._perform_json(\n \"PUT\", \"/admin/groups/%s\" % self.name,\n body = definition)", "def occupation(self, occupation):\n\n self.logger.debug(\"In 'occupation' setter.\")\n\n self._occupation = occupation", "def group_identifier(self, group_identifier):\n\n self._group_identifier = group_identifier", "def format_division(self, data):\n return data", "def setSplitOrientation(self, orientation):\n pass", "def domain(self, domain):\n self._domain = domain", "def domain(self, domain):\n self._domain = domain", "def setGroup(self, group):\n\t\tself.config.GROUP = group", "async def async_set_multiroom_group(self, multiroom_group):\n self._multiroom_group = multiroom_group", "def conversation_participant_uuid(self, conversation_participant_uuid):\n\n self._conversation_participant_uuid = conversation_participant_uuid", "def set_participant(self, part_key, part_data_raw):\n self.client.set(part_key, dumps(part_data_raw))\n return part_key", "def domain(self, domain):\n\n self._domain = domain", "def domain(self, domain):\n\n self._domain = domain", "def municipality(self, municipality):\n\n self._municipality = municipality", "def set_group(self, id_: str, player: str, group: list):\n self._groups[id_] = {\n 'player': player,\n 'group': group\n }", "def _subdivideDomain(self, divisionInstructions, trainingSet):\n unclustered = []\n # division instructions are as {subspace: (mode, value)}\n ## where \"value\" is the number of segments in \"split\" mode\n ## or the length of pivot values per segment in \"value\" mode\n self.raiseADebug('Training segmented subspaces for \"{}\" ...'.format(self._romName))\n for subspace, (mode, value) in divisionInstructions.items():\n dataLen = len(trainingSet[subspace][0]) # TODO assumes syncronized histories, or single history\n self._divisionInfo['historyLength'] = dataLen # TODO assumes single pivotParameter\n if mode == 'split':\n numSegments = value # renamed for clarity\n # divide the subspace into equally-sized segments, store the indexes for each segment\n counter = np.array_split(np.arange(dataLen), numSegments)\n # only store bounds, not all indices in between -> note that this is INCLUSIVE!\n counter = list((c[0], c[-1]) for c in counter)\n # Note that \"segmented\" doesn't have \"unclustered\" since chunks are evenly sized\n elif mode == 'value':\n segmentValue = value # renamed for clarity\n # divide the subspace into segments with roughly the same pivot 
length (e.g. time length)\n pivot = trainingSet[subspace][0]\n # find where the data passes the requested length, and make dividers\n floor = 0 # where does this pivot segment start?\n nextOne = segmentValue # how high should this pivot segment go?\n counter = []\n # TODO speedup; can we do this without looping?\n while pivot[floor] < pivot[-1]:\n cross = np.searchsorted(pivot, nextOne)\n # if the next crossing point is past the end, put the remainder piece\n ## into the \"unclustered\" grouping, since it might be very oddly sized\n ## and throw off segmentation (specifically for clustering)\n if cross == len(pivot):\n unclustered.append((floor, cross - 1))\n break\n # add this segment, only really need to know the first and last index (inclusive)\n counter.append((floor, cross - 1)) # Note: indices are INCLUSIVE\n # update search parameters\n floor = cross\n nextOne += segmentValue\n self.raiseADebug('Dividing {:^20s} into {:^5d} divisions for training ...'.format(subspace, len(counter) + len(unclustered)))\n # return the counter indicies as well as any odd-piece-out parts\n return counter, unclustered", "def set_cid(self, cid):\n self.__cid = cid", "def set_cid(self, cid):\n self.__cid = cid", "def set_assignment_game_frame(self, assignment):\n\n self.frames[\"game\"].set_assignment(assignment)", "def room(self, room):\n if self.local_vars_configuration.client_side_validation and room is None: # noqa: E501\n raise ValueError(\"Invalid value for `room`, must not be `None`\") # noqa: E501\n\n self._room = room", "def _set_message(self):\n\n type_to_message = {\n MESSAGE_TYPES['join'] : (\n u\"User '%s' has joined the chat.\" % \n self.author\n ),\n MESSAGE_TYPES['leave'] : (\n u\"User '%s' has left the chat.\" % \n self.author\n ),\n }\n\n message = self.message\n if type_to_message.has_key(self.type):\n message = type_to_message[self.type]\n self.message = message", "def set_partition(self, begin=0, end=0):\r\n self.partition = (begin, end)", "def occupation(self, occupation):\n\n self._occupation = occupation", "def _set_communities(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"communities\", rest_name=\"communities\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"communities must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"communities\", rest_name=\"communities\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__communities = t\n if hasattr(self, '_set'):\n self._set()", "def department(self, department: object):\n\n self._department = department", "def set_partition(self, partition=0):\n if not isinstance(partition, int):\n raise TypeError('partition must be an integer')\n if partition <= 0:\n raise ValueError('partition must be positive')\n if self.connected:\n self.producer.send(\"PART:\"+str(partition))", "def set_domain(self, var, domain) :\n if var not in self.variables :\n raise KeyError(str(var) + \" is not a variable in 
this problem.\")\n self.domains[var] = sorted(domain[:])\n return self", "def split(self, split):\n\n # check split validity\n self.__check_split_object_validity(split)\n\n res = self.pdf4me_client.custom_http.post_universal_object(universal_object=split,\n controller='Split/Split')\n\n return res", "def grouping(self, grouping):\n\n self._grouping = grouping", "def organization(self, organization):\n\n self._organization = organization", "def organization(self, organization):\n\n self._organization = organization", "def organization(self, organization):\n\n self._organization = organization", "def partitioning_attribute(self, partitioning_attribute):\n\n self._partitioning_attribute = partitioning_attribute", "def setValidationSplit(self, validation_split):\n return self._set(validationSplit=validation_split)", "def industry(self, industry):\n\n self._industry = industry", "def sequencing_center(self, sequencing_center):\n self.logger.debug(\"In 'sequencing_center' setter.\")\n\n self._sequencing_center = sequencing_center", "def setSettingPart(self, settingPartEnum: cern.lsa.domain.settings.SettingPartEnum) -> 'TrimRequestBuilder':\n ...", "def step_impl_the_msg_to_is_set_to_respondent(context):\n step_impl_the_msg_to_is_set_to(context, context.bdd_helper.respondent_id)", "def campaign_id(self, campaign_id):\n\n self._campaign_id = campaign_id", "def send(self, group=0):\n self._data1 = group\n super().send(data1=self._data1)", "def conversation(self, conversation):\n\n self._conversation = conversation", "def set_group(self, group):\n # Implemented from template for osid.resource.ResourceForm.set_group_template\n if self.get_group_metadata().is_read_only():\n raise errors.NoAccess()\n if not self._is_valid_boolean(group):\n raise errors.InvalidArgument()\n self._my_map['group'] = group", "def integral_division(self, divisor):\n if not isinstance(divisor, six.integer_types):\n raise ValueError(\"You can only divide by an integer or a long.\")\n new_value = self.value / float(divisor)\n if int(new_value) != new_value:\n raise ValueError(\"Amount not exactly divisible by provided divisor\")\n return Amount(self.currency, int(new_value))", "def setBroadcast(self, broadcast):\n # type: (str)->None\n\n self._validator.validate_one(\n 'broadcast', VALID_OPTS['broadcast'], broadcast)\n self._ifAttributes['broadcast'] = broadcast", "def setValidationSplit(self, v):\n self._set(validationSplit=v)\n return self", "def setValidationSplit(self, v):\n self._set(validationSplit=v)\n return self", "def setValidationSplit(self, v):\n self._set(validationSplit=v)\n return self", "def setValidationSplit(self, v):\n self._set(validationSplit=v)\n return self", "def multipart_upload_id(self, multipart_upload_id):\n\n self._multipart_upload_id = multipart_upload_id", "def _setsenders_correspondent_53D(self, val):\n self.swift_obj.SendersCorrespondent_D = val\n self.swift_obj.SendersCorrespondent_D.swiftTag = \"53D\"", "def __ifloordiv__(self, d_value: float):\n self.set_value(self.get_value() // d_value)\n return self", "def set_divorced(self, d, line_number=0):\n self.divorced = d if d else 'NA'\n self._divorced_line = line_number", "def industry(self, industry: str):\n\n self._industry = industry", "def set_domain(self, domain):\n\n self._domain = domain\n\n self.changed = True", "def setCompartment(self, *args):\n return _libsbml.Reaction_setCompartment(self, *args)", "def set_reduced_tcp_boundary(self, boundary):\r\n return self._arm.set_reduced_tcp_boundary(boundary)", "def group_id(self, group_id):\n\n 
self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def group_id(self, group_id):\n\n self._group_id = group_id", "def SetCriterion(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_SplitSurfaceContinuity_SetCriterion(self, *args)", "def subject_group(self, subject_group):\n\n self._subject_group = subject_group", "def set_ratio(self, ratio: tuple) -> None:\r\n self.ratio = ratio", "def set_comment(self, comment):\n\t\tself.comment_ = comment", "def space(self, space):\n\n self._space = space", "def group(self, val):\n self.set_property(\"Group\", val)", "def setGraphFolder(self, p):\n return self._set(graphFolder=p)", "def segment_id(self, segment_id):\n\n self._segment_id = segment_id" ]
[ "0.6715935", "0.48573586", "0.48263213", "0.45991567", "0.45864677", "0.458617", "0.45811203", "0.44704387", "0.44482273", "0.4443386", "0.4443386", "0.44264278", "0.44212875", "0.439743", "0.43385282", "0.43234468", "0.43153226", "0.4298492", "0.4283395", "0.42175615", "0.41753483", "0.4148679", "0.41163713", "0.41144437", "0.41112566", "0.4102334", "0.4102334", "0.4102334", "0.40902355", "0.40902355", "0.40735295", "0.40515414", "0.40509844", "0.40495545", "0.40399316", "0.40320766", "0.40320766", "0.40239182", "0.40212128", "0.4017193", "0.40101084", "0.39776787", "0.39776787", "0.39569905", "0.39558622", "0.3953074", "0.39521936", "0.39521936", "0.39335683", "0.3933333", "0.3920777", "0.39197013", "0.39160883", "0.391568", "0.39010876", "0.3894195", "0.38901365", "0.3887813", "0.38865715", "0.38714832", "0.38714832", "0.38714832", "0.38663056", "0.38570428", "0.38514453", "0.3849921", "0.38486487", "0.3848316", "0.38457477", "0.384235", "0.38405523", "0.3824673", "0.381475", "0.3798999", "0.37986177", "0.37986177", "0.37986177", "0.37986177", "0.37985167", "0.3779632", "0.3767417", "0.3766045", "0.37660003", "0.37628692", "0.37591353", "0.37502512", "0.3746665", "0.3746665", "0.3746665", "0.3746665", "0.3746665", "0.3746665", "0.37445393", "0.37443063", "0.37381387", "0.37363487", "0.37251312", "0.37234575", "0.37114143", "0.37106666" ]
0.66989917
1
Gets the campaign_status of this MessagingCampaign. The current status of the messaging campaign. A messaging campaign may be turned 'on' or 'off'.
def campaign_status(self): return self._campaign_status
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_status(self):\n # TODO retrieve from db if not set\n return self.status", "def get_status(self):\n return self._status", "def get_status(self):\n statuses = dict(ACTIVITY_STATUS_CHOICES)\n return statuses.get(self.status, \"N/A\")", "def campaign_status(self, campaign_status):\n allowed_values = [\"on\", \"stopping\", \"off\", \"complete\", \"invalid\"]\n if campaign_status.lower() not in map(str.lower, allowed_values):\n # print(\"Invalid value for campaign_status -> \" + campaign_status)\n self._campaign_status = \"outdated_sdk_version\"\n else:\n self._campaign_status = campaign_status", "def get_status(self):\n return self.status", "def get_status(self):\n return self.status", "def get_status(self):\n return self.status", "def getStatus(self):\n return self._status", "def getStatus(self):\n return self.__status", "def getstatus(self):\n return self.__status", "def status(self):\n return status_dict[self._get_property_(self.STATUS).upper()]", "def Status(self):\n return self._get_attribute('status')", "def status(self):\n return self.get(self._names[\"status\"])", "def status(self):\n return STATUS[self.fields['status']]", "def status(self):\n return self._query_status()['status']", "def status(self):\n return self.m.status", "def get_status(self):\n return dict(CAMPAIGN_STATUS_CHOICES).get(self.status, \"N/A\")", "def status(self):\n return self._data['status']", "def status(self):\n\n return self._status", "def status(self):\n\n return self._status", "def status(self):\n\n return self._status", "def status(self):\n return STATUSES.get(self._mower_status, {}).get('message', self._mower_status)", "def get_status(self):\n status = self._status.get_message()\n \n if status == \"N\":\n return \"offline\"\n \n elif status == \"Y\":\n return \"online\"\n \n elif status == \"A\":\n return \"away\"\n \n elif status == \"B\":\n return \"busy\"", "def Status(self):\r\n\t\treturn self._get_attribute('status')", "def status(self):\n return self._dbattr('status')", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self):\n return self._status", "def status(self) -> Status:\n return self._status", "def status(self):\n if hasattr(self, \"_status\"):\n return self._status\n else:\n return None", "def status(self) -> Union[ReportStatus, str]:\n return self.__status", "def get_service_status(self):\n return self.service.status()", "def get_status(self):\n return self.msg", "def status(self):\n return self.job_proto.status", "def status(self):\n return self.__status", "def status(self):\n return self.__status", "def consumable_status(self) -> 
ConsumableStatus:\n return ConsumableStatus(self.send(\"get_consumable\")[0])", "def GetStatus(self):\r\n return self.status", "def status(self):\n self._refresh_state()\n return self._data.get('status')", "def status(self):\r\n return self._status", "def status(self):\r\n return self._status", "def status(self):\n return self.status", "def status(self) -> int:\n return self._status", "def status(self):\n if \"status\" in self._prop_dict:\n if isinstance(self._prop_dict[\"status\"], OneDriveObjectBase):\n return self._prop_dict[\"status\"]\n else :\n self._prop_dict[\"status\"] = AutomaticRepliesStatus(self._prop_dict[\"status\"])\n return self._prop_dict[\"status\"]\n\n return None", "def get_status(self):\n return self._conn_state", "def get_status_by_id(cls, request, id):\n return request.dbsession.query(cls).get(id).status", "def status(self) -> Optional[pulumi.Input[Union[str, 'ConnectionStatus']]]:\n return pulumi.get(self, \"status\")", "def get_status(self):\n return self._refreshed", "def status(self) -> str:\n return self._status", "def status(self) -> str:\n return self._status", "def status(self) -> str:\n return self._status", "def status(self) -> str:\n return self._status", "def status(self) -> str:\n return self._status", "def status(self) -> str:\n return self._status", "def job_status(self, job_id):\n\n response = self.batch_client.describe_jobs(jobs=[job_id])\n return response[\"jobs\"][0][\"status\"]", "def connection_status(self):\n return self._connection_status", "def getServiceStatus(self):\n return self.jsonRequest(\"/api/v1/getServiceStatus\", {\"apiKey\": self._apiKey})", "def status(self):\n\t\treturn self._status", "def status(self) -> pulumi.Output['outputs.JobStatus']:\n return pulumi.get(self, \"status\")", "def status(self):\n return self._get(path='status')", "def get_status(self):\n r = requests.get(self.base_url + '/status')\n return r.json()", "def check_status(self):\n return self.status", "def check_status(self):\n return self.status", "def job_status(self) -> JobStatus:\n statuses = set()\n with self._jobs.lock:\n\n # No jobs present\n if not self._jobs:\n return JobStatus.DONE\n\n statuses = set()\n for job in self._jobs.values():\n if job:\n statuses.add(job.status())\n\n # If any jobs are in non-DONE state return that state\n for stat in [\n JobStatus.ERROR,\n JobStatus.CANCELLED,\n JobStatus.RUNNING,\n JobStatus.QUEUED,\n JobStatus.VALIDATING,\n JobStatus.INITIALIZING,\n ]:\n if stat in statuses:\n return stat\n\n return JobStatus.DONE", "def getStatus(self):\n return self.enabled", "def get_job_status(self):\n if self.worker_thread is None:\n return None\n else:\n return self.worker_thread.get_status()", "def status(self) -> dict[str, str] | None:\n return self._status", "def status(self) -> Optional[pulumi.Input[Union[str, 'Status']]]:\n return pulumi.get(self, \"status\")", "def status(self) -> Optional[pulumi.Input[Union[str, 'Status']]]:\n return pulumi.get(self, \"status\")", "def status(self) -> Optional[pulumi.Input['GoogleRpcStatusArgs']]:\n return pulumi.get(self, \"status\")", "def state(self):\n return self._attributes['status']", "def Status(self, default=None):\n return self.data.get('status', default)", "def _get_status(self):\n return self.__status", "def id_status_conta(self):\n return self._id_status_conta", "def get_status(self, refresh: bool = True) -> JobStatus:\n if refresh:\n status = self.connection.hget(self.key, 'status')\n self._status = as_text(status) if status else None\n return self._status", "def get_status(self) 
-> RobovacStatus:\n message = self._build_get_device_status_user_data_message()\n robovac_response = self._send_packet(message, True)\n received_status_bytes = robovac_response.c.usr_data\n received_status_ints = [x for x in received_status_bytes]\n\n return RobovacStatus(\n 1 if received_status_ints[6] & 4 > 0 else 0,\n 1 if received_status_ints[6] & 2 > 0 else 0,\n received_status_ints[1] & 255,\n received_status_ints[8] & 255,\n received_status_ints[11] & 255,\n received_status_ints[10] & 255,\n received_status_ints[12] & 255,\n received_status_ints[13] & 255\n )", "def status(self):\n if self._child:\n return self._child.status()\n return self._status", "def recording_status(self):\n return self._get('recording/status')", "async def get_status(self, sms_id: int) -> SmsStatus:\n raise NotImplementedError" ]
[ "0.6404847", "0.6360354", "0.62657183", "0.62341815", "0.6217242", "0.6217242", "0.6217242", "0.62143916", "0.61250436", "0.6114817", "0.6114813", "0.60491306", "0.6047289", "0.6042757", "0.6023667", "0.6022818", "0.5983734", "0.59719783", "0.5946572", "0.5946572", "0.5946572", "0.5871262", "0.5832075", "0.58224785", "0.58119065", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57928276", "0.57711244", "0.5759102", "0.57475", "0.5739274", "0.5706799", "0.5695927", "0.5682723", "0.5682723", "0.5677152", "0.5675338", "0.5673468", "0.5667948", "0.5667948", "0.5650127", "0.5566235", "0.55651826", "0.55503505", "0.55371505", "0.5534195", "0.5524437", "0.5524336", "0.5524336", "0.5524336", "0.5524336", "0.5524336", "0.5524336", "0.55145353", "0.55067027", "0.5502488", "0.5501331", "0.5496999", "0.5492789", "0.5486854", "0.54564977", "0.54564977", "0.5451117", "0.5447005", "0.5444526", "0.54098094", "0.54086995", "0.54086995", "0.53922826", "0.53917027", "0.53897184", "0.5369609", "0.5366057", "0.53655815", "0.5359174", "0.53588533", "0.53549254", "0.5348217" ]
0.8441347
0
Sets the campaign_status of this MessagingCampaign. The current status of the messaging campaign. A messaging campaign may be turned 'on' or 'off'.
def campaign_status(self, campaign_status):
    allowed_values = ["on", "stopping", "off", "complete", "invalid"]
    if campaign_status.lower() not in map(str.lower, allowed_values):
        # print("Invalid value for campaign_status -> " + campaign_status)
        self._campaign_status = "outdated_sdk_version"
    else:
        self._campaign_status = campaign_status
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def campaign_status(self):\n return self._campaign_status", "def set_activity(self, status):\n self._activity = status", "def set_status(self, status):\n self.status = status", "def set_status(self, status):\n self.status = status", "def set_status(self, status):\n self.status = status", "def set_status(self, status: str) -> None:\n\n try:\n self.status = Buddy.status_map[status.lower()]\n except KeyError:\n self.status = status", "def set_message_status(self, message_id, status):\n\t\tself.c.execute(\"UPDATE messages SET status = ? WHERE message_id = ?\", (status, message_id))\n\t\tself.save()", "def setStatus(self, status):\n self.__status = status", "def setstatus(self, status):\n with self.lock:\n self.status = status", "def status(self, status):\n if self.local_vars_configuration.client_side_validation and status is None: # noqa: E501\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n\n self._status = status", "def job_status(self, job_status):\n\n self._job_status = job_status", "def status(self, status):\n allowed_values = [1, 2, 3] # noqa: E501\n if self.local_vars_configuration.client_side_validation and status not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\".format(status, allowed_values) # noqa: E501\n )\n\n self._status = status", "def status(self, status):\n if self.local_vars_configuration.client_side_validation and status is None: # noqa: E501\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n status is not None and len(status) < 1):\n raise ValueError(\"Invalid value for `status`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._status = status", "def SetStatus(self, status):\r\n self.status = status", "def status(self, status):\n if status is None:\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n allowed_values = [\"draft\", \"sent\", \"archive\", \"queued\", \"suspended\", \"in_process\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status):\n allowed_values = [\"ENABLED\", \"DISABLED\"] # noqa: E501\n if (self._configuration.client_side_validation and\n status not in allowed_values):\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def _set_status(self, status):\n with self.status_lock:\n if (status in _ENDING_STATUSES) or (not self.status in _ENDING_STATUSES):\n self.status = status", "def set_status(self, status):\n # TODO log to db\n self.status = status", "def status(self, status: int):\n if status is None:\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n\n self._status = status", "def set_connection_status(self, connection_status: Literal[ConnectionState]) -> None:\n self.connection_status = connection_status\n self.publish(self.key_gen(\"connection_status\"), connection_status)", "def set_status(self, status: HTTPProxyStatus) -> None:\n self._status = status\n self.update_actor_details(status=self._status)", "def status(self, status):\n if status is None:\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n if status is not None and len(status) < 1:\n raise 
ValueError(\"Invalid value for `status`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n\n self._status = status", "def status(self, status):\n allowed_values = [\"loaned\", \"finished\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and status not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\".format(status, allowed_values) # noqa: E501\n )\n\n self._status = status", "def status(self, status):\n if status is None:\n raise ValueError(\"Invalid value for `status`, must not be `None`\")\n allowed_values = [\"success\", \"warning\", \"error\", \"pending\"]\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\"\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status):\n if self.local_vars_configuration.client_side_validation and status is None: # noqa: E501\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n allowed_values = [\"active\", \"locked\", \"disabled\", \"changepassword\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and status not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "async def set_status(self, ctx, *, status: str = \"online\"):\n\n try:\n status = discord.Status[status.lower()]\n except KeyError:\n await ctx.error(\"Invalid Status\", \"Only `online`, `idle` or `dnd` statuses are available.\")\n else:\n await self.bot.change_presence(status=status, activity=ctx.me.activity)\n await ctx.success(f\"Status changed to {status}.\")", "def status(self, status):\n self._set_property_(self.STATUS, str(status))", "def status(self, status):\n allowed_values = [\"open\", \"finished\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and status not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\".format(status, allowed_values) # noqa: E501\n )\n\n self._status = status", "def status(self, status):\n self._status = status", "def status(self, status):\n self._status = status", "def status(self, status):\n self._status = 
status", "def status(self, status):\n self._status = status", "def status(self, status):\n self._status = status", "def status(self, status):\n self._status = status", "def status(self, status):\n self._status = status", "def status(self, status: str):\n\n self._status = status", "def status(self, status: str):\n\n self._status = status", "def setStatus(self, status, details=None):\n self.onStatusSent(None, status)", "def status(self, status):\n allowed_values = [\"C\", \"D\", \"P\", \"I\", \"E\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status):\n allowed_values = [\"NEW\", \"ACCEPTED\", \"DECLINED\", \"REJECTED\", \"DELIVERED\", \"EMAILED\", \"COMPLETED\", \"CANCELLED\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status):\n allowed_values = [\"Pending\", \"Running\", \"Success\", \"Failed\", \"Skipped\", \"SuccessWithWarning\", \"Canceled\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status: str):\n allowed_values = [\"OPEN\", \"WAITING_RESOLUTION\", \"CONFIRMED\", \"CANCELLED\", \"COMPLETED\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\"\n .format(status, allowed_values)\n )\n\n self._status = status", "def set_bounced(self, status: bool):\n self._bounced = status", "def set_status(self, status, comment=None):\n\n self.status_history.create(name=status, comment=comment)\n self.status = status", "def status(self, status):\n allowed_values = [\"co\", \"ne\", \"se\", \"vi\", \"si\", \"do\", \"sd\", \"ca\", \"de\", \"ec\", \"es\", \"xp\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def set_status(self, status):\n if status == \"offline\":\n self._status.set_message(\"N\")\n self._status.set_foreground_color(\"red\")\n \n elif status == \"online\":\n self._status.set_message(\"Y\")\n self._status.set_foreground_color(\"Green\")\n \n elif status == \"away\":\n self._status.set_message(\"A\")\n self._status.set_foreground_color(\"Grey\")\n \n elif status == \"busy\":\n self._status.set_message(\"B\")\n self._status.set_foreground_color(\"Yellow\")", "def account_status(self, account_status):\n\n self._account_status = account_status", "def __set_job_status(self, job: Job):\n\n self.redis_client.set(f'jobstatus:{job.id}:{str(job.status)}', f'job:{job.id}')", "def status(self, status):\n if status is None:\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n allowed_values = [\"queued\", \"running\", \"complete\", \"failed\", \"canceled\", \"expired\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def set_desired_connection_status(self, connection_status: Literal[ConnectionState]) -> None:\n self.desired_connection_status = 
connection_status\n self.publish(self.key_gen(\"desired_connection_status\"), connection_status)", "def status(self, status):\n allowed_values = [\"D\", \"P\", \"V\", \"S\", \"M\", \"I\", \"R\", \"C\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def setStatus(self, newStatus):\n self._status = newStatus", "def workflow_status(self, workflow_status):\n self._workflow_status = workflow_status", "def status(self, status: str):\n allowed_values = [\"waiting\", \"running\", \"complete\", \"failed\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\"\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status: str):\n allowed_values = [\"done\", \"late\", \"in progress\", \"to do\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\".format(\n status, allowed_values\n )\n )\n\n self._status = status", "def connection_status(self, connection_status):\n allowed_values = [\"Unknown\", \"Success\", \"Failure\"]\n if connection_status not in allowed_values:\n raise ValueError(\n \"Invalid value for `connection_status` ({0}), must be one of {1}\"\n .format(connection_status, allowed_values)\n )\n\n self._connection_status = connection_status", "def agent_status(self, agent_status):\n\n self._agent_status = agent_status", "def service_status(self, service_status):\n\n self._service_status = service_status", "async def status(self, ctx:utils.Context, status:str):\n\n status_o = getattr(discord.Status, status.lower())\n await self.bot.change_presence(activity=self.bot.guilds[0].me.activity, status=status_o)", "def campaign(self, campaign):\n\n self._campaign = campaign", "def set_status(self, status: JobStatus, pipeline: Optional['Pipeline'] = None) -> None:\n self._status = status\n connection: 'Redis' = pipeline if pipeline is not None else self.connection\n connection.hset(self.key, 'status', self._status)", "def set_reduction_status(self, status, message, chopped_data):\n # check input\n assert isinstance(\n status, bool), 'Reduction status must be given by bool but not {0}'.format(type(status))\n assert isinstance(message, str), 'Reduction message {0} must be string but not {1}' \\\n ''.format(message, type(message))\n assert isinstance(chopped_data, bool), 'Flag for being chopped run must be boolean but not {0}' \\\n ''.format(type(chopped_data))\n\n self._reductionStatus = status\n self._reductionInformation = message\n self._isChopped = chopped_data\n\n return", "def _set_status(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"status\", rest_name=\"status\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"status must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"status\", rest_name=\"status\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, 
namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__status = t\n if hasattr(self, '_set'):\n self._set()", "def rule_status(self, rule_status):\n if self.local_vars_configuration.client_side_validation and rule_status is None: # noqa: E501\n raise ValueError(\"Invalid value for `rule_status`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n rule_status is not None and len(rule_status) > 6000):\n raise ValueError(\"Invalid value for `rule_status`, length must be less than or equal to `6000`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n rule_status is not None and len(rule_status) < 0):\n raise ValueError(\"Invalid value for `rule_status`, length must be greater than or equal to `0`\") # noqa: E501\n\n self._rule_status = rule_status", "def send_status_update(self, agent_id: str, status: str):\n status_packet = Packet(\n packet_type=PACKET_TYPE_UPDATE_STATUS,\n subject_id=agent_id,\n data={\n \"status\": status,\n },\n )\n self._get_channel_for_agent(agent_id).enqueue_send(status_packet)", "def status(self, status: str):\n allowed_values = [\"available\", \"pending\", \"sold\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\"\n .format(status, allowed_values)\n )\n\n self._status = status", "def setMyStatus(self, status, comment='', REQUEST=None):\n calendar = self.getCalendar()\n calendar_rpath = calendar.getRpath()\n event_id = self.getId()\n (member, member_cn, dtstamp) = self._getRequestInformations()\n for attendee in self.attendees:\n if attendee['rpath'] == calendar_rpath:\n old_status = attendee['status']\n attendee['status'] = status\n if status != old_status:\n if status == 'decline':\n calendar.declineEvent(self)\n if old_status == 'decline':\n calendar.unDeclineEvent(self)\n self._p_changed = 1\n \n # Set up the dict for email notification\n mtool = getToolByName(calendar, 'portal_membership')\n userid = mtool.getAuthenticatedMember().getUserName()\n try:\n cn = self.getAttendeeInfo(calendar.getRpath()).get('cn', id)\n except AttributeError:\n cn = userid\n event_dict = {\n 'id': userid,\n 'request': 'status',\n 'change': ({\n 'attendee': calendar_rpath,\n 'cn': cn,\n 'type': self.getCalendar().usertype,\n 'status': status,\n 'comment': comment,\n 'dtstamp': dtstamp,\n 'sender': member,\n 'sender_cn': member_cn,\n },)\n }\n \n # Change the status for all attendees calendars.\n # Get the attendeelist from the organizers calendar, \n # since new attendees may have been added or old removed.\n org_calendar = self.getOrganizerCalendar()\n org_event = org_calendar._getOb(event_id, None)\n if org_event is None:\n LOG('NGCal', INFO, \"Can't find original event for %s/%s\" \n % (calendar_rpath, event_id))\n return\n \n ctool = getToolByName(self, 'portal_cpscalendar')\n org_attendees = org_event.attendees\n \n for attendee in org_attendees:\n apath = attendee['rpath']\n # Skip this calendar\n if apath == calendar_rpath:\n continue\n acal = ctool.getCalendarForPath(apath, unrestricted=1)\n event = acal._getOb(event_id, None)\n if event is not None:\n event.setAttendeeStatus(calendar_rpath, status)\n \n # Check pending events\n for event in acal._pending_events:\n if event['id'] != event_id:\n continue\n for att in event['event']['attendees']:\n if att['rpath'] == calendar_rpath:\n att['status'] = status\n acal._p_changed 
= 1\n \n # This needs some testing to see that it really does\n # the correct thing.\n acal.notifyMembers(event_dict)\n \n if REQUEST is not None:\n REQUEST.RESPONSE.redirect(self.absolute_url())", "def set_status(self, status):\n if not status == self._status:\n self._status = status\n self.winstance.send_event('State changed to ' + self._status)\n\n self.completed = not self.parent_node.is_job or \\\n self._status == 'COMPLETED'\n\n if self.completed:\n self.publish()\n\n if not self.parent_node.is_job:\n self.failed = False\n else:\n self.failed = self.parent_node.is_job and \\\n (self._status == 'BOOT_FAIL' or\n self._status == 'CANCELLED' or\n self._status == 'FAILED' or\n self._status == 'REVOKED' or\n self._status == 'TIMEOUT')", "def status(self, status):\n allowed_values = [\"REQUESTED\", \"CREATE_IN_PROGRESS\", \"AVAILABLE\", \"UPDATE_IN_PROGRESS\", \"UPDATE_REQUESTED\", \"UPDATE_FAILED\", \"CREATE_FAILED\", \"ENABLE_SECURITY_FAILED\", \"PRE_DELETE_IN_PROGRESS\", \"DELETE_IN_PROGRESS\", \"DELETE_FAILED\", \"DELETE_COMPLETED\", \"STOPPED\", \"STOP_REQUESTED\", \"START_REQUESTED\", \"STOP_IN_PROGRESS\", \"START_IN_PROGRESS\", \"START_FAILED\", \"STOP_FAILED\", \"WAIT_FOR_SYNC\", \"MAINTENANCE_MODE_ENABLED\"]\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\"\n .format(status, allowed_values)\n )\n\n self._status = status", "def _set_status(self, action, status):\n raise NotImplementedError(\"Base class: cannot be called directly\")", "async def set_status(self, status: TransientStatus):\n if isinstance(status, Game):\n await self.http.set_transient_status(status.game_id)\n elif isinstance(status, TransientStatus):\n await self.http.set_custom_status(status.name)\n elif status is None:\n await self.http.delete_transient_status()\n else:\n raise TypeError('status must inherit from TransientStatus (Game, CustomStatus) or be None, not %s' % status.__class__.__name__)", "def campaign_id(self, campaign_id):\n\n self._campaign_id = campaign_id", "def status(self, status):\n if status is None:\n raise ValueError(\"Invalid value for `status`, must not be `None`\") # noqa: E501\n allowed_values = [\"Open\", \"Claimed\", \"Held\", \"Closed\"] # noqa: E501\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status` ({0}), must be one of {1}\" # noqa: E501\n .format(status, allowed_values)\n )\n\n self._status = status", "def status(self, status):\n allowed_values = [\"I\", \"A\", \"S\", \"T\", \"D\"]\n if status not in allowed_values:\n raise ValueError(\n \"Invalid value for `status`, must be one of {0}\"\n .format(allowed_values)\n )\n self._status = status", "def status_id(self, status_id):\n\n self._status_id = status_id", "def update_status(self, update_status):\n allowed_values = [\"investigating\", \"identified\", \"monitoring\", \"resolved\", \"scheduled\", \"in_progress\", \"verifying\", \"completed\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and update_status not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `update_status` ({0}), must be one of {1}\" # noqa: E501\n .format(update_status, allowed_values)\n )\n\n self._update_status = update_status", "def setModerationStatus(self, status):\n kSetStatus(self, status)", "def update_remediation_status(self, status):\n self.remediation_status = status" ]
[ "0.6368695", "0.61262167", "0.5973353", "0.5973353", "0.5973353", "0.5827563", "0.57827497", "0.5776234", "0.5757022", "0.5722353", "0.5680996", "0.56777996", "0.5648092", "0.5639821", "0.5627372", "0.55989665", "0.5592994", "0.5582759", "0.5582326", "0.55720466", "0.55364394", "0.5512199", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450819", "0.5450301", "0.54464865", "0.544063", "0.5438748", "0.5429301", "0.54194117", "0.53894943", "0.53894943", "0.53894943", "0.53894943", "0.53894943", "0.53894943", "0.53894943", "0.53877795", "0.53877795", "0.53876", "0.5385313", "0.5383033", "0.53740764", "0.53694654", "0.5350758", "0.5350143", "0.53489906", "0.53414345", "0.53167367", "0.53116596", "0.52688456", "0.52640504", "0.5262957", "0.525421", "0.52535313", "0.5235774", "0.5233591", "0.5233398", "0.5232183", "0.5229908", "0.5223023", "0.5177068", "0.5158022", "0.5142775", "0.5141613", "0.51349974", "0.5125516", "0.51195157", "0.510218", "0.50996935", "0.50961936", "0.5093193", "0.50777763", "0.5077472", "0.5075783", "0.5070468", "0.5050711", "0.5036289", "0.5025739", "0.49843845" ]
0.7548788
0
Gets the callable_time_set of this MessagingCampaign. The callable time set for this messaging campaign.
def callable_time_set(self): return self._callable_time_set
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def callable_time_set(self, callable_time_set):\n \n self._callable_time_set = callable_time_set", "def schedule_times(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"schedule_times\")", "def getScheduleOnset(self):\n return DPxGetDinSchedOnset()", "def get_schedules(self):\n return self.__schedules", "def schedule_times(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"schedule_times\")", "def get_time_points(self):\n return self._time", "def scheduledTimes(self, runnable):\n events = self.store.query(\n TimedEvent, TimedEvent.runnable == runnable)\n return (event.time for event in events if not event.running)", "def schedule(self):\n return self._schedule", "def schedule(self):\n return self._schedule", "def schedule(self):\n return self._schedule", "def queue_times(self):\r\n queue_times = []\r\n for task in self.__tasks.values():\r\n if task.complete():\r\n queue_times.append(task.queued_time())\r\n return queue_times", "def time_scoping(self):\n return self._time_scoping", "def get_times(self):\n raise NotImplementedError(\"Abstract method not implemented.\")", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def _get_schedulers(self):\n return self.__schedulers", "def queue_times(self):\r\n return [task.scheduler_launch_time - self.__arrival_time\r\n for task in self.__tasks.values() if task.complete()]", "def get_last_set(self):\n return self.set", "def getSchedules(self) :\n return self.schedules", "def get_time(option_set):\n return option_set & TIME_MASK", "def get_timed_events(self):\n return self.dispatcher.timed_events", "def timers(self):\n return self['timers']", "def sets(self):\n return self._loaded_and_cached(gdxcc.GMS_DT_SET)", "def timings(self):\n if self._C_timings is None:\n raise RuntimeError(\"Cannot extract timings with non-finalized Profiler.\")\n return {field: max(getattr(self._C_timings, field), 10**-6)\n for field, _ in self._C_timings._fields_}", "def get_best_times(self):\n\n return self.best_times", "def get_recentmost_queuetime(self):\r\n if not self.is_queued():\r\n return None\r\n\r\n # Get a list of timestamps of all queueing requests, then convert it to a DateTime object\r\n queuetime_strs = [\r\n self.correct_map.get_queuetime_str(answer_id)\r\n for answer_id in self.correct_map\r\n if self.correct_map.is_queued(answer_id)\r\n ]\r\n queuetimes = [\r\n datetime.strptime(qt_str, xqueue_interface.dateformat).replace(tzinfo=UTC)\r\n for qt_str in queuetime_strs\r\n ]\r\n\r\n return max(queuetimes)", "def time(self):\n return self[self.time_columns]", "def time(self):\n return self[self.time_columns]", "def get_end_time(self):\n return max([m.get_end_time() for m in self._mappers])", "def get_time_strs(self):\n\n log(\"Getting time strings starting at {}\".format(self._t0))\n tz = dt.timezone.utc\n mkdt = lambda n: dt.datetime.fromtimestamp(\n self._t0 - (self._delta * n),\n tz=tz\n )\n ns = range(self._frames, 0, -1)\n return [mkdt(n).strftime('%Y%m%d%H%M') for n in ns]", "def getSchedulers():", "def analysisStartTime(self) -> WQXTime:\r\n return self.__analysisStartTime", "def 
all_schedules(self):\n return self._all_schedules", "def computation_times(self) -> List[float]:\r\n return self._computation_times", "def get_time_col(self):\n return self.time_col", "def service_times(self):\r\n service_times = [task.service_time() for task in self.__tasks.values() if task.complete(True)]\r\n return service_times", "def service_times(self):\r\n service_times = []\r\n for task in self.__tasks.values():\r\n if task.complete():\r\n x = task.service_time()\r\n service_times.append(task.service_time())\r\n return service_times", "def time_synchronization(self):\n return self.client.call('GET', self.name + 'time-synchronization')", "def times(self):\n if self.isActive:\n times = copy.deepcopy(self._times)\n times[-1] = (self._times[-1][0], MasterTimer.time())\n return times\n else:\n return self._times", "def ensemble_times(self):\n return self['validtime'].values", "def _get_set_mpls_tc(self):\n return self.__set_mpls_tc", "def _get_set_mpls_tc(self):\n return self.__set_mpls_tc", "def _get_set_mpls_tc(self):\n return self.__set_mpls_tc", "def _get_set_mpls_tc(self):\n return self.__set_mpls_tc", "def _get_set_mpls_tc(self):\n return self.__set_mpls_tc", "def _get_set_mpls_tc(self):\n return self.__set_mpls_tc", "def synchronize_schedule(self):\n\n return self._synchronize_schedule", "def get_best_schedule(self):\n # load the model weights\n self.models = [load_model(f'dqn_{task_id}.h5')\n for task_id in range(len(self.models))]\n\n actions = []\n is_scheduled = [0] * len(self.models)\n\n while (not all(is_scheduled)):\n observation = OrderedDict([('is_scheduled', is_scheduled)])\n best_action = self._get_best_action(observation)\n actions.append(best_action)\n is_scheduled[best_action['task_id']] = best_action['start_time']\n\n return actions", "def run_times(self):\n if self.__hasrun:\n return self._run_times\n else:\n raise ValueError(\"Cannot report unmeasured times.\")", "def timeRange(self):\r\n _times = self.getTimes()\r\n return _times[0], _times[-1]", "def analysisEndTime(self) -> WQXTime:\r\n return self.__analysisEndTime", "def scheduled_reset_at(self):\n return self._scheduled_reset_at", "def get(self):\n try:\n result = load_schedules_from_file()\n return result\n except Exception:\n logging.exception('Failed to get Celery Beat schedules!')\n raise", "def get_tasks(self):\n return self.task_collection", "def _schedule(self):\n return self._event['schedule_expression']", "def start_and_service_times(self):\r\n return [(x.scheduler_launch_time, x.service_time()) for x in self.__tasks.values()\r\n if x.complete()]", "def get_primitive_set(self):\n return self.primitive_listener.get_most_recent_message()", "def timings(self):\r\n return self._timings", "def starts(self):\n return self.time_start", "def get_time(self):\n return self.time_param", "def get_settemp(self):\n return self.settemp", "def register_time(self):\n\n return self._register_time", "def scheduled_builds(self):\n return self._scheduled_builds", "def synictimer(self):\n return self._synictimer", "def get_time(self):\n return self._ticks", "def isSetTimeConversionFactor(self):\n return _libsbml.Submodel_isSetTimeConversionFactor(self)", "def get_campaigns_in_interval(self, start_time, end_time, **kwargs):\n \n if 'campaign_filter' in kwargs:\n campaign_filter = kwargs['campaign_filter']\n if not(isinstance(campaign_filter, str)):\n campaign_filter = ''\n else:\n campaign_filter = MySQLdb._mysql.escape_string(str(campaign_filter))\n \n \"\"\" Escape parameters \"\"\"\n start_time = 
MySQLdb._mysql.escape_string(str(start_time).strip())\n end_time = MySQLdb._mysql.escape_string(str(end_time).strip())\n \n sql = \"select utm_campaign \" + \\\n \"from drupal.contribution_tracking left join civicrm.civicrm_contribution on (drupal.contribution_tracking.contribution_id = civicrm.civicrm_contribution.id) \" + \\\n \"where ts >= '%s' and ts < '%s' and utm_campaign regexp '%s' group by 1\" % (start_time, end_time, campaign_filter)\n \n results = self.execute_SQL(sql)\n \n campaigns = list()\n for row in results:\n campaigns.append(str(row[0]))\n \n return campaigns", "def get_time_info(self):\n\n raise NotImplementedError", "def _get_task_queues():\n\n return _thread_data.__dict__.setdefault('task_queues', defaultdict(list))", "def get_start_time(self):\n return min([m.get_start_time() for m in self._mappers])", "def schedule(self):\n\n crontab = self._crontab\n return datetime.now() + timedelta(\n seconds=math.ceil(\n crontab.next(default_utc=False)\n )\n )", "def scheduler(self):\n return self._scheduler", "def get_times(self):\n times = []\n for i in range(1, len(self.events)):\n times.append(self.events[i-1].elapsed_time(self.events[i]))\n return times", "def get_timescale_stringlist(self):\n return text_timescale", "def computation_time(self) -> float:\r\n if self._computation_times is None:\r\n return None\r\n else:\r\n return self._computation_times[0]", "def set(self):\n return self.cdb.code_to_card_set[self.set_code]", "def pre_schedule(self):\n return []", "def schedule_time(self) -> str:\n return pulumi.get(self, \"schedule_time\")", "def get_schedules(self) -> List[SwitcherV2Schedule]:\n return self._schedule_list", "def get_temps_cuisson(self):\n return self.temps_cuisson", "def get_fan_set_point(self):\n return self.__fan_set_point", "def get_time(self):\n return self.time", "def time_slice(self):\n return self._time_slice", "def scheduled_plan(self):\n return self._scheduled_plan", "def enqueued_time(self):\n timestamp = self._annotations.get(EventData.PROP_TIMESTAMP, None)\n if timestamp:\n return datetime.datetime.fromtimestamp(float(timestamp)/1000)\n return None", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler", "def _get_scheduler(self):\n return self.__scheduler" ]
[ "0.6893189", "0.55124503", "0.54107213", "0.5211071", "0.5081652", "0.5053929", "0.4999273", "0.4996595", "0.4996595", "0.4996595", "0.49755397", "0.4962994", "0.49513596", "0.4948963", "0.4948963", "0.4948963", "0.4948963", "0.4948963", "0.4948963", "0.4948963", "0.4948963", "0.4948963", "0.4880842", "0.48680344", "0.48603994", "0.47949654", "0.4780974", "0.47346133", "0.47227257", "0.4699778", "0.4693767", "0.46781304", "0.4677717", "0.4677717", "0.46044725", "0.45975667", "0.45793787", "0.45683047", "0.45599008", "0.45528638", "0.45514596", "0.4551134", "0.4546137", "0.45287952", "0.4522322", "0.4509267", "0.44872975", "0.44872975", "0.44872975", "0.44872975", "0.44872975", "0.44872975", "0.44786704", "0.44706002", "0.44618717", "0.44598767", "0.44547004", "0.445341", "0.44455114", "0.44422942", "0.4438035", "0.44376612", "0.44181508", "0.44107682", "0.4376859", "0.4375779", "0.43733174", "0.43676758", "0.4366232", "0.43646", "0.4363188", "0.43582383", "0.4339131", "0.433196", "0.43266466", "0.43189824", "0.43160078", "0.431381", "0.43078044", "0.4295272", "0.4294419", "0.4293934", "0.42888427", "0.42887226", "0.427667", "0.42751014", "0.4269523", "0.4268737", "0.42596045", "0.4258007", "0.42570993", "0.4249423", "0.4249423", "0.4249423", "0.4249423", "0.4249423", "0.4249423", "0.4249423", "0.4249423", "0.4249423" ]
0.8133848
0
Sets the callable_time_set of this MessagingCampaign. The callable time set for this messaging campaign.
def callable_time_set(self, callable_time_set): self._callable_time_set = callable_time_set
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def callable_time_set(self):\n return self._callable_time_set", "def set_time(self, set_time):\n\n self._set_time = set_time", "def setScheduleOnset(self, onset):\n DPxSetDinSchedOnset(onset)", "def setShowCallables(self, show_callables):\n logger.debug(\"setShowCallables: {}\".format(show_callables))\n self._show_callables = show_callables\n self.invalidateFilter()", "def collection_time(self, collection_time):\n\n self._collection_time = collection_time", "def set_enqueue_time(self, time):\n self.enqueue_time = time\n for task in self.tasks:\n task.enqueue_time = time", "def set_exec_time(self, time):\n for task in self.tasks:\n task.exec_time = time", "def scheduled_at(self, scheduled_at):\n\n self._scheduled_at = scheduled_at", "def set_set_later(self, value):\r\n self.set_later = value", "def time_utc(self, time_utc):\n\n self._time_utc = time_utc", "def scheduled_reset_at(self, scheduled_reset_at):\n\n self._scheduled_reset_at = scheduled_reset_at", "def setSubmitTime(t):", "def set(self):\n now = time.time()\n remove = None\n for ident, event in self.events.items():\n if not event[0].isSet():\n # if this client's event is not set, then set it\n # also update the last set timestamp to now\n event[0].set()\n event[1] = now\n else:\n # if the client's event is already set, it means the client\n # did not process a previous frame\n # if the event stays set for more than 5 seconds, then assume\n # the client is gone and remove it\n if now - event[1] > 5:\n remove = ident\n if remove:\n del self.events[remove]", "def valkkafsmanager_set_time_cb(self, t):\n self.signals.set_time.emit(t)", "def set_speaker_time(self, datetime):\n params = [\n ('year', datetime.year),\n ('month', datetime.month),\n ('day', datetime.day),\n ('hour', datetime.hour),\n ('min', datetime.minute),\n ('sec', datetime.second),\n ]\n\n self.get(COMMAND_UIC, 'SetSpeakerTime', params)", "def set_response_time(self, time):\n for task in self.tasks:\n task.response_time = time", "def _callback_local_setpoint(self, local_setpoint):\n # type: (PositionTarget) -> None\n self.local_setpoint = local_setpoint\n return", "def submit_time(self, submit_time: datetime):\n\n self._submit_time = submit_time", "def svn_info_t_schedule_set(svn_info_t_self, svn_wc_schedule_t_schedule): # real signature unknown; restored from __doc__\n pass", "def set_analysis_time(self, t):\n for z in self.zones:\n z.set_demand_rate_per_t(t)", "def setup_channel_set(channel_set):\n\n #amfast.logger = log.logger\n\n # Map service targets to controller methods\n cont_obj = app.controller.Controller()\n service = Service('DAService')\n service.mapTarget(CallableTarget(cont_obj.get_player_info, 'get_player_info'))\n service.mapTarget(CallableTarget(cont_obj.do_move, 'do_move'))\n service.mapTarget(CallableTarget(cont_obj.do_attack, 'do_attack'))\n service.mapTarget(CallableTarget(cont_obj.get_news, 'get_news'))\n service.mapTarget(CallableTarget(cont_obj.get_floor, 'get_floor'))\n service.mapTarget(CallableTarget(cont_obj.get_monster, 'get_monster'))\n service.mapTarget(CallableTarget(cont_obj.raiseException, 'raiseException'))\n channel_set.service_mapper.mapService(service)", "def set_last_submission_time(self):\r\n self.last_submission_time = datetime.datetime.now(UTC())", "def setTimepoint(self, tp):\n\t\tpass", "def setConcurrentTasks(self, config):\n self.concurrentTasks = [{'func': self.logDBCleanUp, 'duration': config.logDBCleanDuration}]", "def permission_sets(self, permission_sets):\n\n self._permission_sets = permission_sets", "def 
set_time(self, time_fn):\n self.time_fn = time_fn\n self.socket.send_string(f'T {time_fn()}')\n return self.socket.recv_string()", "def set_schedule(self, time: str, handler: Callable, **kwargs) -> None:\n if time in (\"sunrise\", \"sunset\"):\n method = getattr(self, \"run_at_{0}\".format(time))\n method(handler, **kwargs, constrain_enabled=True)\n else:\n self.run_daily(\n handler, self.parse_time(time), **kwargs, constrain_enabled=True\n )", "def scheduled_plan(self, scheduled_plan):\n\n self._scheduled_plan = scheduled_plan", "async def set_chat_sticker_set(self, chat_id: typing.Union[base.Integer, base.String],\n sticker_set_name: base.String) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SET_CHAT_STICKER_SET, payload)\n\n return result", "def time_settime(currenttime):\r\n\r\n time_query_times.append((getruntime(), currenttime))", "def set_fan_timer_timeout(self, time_: str = None):\r\n if time_ is not None:\r\n self._fan_timer_timeout = datetime.fromisoformat(time_)\r\n else:\r\n self._fan_timer_timeout = datetime.now() + self._fan_timer_duration\r\n\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"fan_timer-timeout\", self.fan_timer_timeout))", "def setup_channel_set(channel_set):\n\n # Send log messages to STDOUT\n handler = logging.StreamHandler(sys.stdout)\n handler.setLevel(logging.DEBUG)\n amfast.logger.addHandler(handler)\n\n # Map service targets to controller methods\n cont_obj = controller.Controller()\n service = Service('ExampleService')\n service.mapTarget(CallableTarget(cont_obj.echo, 'echo'))\n service.mapTarget(CallableTarget(cont_obj.raiseException, 'raiseException'))\n channel_set.service_mapper.mapService(service)", "def set_time(self, value: float):\n raise NotImplementedError()", "def set_time(self, time):\n self._time = time", "def on_set(self, callback):\n self._set_callback = callback if callable(callback) else _void", "def on_set(self, callback):\n self._set_callback = callback if callable(callback) else _void", "def set_imeastime(self, time):\n self.itime = time", "def set_time(self, datetime):\n\n self.set_year(datetime[0])\n self.set_month(datetime[1])\n self.set_day(datetime[2])\n\n # Optional Hour\n if len(datetime) > 3:\n self.set_hour(datetime[3])\n else:\n self.set_hour(0)\n\n # Optional Minute\n if len(datetime) > 4:\n self.set_minute(datetime[4])\n else:\n self.set_minute(0)\n\n # Optional Second\n if len(datetime) > 5:\n self.set_second(datetime[5])\n else:\n self.set_second(0)", "def setSetPoint(self, set_point):\r\n\t\tself.SetPoint = set_point", "def scheduled_message_was_sent(self, user_id, scheduled_ts):\n if user_id not in self.user_id_to_scheduled_message_ts:\n self.user_id_to_scheduled_message_ts[user_id] = set()\n self.user_id_to_scheduled_message_ts[user_id].add(scheduled_ts)", "def time_created(self, time_created):\n self._time_created = time_created", "def time_created(self, time_created):\n self._time_created = time_created", "def time_created(self, time_created):\n self._time_created = time_created", "def time_created(self, time_created):\n self._time_created = time_created", "def completion_time(self, completion_time: datetime):\n\n self._completion_time = completion_time", "def bcp_set(self, **kwargs):\n pass", "def available_schedule_types(self, available_schedule_types):\n\n self._available_schedule_types = available_schedule_types", "def docked_time(self, docked_time):\n\n self._docked_time = docked_time", "def set_sent(self, time=datetime.utcnow()) -> 
None:\n if not self._expire_task:\n self._expire_task = asyncio.create_task(self._expire())\n if self._attempts >= 1:\n query = self._command.set_query(qos=0)\n self._connection.send_command(query)\n if self._attempts == 0:\n self._command.raise_qos() # prioritize resends\n self._attempts += 1\n self._time_sent = time\n self._qos_task = asyncio.create_task(self._resend_command())", "def registration_time(self, registration_time):\n\n self._registration_time = registration_time", "def set_startTime(self, startTime):\n self.startTime = mktime(startTime)", "def scheduled_reset_period(self, scheduled_reset_period):\n\n self._scheduled_reset_period = scheduled_reset_period", "def setInitialTime(self, t0):\n _cantera.reactornet_setInitialTime(self.__reactornet_id, t0)", "def setConcurrentTasks(self, config):\n self.concurrentTasks = [{'func': self.gatherActiveDataStats, 'duration': config.activeDuration}, \n {'func': self.gatherArchivedDataStats, 'duration': config.archiveDuration}]", "def urlset(self, urlset):\n if urlset is None:\n raise ValueError(\"Invalid value for `urlset`, must not be `None`\") # noqa: E501\n\n self._urlset = urlset", "def management_password_set(self, management_password_set):\n\n self._management_password_set = management_password_set", "def set_flag(self, set_flag):\n\n self._set_flag = set_flag", "def date_time(self, date_time):\n\n self._date_time = date_time", "def __init__(self, callable_, time=1):\n Function.__init__(self) # callable_ could go here\n self.time = time\n self.callable = callable_", "def set_cooling_schedule(self, lamb_0, target_precision, eta=3/4):\n self.cs = sc.CoolingSchedule(lamb_0, eta)\n self.cs.set_precision_schedule(target_precision)", "def send_reminders(self, send_reminders):\n\n self._send_reminders = send_reminders", "def time_windows(self, time_windows):\n\n self._time_windows = time_windows", "def set_scheduler(self, scheduler):\n self.scheduler = scheduler", "def synictimer(self, synictimer):\n\n self._synictimer = synictimer", "def _publish_setpoint(self, setpoint):\n # type: (PositionTarget) -> None\n t = threading.current_thread()\n while not rospy.is_shutdown() and getattr(t, \"do_run\", True):\n setpoint.header = Header()\n setpoint.header.stamp = rospy.Time.now()\n self._pub_setpoint.publish(setpoint)\n self._rate_publish.sleep()\n return", "def setTime(self, *args):\n return _osgAnimation.Keyframe_setTime(self, *args)", "def scheduled_builds(self, scheduled_builds):\n\n self._scheduled_builds = scheduled_builds", "def _function_set(self, data_length=self.data_length, number_of_lines=self.number_of_lines, character_font=self.character_font):\n function_set_mask = 32\n data = funtion_set_mask | (data_length << 4) | (number_of_lines << 3) | (character_font << 2)\n\n self.instruction(data)", "def set_remain_time(self, time):\n for task in self.tasks:\n task.remain_time = time", "def setTimeConversionFactor(self, *args):\n return _libsbml.Submodel_setTimeConversionFactor(self, *args)", "def action_time(self, action_time):\n\n self._action_time = action_time", "def schedule(self, schedule):\n\n self._schedule = schedule", "def schedule(self, schedule):\n\n self._schedule = schedule", "def delivery_time(self, delivery_time):\n\n self._delivery_time = delivery_time", "def schedule(self, schedule):\n \n self._schedule = schedule", "def start_time(self, start_time):\n\n self._start_time = start_time", "def start_time(self, start_time):\n\n self._start_time = start_time", "def start_time(self, start_time):\n\n self._start_time = 
start_time", "def start_time(self, start_time):\n\n self._start_time = start_time", "def start_time(self, start_time):\n\n self._start_time = start_time", "def cron_time_zone(self, cron_time_zone):\n\n self._cron_time_zone = cron_time_zone", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def create_time(self, create_time):\n\n self._create_time = create_time", "def last_message_dts(self, last_message_dts):\n\n self._last_message_dts = last_message_dts", "def flowers(self, flowers):\n\n self._flowers = flowers", "def eventcorrelationpolicysets(self, eventcorrelationpolicyset_id, data, tenant_id=None, api_version=\"v2.0\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/eventcorrelationpolicysets/{}\".format(api_version,\n tenant_id,\n eventcorrelationpolicyset_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)", "def last_executed(self, last_executed):\n\n self._last_executed = last_executed", "def setUseValuesFromTriggerTime(self, *args):\n return _libsbml.Event_setUseValuesFromTriggerTime(self, *args)", "def setSetPoint(self, set_point, clearPID=False):\n\t\tself.SetPoint = set_point\n\t\tif (clearPID): self.clear()", "def free_flight_time(self, free_flight_time):\n\n self._free_flight_time = free_flight_time", "def set_schedule(self, new_schedule):\n #first, set all the others to inactive\n\n new_schedule.deprecated=False\n if new_schedule.started == None or new_schedule.started <= datetime.utcnow():\n new_schedule.started=datetime.utcnow()\n for sched in self.weekly_schedule:\n if not sched.deprecated:\n #sched.deprecated=True\n sched.ended=datetime.utcnow()\n sched.save()\n elif new_schedule.started > datetime.utcnow():\n #if it's in the future, then don't deprecate the future schedule, just procede along and let the system set the dates correctly\n pass\n self.weekly_schedule.append(new_schedule)\n self.save()", "def time_accepted(self, time_accepted):\n self._time_accepted = time_accepted" ]
[ "0.65908307", "0.6178317", "0.5953841", "0.5100885", "0.49795374", "0.49558243", "0.483162", "0.48207587", "0.48039177", "0.477478", "0.47390914", "0.47065333", "0.47013178", "0.4697793", "0.4662012", "0.46349522", "0.45914975", "0.45656434", "0.45478013", "0.45220882", "0.45187205", "0.45013884", "0.44955614", "0.44649416", "0.44500446", "0.4425367", "0.4389864", "0.43658504", "0.4364397", "0.43598932", "0.43442136", "0.4324883", "0.43081462", "0.43070853", "0.4290141", "0.4290141", "0.4284424", "0.4281537", "0.4276476", "0.42580923", "0.42493138", "0.42493138", "0.42493138", "0.42493138", "0.42454988", "0.4233168", "0.4232278", "0.42158473", "0.42086035", "0.4206167", "0.42056376", "0.4202193", "0.41992173", "0.41975892", "0.41893938", "0.41870674", "0.41834915", "0.4182596", "0.41650227", "0.415735", "0.4156985", "0.41517842", "0.4148513", "0.41480166", "0.41466716", "0.41449624", "0.4142684", "0.4136335", "0.4129184", "0.4127299", "0.4126929", "0.4123319", "0.4123319", "0.41220143", "0.41114715", "0.40985385", "0.40985385", "0.40985385", "0.40985385", "0.40985385", "0.40957358", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40915245", "0.40736863", "0.40611356", "0.4054578", "0.40457457", "0.40348375", "0.40318465", "0.40247127", "0.40245688", "0.4022501" ]
0.8699226
0
Gets the contact_list of this MessagingCampaign. The contact list that this messaging campaign will send messages for.
def contact_list(self): return self._contact_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_contacts(self):\n return self.contacts", "def get_contacts(self):\n\n\t\treturn self.__contacts", "def GetContactList(self):\n\t\tfeeds = []\n\t\tfeed = self.client.GetContacts()\n\t\tfeeds.append(feed)\n\t\tnext = feed.GetNextLink()\n\t\twhile next:\n\t\t\tfeed = self.client.GetContacts(uri=next.href)\n\t\t\tfeeds.append(feed)\n\t\t\tnext = feed.GetNextLink()\n\t\t\n\t\tcontacts = []\n\t\tfor feed in feeds:\n\t\t\tif not feed.entry:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tfor i, entry in enumerate(feed.entry):\n\t\t\t\t\tcontacts.append(entry)\n\t\treturn contacts", "def get_contacts_list(self):\n return [(id + 1, contact) for id, contact in enumerate(self.contact_list)]", "def contact_lists(self):\n from hubspot3.contact_lists import ContactListsClient\n\n return ContactListsClient(**self.auth, **self.options)", "def get_contacts(self):\n contacts = Membership.objects.filter(entity = self, key_contact = True).order_by('importance_to_entity')\n return contacts", "def contacts(self):\r\n return contacts.Contacts(self)", "def ListAllContacts(self):\n feed = self.gd_client.GetContacts()\n self.contacts = self.CleanPhoneNumbers(self.GetContactsInfo(feed))\n return self.contacts", "def contacts(self):\n return ContactCollection(self.request)", "def _get_receivers_list(self):\n\n # TODO: document what this plugin expects to be in Dockerfile/where it gets info from\n global_component = self._get_component_label()\n # this relies on bump_release plugin configuring source.git_commit to actually be\n # branch name, not a commit\n if not isinstance(self.workflow.source, GitSource):\n raise PluginFailedException('Source is not of type \"GitSource\", panic!')\n git_branch = self.workflow.source.git_commit\n try:\n r = requests.get(urljoin(self.pdc_url, 'rest_api/v1/release-component-contacts/'),\n headers={'Authorization': 'Token %s' % self._get_pdc_token()},\n params={'global_component': global_component,\n 'dist_git_branch': git_branch,\n 'role': self.pdc_contact_role},\n verify=self.pdc_verify_cert)\n except requests.RequestException as e:\n self.log.error('failed to connect to PDC: %s', str(e))\n raise RuntimeError(e)\n\n if r.status_code != 200:\n self.log.error('PDC returned status code %s, full response: %s',\n r.status_code, r.text)\n raise RuntimeError('PDC returned non-200 status code (%s), see referenced build log' %\n r.status_code)\n\n contacts = r.json()\n\n if contacts['count'] == 0:\n self.log.error('no %s role for the component', self.pdc_contact_role)\n raise RuntimeError('no %s role for the component' % self.pdc_contact_role)\n\n send_to = []\n for contact in contacts['results']:\n send_to.append(contact['contact']['email'])\n\n return send_to", "def get_recipients(self) -> List[Client]:\n\n index_list = [i for i in range(len(self.int_var_list)) if self.int_var_list[i].get() == 1]\n return [self.client_list[i] for i in index_list]", "def get_contacts_list(self):\n contacts = self.driver.find_elements_by_class_name(\"_1wjpf\")\n s= [contact.text for contact in contacts] #extracts chats and last messsages\n print (\"get contacts: \"+str(s)) #print only chat names\n return s[::2] #returns only chat names", "def get_active_contact(self):\n list_contact = Contact.objects.filter(phonebook__campaign=self.id,\n status=CONTACT_STATUS.ACTIVE).all()\n if not list_contact:\n return False\n return list_contact", "def contact(self):\n return self._contact", "def contact(self):\n return self._contact", "def get_contacts(self, count=-1, excluded_guid=None):\n current_len = 
len(self._contacts)\n if current_len == 0 or count == 0:\n return []\n\n if count < 0:\n count = current_len\n else:\n count = min(count, current_len)\n\n if excluded_guid is None:\n # Get the last `count` contacts.\n contact_list = self._contacts[-count:]\n else:\n contact_list = []\n for contact in reversed(self._contacts):\n if contact.guid == excluded_guid:\n continue\n contact_list.append(contact)\n if len(contact_list) >= count:\n break\n return contact_list", "async def get_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.get_certificate_contacts(\n vault_base_url=self._vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def get_cached_contacts(self):\n return list(self._replacement_cache)", "def get_all_contacts(self):\n self.init_db(self._testing)\n\n query = \"SELECT {} FROM {} ORDER BY id;\".format(\", \".join(Contact.columns_with_uid), Contact.table_name)\n\n data = self.db.conn.execute(query)\n\n return [Contact(*item) for item in data]", "def get_queryset(self):\n contact_data = Contact.objects.filter(contact_groups__in=Member.objects.filter(\n user=self.request.user).values('group_id').distinct())\n\n return contact_data", "def Contact(self):\n return self.__contact", "def support_contacts(self):\n return self._support_contacts", "def contacts(self):\n from hubspot3.contacts import ContactsClient\n\n return ContactsClient(**self.auth, **self.options)", "def get_queryset(self):\n return self.request.user.contacts.all()", "def receiveContactList(self, contactList):", "def update_contacts(self):\n self.contacts = self.db.list_contacts()\n return self.list_contacts()", "def list_contact(self, key, value):\n self.db.list_contact(\n key,\n value,\n )", "def get_contact(self, username, password):\n\t\tdn, username = self.auth(username, password)\n\t\tif self.is_blacklisted(username):\n\t\t\traise ServiceForbidden()\n\n\t\tuser = self.get_udm_user(username=username)\n\t\tif not self.send_plugins:\n\t\t\traise ServiceForbidden()\n\n\t\treturn [{\n\t\t\t\"id\": p.send_method(),\n\t\t\t\"label\": p.send_method_label(),\n\t\t\t\"value\": user[p.udm_property]\n\t\t} for p in self.send_plugins.values() if p.udm_property in user]", "def contacts(request):\n User = get_user_model()\n ids = set(request.user.chatmessage_set.all().values_list(\"recipients\", flat=True))\n context = {\n 'contacts': User.objects.filter(pk__in=ids)\n }\n return render(request, \"chat/contacts.html\", context)", "def contact_info(self):\n return self._contact_info", "def contacts(self):\n service_root = self._get_webservice_url(\"contacts\")\n return ContactsService(service_root, self.session, self.params)", "def cc_email_address(self):\n return self._cc_recipients", "def getcontacts():\n contacts = {}\n\n try:\n #get list of contact ids\n contactids = r.smembers(\"contacts\")\n\n #for each contact id get data\n for contactid in contactids:\n contacts.update(_getcontact(str(contactid)))\n return contacts\n except:\n print \"Unexpected error:\", sys.exc_info()[0]\n raise", "def get_messages(self):\n return self.addresses", "def fetch_contact_messages(self, org, contact, created_after, created_before):\n pass", "def get_message_list(self):\n \n result = requests.get(\n url = root_url + '/{}'.format(\"message\"),\n headers = { 'Authorization': api_key },\n )\n\n message_list = result.json()\n\n self.message_list = message_list", "def contact_list(self, contact_list):\n \n self._contact_list = 
contact_list", "def update_contacts(self, contact_list):\n updated_contacts = 0\n request_list = list()\n\n # stale_contacts contains all old contacts at first, all current\n # contacts get then removed so that the remaining can get deleted\n stale_contacts = set(self.contacts)\n\n for contact in contact_list:\n c = Persona.query.get(contact[\"id\"])\n\n if c is None:\n c = Persona(id=contact[\"id\"], _stub=True)\n\n if c._stub is True:\n request_list.append(contact[\"id\"])\n\n try:\n # Old and new contact; remove from stale list\n stale_contacts.remove(c)\n except KeyError:\n # New contact\n self.contacts.append(c)\n updated_contacts += 1\n\n # Remove old contacts that are not new contacts\n for contact in stale_contacts:\n self.contacts.remove(contact)\n\n app.logger.info(\"Updated {}'s contacts: {} added, {} removed, {} requested\".format(\n self.username, updated_contacts, len(stale_contacts), len(request_list)))\n\n return request_list", "def get_drip_campaigns(self):\n return list(DripCampaign.objects(user_id=self.user_id))", "def contacts(self):\n if \"contacts\" in self._prop_dict:\n return ContactsCollectionPage(self._prop_dict[\"contacts\"])\n else:\n return None", "def get_list_of_campaigns(self, limit=0, offset=0):\n logger.info(\"Function call: get_list_of_campaigns\")\n return self.__handle_result(self.__send_request('campaigns', 'GET', {'limit': limit or 0, 'offset': offset or 0}))", "def get_campaign_name_list(self):\n campaigns = self.find('campaigns', {})\n campaign_names = []\n for campaign in campaigns:\n if 'name' in campaign:\n campaign_names.append(campaign['name'])\n return campaign_names", "def billing_contact(self):\n return self._billing_contact", "def get_a_contact(self, uid):\n self.init_db(self._testing)\n\n query = \"SELECT {} FROM {} WHERE (id=?) ORDER BY id;\".format(\n \", \".join(Contact.columns_with_uid), Contact.table_name)\n\n data = self.db.conn.execute(query, (uid,))\n\n return [Contact(*item) for item in data]", "def recipients(self) -> ty.List[str]:", "def search_contact_list(self):\n\n search_db = Database()\n result = search_db.contact_search(self.name)\n if not result:\n print Fore.YELLOW + ' No such contact'\n return None\n if result > 1:\n print ' Which contact ??'\n for items in result:\n if items[2] > 1:\n print Fore.BLUE + ' %s %s %s' % ([items[0]], items[1], items[2])\n else:\n print str(items[1]), items[2]\n\n return result", "def contact_info(self):\n return [\n {\n 'contact_info': c.get('contactInfo'),\n 'type': c.get('type'),\n 'primary': c.get('primary'),\n 'verified': c.get('verified'),\n }\n for c in self.entity_payload.get('contactInfo')]", "def resulting_contact(self):\n return self._resulting_contact", "def getallcontacts(self):\n feed_url = self.contacts_client.GetFeedUri(projection='full')\n total_read = 0\n while True:\n print('Retrieving contacts... 
(%d retrieved so far)' % total_read)\n feed = self.contacts_client.get_feed(uri=feed_url,\n auth_token=None,\n desired_class=gdata.contacts.data.ContactsFeed)\n total_read += len(feed.entry)\n for entry in feed.entry:\n yield entry\n next_link = feed.GetNextLink()\n if next_link is None:\n print('All contacts retrieved: %d total' % total_read)\n break\n feed_url = next_link.href", "def source_contact(self):\n return self._source_contact", "def contact_information(self) -> ContactInformation:\n return self._contact_information", "def get_contacts(self, uuid=None, urn=None, group=None, deleted=None, before=None, after=None):\n params = self._build_params(uuid=uuid, urn=urn, group=group, deleted=deleted, before=before, after=after)\n return self._get_query('contacts', params, Contact)", "def get_contacts():\n return jsonify(g.driver.get_contacts())", "def contact_points(self) -> object:\n return self._contact_points", "def get_contacts_by_company(self, company_id):\n\n contacts = self._request('getContactsByCompany', {'company_id': company_id})\n for contact in contacts:\n yield contact", "def contacts_list_update(self):\n\t\tself.database.contacts_clear()\n\t\tclient_log.debug(f'Запрос контакт листа для пользователся {self.name}')\n\t\treq = {\n\t\t\tACTION: GET_CONTACTS,\n\t\t\tTIME: time.time(),\n\t\t\tUSER: self.username\n\t\t}\n\t\tclient_log.debug(f'Сформирован запрос {req}')\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tclient_log.debug(f'Получен ответ {ans}')\n\t\tif RESPONSE in ans and ans[RESPONSE] == 202:\n\t\t\tfor contact in ans[LIST_INFO]:\n\t\t\t\tself.database.add_contact(contact)\n\t\telse:\n\t\t\tclient_log.error('Не удалось обновить список контактов.')", "def cc_emails(self):\n return self._cc_emails", "def get_customer_list(self):\n return self._customer_repo.get_customer_list()", "def get_company_to_contacts(self, company_id: str):\n return self.get(object_id=company_id, definition=Definitions.COMPANY_TO_CONTACT)", "def get_phone_numbers_to_send_to(self):\n # Get the phone numbers we want to send to, excluding those that have\n # already done the thing we want to remind them of\n phone_numbers = self.PhoneModel.objects.exclude(phone_number__in=self.to_exclude())\\\n .values_list('phone_number', flat=True)\n\n message_text = self.get_message_text()\n # Set from_number to REPORTS_SHORT_CODE so that recipient can\n # simply just respond to this message with their report.\n from_shortcode = settings.REPORTS_SHORT_CODE\n for phone_number in phone_numbers:\n yield phone_number, message_text, from_shortcode", "def get_all(self):\n total_contacts = []\n get_count = {\n 'query': {\n 'object': 'CONTACT',\n 'select': {\n 'field': 'RECORDNO'\n },\n 'pagesize': '1'\n }\n }\n\n response = self.format_and_send_request(get_count)\n count = int(response['data']['@totalcount'])\n pagesize = 2000\n offset = 0\n for i in range(0, count, pagesize):\n data = {\n 'query': {\n 'object': 'CONTACT',\n 'select': {\n 'field': [\n 'RECORDNO',\n 'CONTACTNAME',\n 'COMPANYNAME',\n 'FIRSTNAME',\n 'LASTNAME',\n 'INITIAL',\n 'PRINTAS',\n 'TAXABLE',\n 'MAILADDRESS.ADDRESS1'\n ]\n },\n 'pagesize': pagesize,\n 'offset': offset\n }\n }\n contacts = self.format_and_send_request(data)['data']['CONTACT']\n total_contacts = total_contacts + contacts\n offset = offset + pagesize\n return total_contacts", "def contact(self, contactid):\r\n return contacts.Contact(self, contactid)", "def _messages_list(self, queue):\n\n return queue.messages()", "async 
def delete_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.delete_certificate_contacts(\n vault_base_url=self.vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:\n new_contacts = await self._client.set_certificate_contacts(\n vault_base_url=self.vault_url,\n contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),\n **kwargs\n )\n return [\n CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list\n ]", "def get_conversations(self):\n\t\treturn self.conversations", "def get_mailing_list():\n\t\tresult = {}\n\t\tconnection = DbHelper.connect()\n\n\t\twith connection.cursor() as cursor:\n\t\t\tsql = \"SELECT email FROM mail_list \\\n\t\t\t\t WHERE is_activated=1;\"\n\t\t\tcursor.execute(sql)\n\t\t\tresult = cursor.fetchall()\n\n\t\treturn [email_data['email'] for email_data in result]", "def get_sent_messages(self):\n return self.sent_messages", "def getList(self):\n return self.list_", "def get(self) -> List[Conversation]:\n return get_all_conversations(), 200", "def get_dmarc_messages(self):\n messages = []\n try:\n if self.opt_use_ssl:\n self.server = poplib.POP3_SSL(self.opt_pop3_server)\n self.server.user(self.opt_global_account[\"username\"])\n self.server.pass_(self.opt_global_account[\"password\"])\n else:\n self.server = poplib.POP3(self.opt_pop3_server)\n self.server.user(self.opt_global_account[\"username\"])\n self.server.pass_(self.opt_global_account[\"password\"])\n except Exception as e:\n raise Exception(\n \"Error connecting to %s with exception %s\" %\n (self.opt_pop3_server, str(e)))\n else:\n self.helper.log_debug(\n 'get_dmarc_messages: successfully connected to %s' %\n self.opt_pop3_server)\n messages = self.byte2str(self.server.uidl()[1])\n self.helper.log_info(\n 'get_dmarc_messages: %d messages' %\n len(messages))\n return messages", "async def get_contacts_for_contact_group(dbcon: DBConnection, contact_group_id: int) -> Iterable[object_models.Contact]:\n q = \"\"\"select\n contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active\n from contact_group_contacts, contacts\n where contact_group_contacts.contact_group_id = %s\n and contact_group_contacts.contact_id = contacts.id\"\"\"\n return [object_models.Contact(*row) for row in await dbcon.fetch_all(q, (contact_group_id,))]", "def get(self):\n args = GET_PARSER.parse_args()\n print(f'args={args}')\n\n return Contacts().get_all(\n args[\"phonetypeOne\"],\n args[\"phonetypeTwo\"],\n args[\"phonetypeThree\"],\n args[\"firstName\"],\n args[\"lastName\"],)", "def get_queryset(self):\n user = self.request.user\n return Contact.objects.filter(owner=user)", "def getList(self):\n return self.list", "def get_recipients(self):\n recipients = set([user.email for user in self.users.all()])\n for group in self.groups.all():\n recipients.update([user.email for user in group.user_set.all()])\n return list(recipients)", "async def get_all_contacts_for_active_monitor(dbcon: DBConnection, monitor_id: int) -> Iterable[object_models.Contact]:\n contacts = set()\n contacts.update(await _active_monitor_contacts(dbcon, monitor_id))\n contacts.update(await _active_monitor_contact_groups(dbcon, monitor_id))\n contacts.update(await _active_monitor_monitor_group_contacts(dbcon, monitor_id))\n 
contacts.update(await _active_monitor_monitor_group_contact_groups(dbcon, monitor_id))\n return list(contacts)", "def get_campaigns(self, uuid=None):\n params = self._build_params(uuid=uuid)\n return self._get_query('campaigns', params, Campaign)", "def first_contact(self) -> List[str]:\n error_list = []\n return error_list", "def get_recipients(self):\n return [\n participant.user for participant in self.obj.activity.accepted_participants\n ]", "def display_contact(self):\n contacts = \"\".join(str(contact) for contact in self.contact_list)\n print(contacts)", "def get_candidate_list(self):\n return self.candidate_list", "def contact(self, id_or_email):\n return ContactCollection(self.request).find(id_or_email)", "def contact_details(self) -> 'outputs.ContactDetailsResponse':\n return pulumi.get(self, \"contact_details\")", "def contact_details(self) -> 'outputs.ContactDetailsResponse':\n return pulumi.get(self, \"contact_details\")", "def contact_details(self) -> 'outputs.ContactDetailsResponse':\n return pulumi.get(self, \"contact_details\")", "def get_citation_child_list(self):\n return self.address_list", "def get_messages(self):\r\n return self.messages", "def pull_one_contact(self, name):\n contact = []\n for x in self.contacts:\n if x[0] == name:\n contact_name = x[0]\n number = x[1]\n email = x[2]\n zipcode = x[3]\n contact = [contact_name, number, email, zipcode]\n print(contact)\n return contact, self.contacts.index(x)", "def get_group_list(self):\n return [(item[0], item[1][0]) for item in self.contacts_by_group_list]", "def get_message_list(self):\n count = 0\n for msg in self.mbox:\n if msg['From'].find(self.config['tgt_email']) > -1:\n dtime = arrow.get(msg['Date'], 'ddd, D MMM YYYY HH:mm:ss ZZ')\n message = dict({'from': msg['From'],\n 'date': dtime,\n 'subject': msg['Subject']})\n # boundary = msg.get_boundary()\n # if boundary is not None:\n # bounds = [m.start() for m\n # in re.finditer(boundary, str(msg))]\n # else:\n # bounds = list()\n # if len(bounds) > 2:\n # message['text'] = str(msg)[bounds[1]:bounds[2]]\n # else:\n # message['text'] = None\n pl = None\n if msg['Subject'].find(\":\") == -1:\n finished = False\n pl = msg.get_payload()\n while finished is False:\n if isinstance(pl, str):\n finished = True\n elif isinstance(pl, list):\n pl = pl[0].get_payload()\n else:\n raise ValueError(\"Non-list, non-str payload?\")\n break\n message['text'] = self.clean_text(str(pl))\n\n if message['text'] is not None:\n self.messages.append(message)\n count += 1\n # print count\n self.messages.sort(key=lambda item: item['date'])", "def get_contacts(self):\n feet = [\"REAR_RIGHT_FOOT\", \"REAR_LEFT_FOOT\",\n \"FRONT_RIGHT_FOOT\", \"FRONT_LEFT_FOOT\"]\n contacts = np.zeros(4, dtype=np.float32)\n for i, foot in enumerate(feet):\n if self.supervisor.getFromDef(foot).getNumberOfContactPoints() > 0:\n contacts[i] = 1.0\n return contacts", "def recipients(self, content_object):\n if not hasattr(content_object, self.recipient_function):\n # the content object does not provide this function, test to see if its a lambda\n if not self.recipient_function.lower().startswith(\"lambda\"):\n raise NotImplementedError(\"%s does not exist for %s\" % (self.recipient_function,\n content_object))\n recipients = eval(self.recipient_function)(content_object)\n else:\n func_or_attr = getattr(content_object, self.recipient_function)\n recipients = func_or_attr() if inspect.ismethod(func_or_attr) else func_or_attr\n if not hasattr(recipients, \"__iter__\"):\n if recipients is None:\n return None\n 
recipients = [recipients]\n return [(r.email, r) if hasattr(r, \"email\") else (r, None) for r in recipients]", "def get_contacts(self, uuids=None, urns=None, groups=None, before=None, after=None, deleted=None, pager=None):\n params = self._build_params(uuid=uuids, urns=urns, group_uuids=groups, before=before, after=after,\n deleted=deleted)\n return Contact.deserialize_list(self._get_multiple('contacts', params, pager))", "def get_messages(self):\n res = self.conn.cursor().execute(\"SELECT * FROM messages\")\n return res.fetchall()", "def recipients(self):\n return self._recips", "def list(self):\n try:\n response = self.service.users().messages().list(userId=self.user_id,\n q=self.query).execute()\n messages = []\n if 'messages' in response:\n messages.extend(response['messages'])\n\n while 'nextPageToken' in response:\n page_token = response['nextPageToken']\n response = self.service.users().messages().list(userId=self.user_id, q=self.query,\n pageToken=page_token).execute()\n messages.extend(response['messages'])\n\n return messages\n except errors.HttpError as error:\n print('An error occurred: %s' % error)", "def get(self, set=''):\n params = {}\n if set: params['set'] = set\n\n request = self._connection.get('contacts.json', params=params)\n if request.status_code != 200:\n raise Exception('status code {0}: cannot get contacts'.format(request.status_code))\n return [User.parse(self._connection, each) for each in request.json()]", "def target_contact(self):\n return self._target_contact", "def get_recipients(msg_parsed):\n recipients = []\n addr_fields = ['From', 'To', 'Cc', 'Bcc']\n\n for f in addr_fields:\n rfield = msg_parsed.get(f, \"\") # Empty string if field not present\n rlist = re.findall(ADDR_PATTERN, rfield)\n recipients.extend(rlist)\n\n return recipients" ]
[ "0.72631705", "0.71663344", "0.7121245", "0.6652589", "0.65349084", "0.64175344", "0.63972867", "0.6386485", "0.63770306", "0.63157517", "0.61904186", "0.61765337", "0.6132532", "0.6121634", "0.6121634", "0.602559", "0.6003591", "0.59747976", "0.5964907", "0.5926964", "0.5926448", "0.5915741", "0.59078896", "0.5905416", "0.589102", "0.5790044", "0.5783242", "0.57187986", "0.5704222", "0.5675407", "0.567043", "0.56384206", "0.56341976", "0.5627944", "0.56250626", "0.559637", "0.552889", "0.5514794", "0.5482435", "0.54757184", "0.545075", "0.54494613", "0.544242", "0.5432188", "0.5395183", "0.53910005", "0.534315", "0.53385097", "0.53358686", "0.53316194", "0.53243816", "0.53139645", "0.52997684", "0.52973974", "0.5296391", "0.529599", "0.52929693", "0.52762717", "0.5262194", "0.5249087", "0.5246305", "0.5237774", "0.52237517", "0.52132976", "0.5200423", "0.51861614", "0.518084", "0.51734054", "0.5170489", "0.5152326", "0.51436543", "0.5137565", "0.51349556", "0.51309234", "0.50918293", "0.507489", "0.50743437", "0.5072385", "0.50628895", "0.5050648", "0.50476044", "0.5032133", "0.5026486", "0.5022638", "0.5022638", "0.5022638", "0.5017613", "0.50095385", "0.50027764", "0.4992955", "0.49862403", "0.49730676", "0.49716076", "0.49650335", "0.4963129", "0.49521157", "0.49408746", "0.4938071", "0.49301484", "0.49292713" ]
0.8081953
0
Sets the contact_list of this MessagingCampaign. The contact list that this messaging campaign will send messages for.
def contact_list(self, contact_list): self._contact_list = contact_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def receiveContactList(self, contactList):", "def set_contacts(self, contacts):\n\n\t\tif contacts is not None and not isinstance(contacts, list):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: contacts EXPECTED TYPE: list', None, None)\n\t\t\n\t\tself.__contacts = contacts\n\t\tself.__key_modified['Contacts'] = 1", "def contacts(self, contacts):\n\n self._contacts = contacts", "def contacts(self, contacts):\n\n self._contacts = contacts", "def contact_list(self):\n return self._contact_list", "def update_contacts(self, contact_list):\n updated_contacts = 0\n request_list = list()\n\n # stale_contacts contains all old contacts at first, all current\n # contacts get then removed so that the remaining can get deleted\n stale_contacts = set(self.contacts)\n\n for contact in contact_list:\n c = Persona.query.get(contact[\"id\"])\n\n if c is None:\n c = Persona(id=contact[\"id\"], _stub=True)\n\n if c._stub is True:\n request_list.append(contact[\"id\"])\n\n try:\n # Old and new contact; remove from stale list\n stale_contacts.remove(c)\n except KeyError:\n # New contact\n self.contacts.append(c)\n updated_contacts += 1\n\n # Remove old contacts that are not new contacts\n for contact in stale_contacts:\n self.contacts.remove(contact)\n\n app.logger.info(\"Updated {}'s contacts: {} added, {} removed, {} requested\".format(\n self.username, updated_contacts, len(stale_contacts), len(request_list)))\n\n return request_list", "def contact(self, contact):\n\n self.logger.debug(\"In 'contact' setter.\")\n\n self._contact = contact", "async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:\n new_contacts = await self._client.set_certificate_contacts(\n vault_base_url=self.vault_url,\n contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),\n **kwargs\n )\n return [\n CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list\n ]", "def contact_points(self, contact_points: object):\n\n self._contact_points = contact_points", "def contact(self, contact):\n\n self._contact = contact", "def contact(self, contact):\n\n self._contact = contact", "def support_contacts(self, support_contacts):\n self._support_contacts = support_contacts", "def contact_lists(self):\n from hubspot3.contact_lists import ContactListsClient\n\n return ContactListsClient(**self.auth, **self.options)", "def contacts_list_update(self):\n\t\tself.database.contacts_clear()\n\t\tclient_log.debug(f'Запрос контакт листа для пользователся {self.name}')\n\t\treq = {\n\t\t\tACTION: GET_CONTACTS,\n\t\t\tTIME: time.time(),\n\t\t\tUSER: self.username\n\t\t}\n\t\tclient_log.debug(f'Сформирован запрос {req}')\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tclient_log.debug(f'Получен ответ {ans}')\n\t\tif RESPONSE in ans and ans[RESPONSE] == 202:\n\t\t\tfor contact in ans[LIST_INFO]:\n\t\t\t\tself.database.add_contact(contact)\n\t\telse:\n\t\t\tclient_log.error('Не удалось обновить список контактов.')", "def send_mass_messages(self, recipient_list, sender, message=\"\", subject=\"\"):\n try:\n for s in recipient_list:\n self.send_message(to=s, sender=sender, message=message, subject=subject)\n except TypeError:\n return -1\n return 1", "def list(self, list):\n if list is None:\n raise ValueError(\"Invalid value for `list`, must not be `None`\") # noqa: E501\n\n self._list = list", "def do_send_list( self, a_list ):\r\n # --- this 
needs to be moved to task some set up here then on there\r\n self.logger.info( \"turn on sendList\" )\r\n self.send_list_ix = 0\r\n\r\n #self.send_list = [ 180, 920, 160, 1740, 160, 780, 160, 2840, 160, 1320, 160, 1340, 160, ] # 1180, 160, 2700, 160, 12780, 200, 920, \\\r\n #160, 2680, 160, 780, 160, 800, 160, 780, 160, 920, 160, 800, 140, 800, \\\r\n # 160 ]\r\n self.send_list = a_list\r\n self.com_driver.send( \"z\\n\" )\r\n self.list_send = True # if we were mult-threaded this would have to be here\r\n\r\n return", "def contact_point(self, contact_point: object):\n\n self._contact_point = contact_point", "def contact_reference(self, contact_reference):\n\n self._contact_reference = contact_reference", "def list_contact(self, key, value):\n self.db.list_contact(\n key,\n value,\n )", "def target_contact(self, target_contact):\n \n self._target_contact = target_contact", "def contactListClicked(self):\n \n contacts = self.userList.getSelectedItems()\n self.mergeButton.setEnabled(contacts != None and len(contacts) > 1)\n \n if contacts != None and len(contacts) == 1:\n self.messageList.filterByContact(contacts[0])\n else:\n self.messageList.removeFilter()", "def _create_mailing_list(cls):\n cls.mailing_list_1 = cls.env['mailing.list'].with_context(cls._test_context).create({\n 'name': 'List1',\n 'contact_ids': [\n (0, 0, {'name': 'Déboulonneur', 'email': 'fleurus@example.com'}),\n (0, 0, {'name': 'Gorramts', 'email': 'gorramts@example.com'}),\n (0, 0, {'name': 'Ybrant', 'email': 'ybrant@example.com'}),\n ]\n })\n cls.mailing_list_2 = cls.env['mailing.list'].with_context(cls._test_context).create({\n 'name': 'List2',\n 'contact_ids': [\n (0, 0, {'name': 'Gilberte', 'email': 'gilberte@example.com'}),\n (0, 0, {'name': 'Gilberte En Mieux', 'email': 'gilberte@example.com'}),\n (0, 0, {'name': 'Norbert', 'email': 'norbert@example.com'}),\n (0, 0, {'name': 'Ybrant', 'email': 'ybrant@example.com'}),\n ]\n })", "def member_list(self, member_list):\n\n self._member_list = member_list", "def contact_info(self, contact_info):\n\n self._contact_info = contact_info", "def SetDomainsList(self, domainsList) :\n\t\t...", "def source_contact(self, source_contact):\n \n self._source_contact = source_contact", "def remove_from_contact_list(self, contacts_to_remove_list):\n if self.contact_list is None:\n return\n for id in contacts_to_remove_list:\n if id in range(0, len(self.contact_list) + 1):\n self.contact_list[id - 1] = None\n self.contact_list = [contact for contact in self.contact_list if contact is not None]", "def add_contact(self, name, number, email, zipcode):\n \n new_contact = f\"{name}, {number}, {email}, {zipcode}\"\n contact_list = [name,number,email,zipcode]\n self.contacts.append(contact_list)\n self.save()\n print(f\"Thank you {new_contact} has been added to your contact book.\")", "def contact_email(self, contact_email):\n\n self._contact_email = contact_email", "def contact_email(self, contact_email):\n\n self._contact_email = contact_email", "def update_contacts(self, contacts):\n\n if contacts.time.size != 1:\n raise IndexError(\"Contacts should be from one frame only\")\n if contacts.channel.size != self.contacts.channel.size:\n self.new_contact_set(contacts)\n return # Prevent calling update_contacts recursively\n self.contacts = contacts\n contacts = np.array(contacts)\n\n for i, actor in enumerate(self.contacts_actors):\n # mapper = actors.GetNextActor().GetMapper()\n mapper = actor.GetMapper()\n self.contacts_actors[i].GetProperty().SetColor(self.contacts_color)\n 
self.contacts_actors[i].GetProperty().SetOpacity(self.contacts_opacity)\n source = vtkSphereSource()\n source.SetCenter(contacts[0:3, i])\n source.SetRadius(self.contacts_size)\n mapper.SetInputConnection(source.GetOutputPort())", "def contact_information(self, contact_information: ContactInformation):\n\n self._contact_information = contact_information", "def new_contact_set(self, contacts):\n if contacts.time.size != 1:\n raise IndexError(\"Contacts should be from one frame only\")\n self.contacts = contacts\n\n # Remove previous actors from the scene\n for actor in self.contacts_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.contacts_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtk.vtkPoints()\n for i in range(contacts.channel.size):\n # Create a mapper\n mapper = vtkPolyDataMapper()\n\n # Create an actor\n self.contacts_actors.append(vtkActor())\n self.contacts_actors[i].SetMapper(mapper)\n\n self.parent_window.ren.AddActor(self.contacts_actors[i])\n\n # Update marker position\n self.update_contacts(self.contacts)", "def test_create_contact_list(self):\n c1 = ContactFactory(company_id=self.company.id)\n data = {\n 'title': 'ContactList1',\n 'contact_ids': [c1.id],\n }\n\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n \n response = self.client.post(url, data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.post(url, data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n content = json.loads(response.content)\n self.assertEqual(content['title'], 'ContactList1')\n self.assertEqual(content['contacts'], [c1.id])\n self.assertNotEqual(content['company_id'], None)\n self.assertNotEqual(content['owner'], None)\n\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n content = json.loads(response.content)\n self.assertEqual(self.contact_lists_count+1, len(content))", "def billing_contact(self, billing_contact):\n\n self._billing_contact = billing_contact", "def contact_number(self, contact_number):\n if contact_number is None:\n raise ValueError(\"Invalid value for `contact_number`, must not be `None`\") # noqa: E501\n\n self._contact_number = contact_number", "def update_contacts(self):\n self.contacts = self.db.list_contacts()\n return self.list_contacts()", "def contact_person(self, contact_person):\n\n self._contact_person = contact_person", "def contact_person(self, contact_person):\n\n self._contact_person = contact_person", "def list_contacts(self):\n return self.contacts", "def set_contact_mechanisms(cls, records, name, value=None):\n Party = Pool().get('party.party')\n\n for record in records:\n Party.write([record.party], {'contact_mechanisms': value})", "def cc_email_address(self, val: list):\n self._cc_recipients = []\n if val is not None:\n for item in val:\n if isinstance(item, EmailAddress):\n self._cc_recipients.append(item)", "def contact_name(self, contact_name):\n\n self._contact_name = contact_name", "def contact_name(self, contact_name):\n\n self._contact_name = contact_name", "def tearDown(self):\n Contact.contact_list = []", "def set_customer(self, customer_list):\n self.multiple_items_selection_from_kendo_dropdown(self.customer_dropdown_locator, customer_list)\n self.wait_for_ajax_spinner_load()", "def 
__init__(self, first_name=\" \", last_name=\" \", phone_number=0, phone_number_type=\" \", contact_list=[]):\n self.first_name = first_name\n self.last_name = last_name\n self.phone_number = phone_number\n self.phone_number_type = phone_number_type\n self.valid_phone_number_types = [\"home\", \"office\", \"cell\"]\n self.contact_list = contact_list", "def mailman_add(contact, listname=None, send_welcome_msg=None, admin_notify=None):\n mm, listname = _get_maillist(listname)\n print('mailman adding %s to %s' % (contact.email, listname), file=sys.stderr)\n if send_welcome_msg is None:\n send_welcome_msg = mm.send_welcome_msg\n\n userdesc = UserDesc()\n userdesc.fullname = contact.full_name\n userdesc.address = contact.email\n userdesc.digest = False\n\n if mm.isMember(contact.email):\n print(_('Already Subscribed: %s' % contact.email), file=sys.stderr)\n else:\n try:\n try:\n mm.Lock()\n mm.ApprovedAddMember(userdesc, send_welcome_msg, admin_notify)\n mm.Save()\n print(_('Subscribed: %(email)s') % { 'email' : contact.email }, file=sys.stderr)\n except Errors.MMAlreadyAMember:\n print(_('Already a member: %(email)s') % { 'email' : contact.email }, file=sys.stderr)\n except Errors.MMBadEmailError:\n if userdesc.address == '':\n print(_('Bad/Invalid email address: blank line'), file=sys.stderr)\n else:\n print(_('Bad/Invalid email address: %(email)s') % { 'email' : contact.email }, file=sys.stderr)\n except Errors.MMHostileAddress:\n print(_('Hostile address (illegal characters): %(email)s') % { 'email' : contact.email }, file=sys.stderr)\n finally:\n mm.Unlock()", "def contact_id(self, contact_id):\n if self.local_vars_configuration.client_side_validation and contact_id is None: # noqa: E501\n raise ValueError(\"Invalid value for `contact_id`, must not be `None`\") # noqa: E501\n\n self._contact_id = contact_id", "def set_contacts_size(self, contacts_size):\n self.contacts_size = contacts_size\n self.update_contacts(self.contacts)", "def block(self, report=False):\n prefix = \"28\" if isinstance(self, SkypeBotUser) else \"8\"\n self.skype.conn(\"PUT\", \"{0}/users/{1}/contacts/blocklist/{2}:{3}\"\n .format(SkypeConnection.API_CONTACTS, self.skype.userId, prefix, self.id),\n auth=SkypeConnection.Auth.SkypeToken, json={\"report_abuse\": report, \"ui_version\": \"skype.com\"})\n self.blocked = True", "def test_save_multiple_contact(self):\n self.new_contact.save_contact()\n # new contact\n test_contact = Contact(\"Test\", \"user\", \"0798765432\", \"test@user.com\")\n test_contact.save_contact()\n self.assertEqual(len(Contact.contact_list), 2)", "def send_email(subject, message, recipient_list, from_email=None,\n fail_silently=False, connection=None):\n if not from_email:\n from_email = _s('SERVER_EMAIL') or _s('DEFAULT_FROM_EMAIL')\n try:\n subj = unicode(subject)\n except UnicodeDecodeError:\n subj = subject.decode('utf8')\n datatuple = [(subj, message, from_email, [recipient],) \\\n for recipient in recipient_list]\n send_mass_mail(datatuple)", "def setListDoc(self, list):\n if list is None: list__o = None\n else: list__o = list._o\n libxml2mod.xmlSetListDoc(list__o, self._o)", "def set_raw_contact(self, value: Atoms):\n self._raw_contact = value", "async def send_contact(self, chat_id: typing.Union[base.Integer, base.String],\n phone_number: base.String,\n first_name: base.String, last_name: typing.Union[base.String, None] = None,\n disable_notification: typing.Union[base.Boolean, None] = None,\n reply_to_message_id: typing.Union[base.Integer, None] = None,\n reply_markup: 
typing.Union[types.InlineKeyboardMarkup,\n types.ReplyKeyboardMarkup,\n types.ReplyKeyboardRemove,\n types.ForceReply, None] = None) -> types.Message:\n reply_markup = prepare_arg(reply_markup)\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SEND_CONTACT, payload)\n\n return types.Message(**result)", "def set_contacts_color(self, contacts_color):\n self.contacts_color = contacts_color\n self.update_contacts(self.contacts)", "def contact_method(self, contact_method):\n\n self._contact_method = contact_method", "def add_contact(self):\n contact_list = {}\n contact_list[self.my_number] = self.name\n connect_db = Database()\n connect_db.add_contact(self.name, self.my_number)", "def sequencing_contact(self, sequencing_contact):\n self.logger.debug(\"In 'sequencing_contact' setter.\")\n\n self._sequencing_contact = sequencing_contact", "def cc_emails(self, cc_emails):\n\n self._cc_emails = cc_emails", "def email_ml_set_can_send_on_behalf(self, maillist=None, maillist_uid=None, subscriber=None, subscriber_uid=None, can_send_on_behalf=None):\n if not maillist and not maillist_uid:\n raise ValueError('Maillist or uid required')\n if not subscriber and not subscriber_uid:\n raise ValueError('Subscriber or uid required')\n return self._request('email/ml/set_can_send_on_behalf', inspect_args_func(inspect.currentframe()))", "def ip_addresses_list(self, ip_addresses_list):\n\n self._ip_addresses_list = ip_addresses_list", "def ListAllContacts(self):\n feed = self.gd_client.GetContacts()\n self.contacts = self.CleanPhoneNumbers(self.GetContactsInfo(feed))\n return self.contacts", "def test_save_multiple_contacts(self):\n self.new_contact.save_contact() # saving the new contact\n test_contact = Contact(\"Test\", \"User\", 254712345678, \"test@user.com\") # new user\n test_contact.save_contact() # saving the new contact\n self.assertEqual(len(Contact.contact_list), 2)", "def email_process(recipient_list: List[Client]) -> None:\n\n if recipient_list:\n send_email(recipient_list)\n update_only_emailed_clients(recipient_list)\n remove_fully_contacted_clients()\n else:\n print(\"No emails were sent.\")", "def block_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='block'))", "def GetContactList(self):\n\t\tfeeds = []\n\t\tfeed = self.client.GetContacts()\n\t\tfeeds.append(feed)\n\t\tnext = feed.GetNextLink()\n\t\twhile next:\n\t\t\tfeed = self.client.GetContacts(uri=next.href)\n\t\t\tfeeds.append(feed)\n\t\t\tnext = feed.GetNextLink()\n\t\t\n\t\tcontacts = []\n\t\tfor feed in feeds:\n\t\t\tif not feed.entry:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tfor i, entry in enumerate(feed.entry):\n\t\t\t\t\tcontacts.append(entry)\n\t\treturn contacts", "def resulting_contact(self, resulting_contact):\n \n self._resulting_contact = resulting_contact", "def fetch_contact_messages(self, org, contact, created_after, created_before):\n pass", "def setListPunctCmplx(self, list):\n\t\tself.list_punct_cmplx = list", "def configure_tls_to_endpoint_in_sipserver(\n self,\n phone_list: List[Union[fxo_template.FXOTemplate, SIPPhoneTemplate]],\n ) -> None:", "def add_contact_to_google_account(self, i):\n\n self.add_contact_to_phone(i)", "def activities_list(self, new_activities_list):\n self._activities_list = new_activities_list\n self.__save_activities_from_memory_to_file()", "def display_contact(self):\n contacts = \"\".join(str(contact) for contact in self.contact_list)\n print(contacts)", "def update_device_list(self, device_list):\n 
self.device_list = device_list\n\n self.device_combo.clear()\n\n if not device_list:\n return\n\n self.device_combo.addItem(\"\")\n\n active_entry = None\n\n for dev in device_list:\n\n action_string = \"{model:<18} - {contype:<7} - {serial}\".format(model=dev.model,\n contype=dev.device_type,\n serial=dev.serial)\n if dev.serial == self.serial:\n active_entry = action_string\n self.device_combo.addItem(action_string)\n\n if active_entry is not None:\n self.device_combo.setCurrentText(active_entry)", "def AddContact(self, contact):\n\t\tcontact.group_membership_info = [gdata.contacts.data.GroupMembershipInfo(href=self.GetFirstGroupId())]\n\t\ttry:\n\t\t\tself.client.CreateContact(contact)\n\t\texcept gdata.client.RequestError:\n\t\t\tpass", "def email_ml_subscribers(self, maillist=None, maillist_uid=None):\n if not maillist and not maillist_uid:\n raise ValueError('Maillist or uid required')\n return self._request('email/ml/subscribers', inspect_args_func(inspect.currentframe()), method='get')", "def contact_started(self, node, contact):\n for subscriber in self.subscribers:\n subscriber.contact_started(node, contact)", "def bcc_email_address(self, val: list):\n self._bcc_recipients = []\n if val is not None:\n for item in val:\n if isinstance(item, EmailAddress):\n self._bcc_recipients.append(item)", "def contacts(self):\n from hubspot3.contacts import ContactsClient\n\n return ContactsClient(**self.auth, **self.options)", "async def set_contact_group_contacts(dbcon: DBConnection,\n contact_group_id: int, contact_ids: Iterable[int]) -> None:\n\n async def _run(cur: Cursor) -> None:\n q = \"\"\"delete from contact_group_contacts where contact_group_id=%s\"\"\"\n await cur.execute(q, (contact_group_id,))\n for contact_id in contact_ids:\n q = \"\"\"insert into contact_group_contacts (contact_group_id, contact_id) values (%s, %s)\"\"\"\n q_args = (contact_group_id, contact_id)\n await cur.execute(q, q_args)\n\n if not await contact_group_exists(dbcon, contact_group_id):\n raise errors.InvalidArguments('contact group does not exist')\n await dbcon.transact(_run)", "def push_all(self, contacts):\n for ell in contacts:\n self.push(ell)", "def new_soft_contacts_set(self, soft_contacts):\n if soft_contacts.time.size != 1:\n raise IndexError(\"soft_contacts should be from one frame only\")\n self.soft_contacts = soft_contacts\n\n # Remove previous actors from the scene\n for actor in self.soft_contacts_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.soft_contacts_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtk.vtkPoints()\n for i in range(soft_contacts.channel.size):\n # Create a mapper\n mapper = vtkPolyDataMapper()\n\n # Create an actor\n self.soft_contacts_actors.append(vtkActor())\n self.soft_contacts_actors[i].SetMapper(mapper)\n\n self.parent_window.ren.AddActor(self.soft_contacts_actors[i])\n # Update marker position\n self.update_soft_contacts(self.soft_contacts)", "def contact_list(request):\n if request.method == 'GET':\n contact = Contact.objects.all()\n serializer = ContactSerializer(contact, many=True)\n return Response(serializer.data)\n elif request.method == 'POST':\n serializer = ContactSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data,\n status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors,\n status=status.HTTP_400_BAD_REQUEST)", "def set_response_list(self, r_list):\n self.response_list = r_list", "def set_coupled_pair_list(self, 
coupled_pair_list):\n self.coupled_pair_list = coupled_pair_list\n self.reg_coupled_pair = True", "def get_contacts_list(self):\n return [(id + 1, contact) for id, contact in enumerate(self.contact_list)]", "def test_delete_contact_list(self):\n contact_list = ContactList.objects.first()\n\n url, parsed = self.prepare_urls('v1:contact_list-detail', subdomain=self.company.subdomain, kwargs={'pk':contact_list.id})\n \n response = self.client.delete(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.delete(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n \n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n content = json.loads(response.content)\n self.assertEqual(self.contact_lists_count-1, len(content))", "def __init__(self, contacts_client):\n self.contacts_client = contacts_client", "def contacts(self):\n return ContactCollection(self.request)", "def send(self, from_email, to_list, cc_list, bcc_list, subject, text):\n\n to_address_list = []\n\n if len(to_list) > 0:\n for to_address in to_list:\n to_address_list.append(\n {\n \"email\": to_address,\n \"type\": \"to\"\n }\n )\n\n if len(cc_list) > 0:\n for cc_address in cc_list:\n to_address_list.append(\n {\n \"email\": cc_address,\n \"type\": \"cc\"\n }\n )\n\n if len(bcc_list) > 0:\n for bcc_address in bcc_list:\n to_address_list.append(\n {\n \"email\": bcc_address,\n \"type\": \"bcc\"\n }\n )\n\n sendgrid_data = {\n \"key\": sendgrid_key,\n \"message\": {\n \"text\": text,\n \"subject\": subject,\n \"from_email\": from_email,\n \"to\": to_address_list\n },\n \"async\": False,\n }\n\n response = requests.post(\n sendgrid_url,\n data=json.dumps(sendgrid_data)\n )\n\n if response.ok:\n status = 0\n else:\n status = 1\n\n message = str(response.content)\n\n return status, message", "def appendedEntries(self):\n self.contact_list.append({\"name\": self.first_name.title() + \" \" + self.last_name.title(), \"phone number\": self.phone_number, \"phone number type\": self.phone_number_type})", "def archive_contact_messages(self, org, contact):\n pass", "def fields_in_list(self, fields_in_list):\n\n self._fields_in_list = fields_in_list", "def fields_in_list(self, fields_in_list):\n\n self._fields_in_list = fields_in_list", "def add_contacts(self, contacts, group=None, group_uuid=None):\n payload = self._build_params(contacts=contacts, action='add', group=group, group_uuid=group_uuid)\n self._post('contact_actions', None, payload)", "def __init__(self, contact_loader):\n self.contacts_by_group_list = contact_loader.contacts_by_group_list\n self.contact_list = None", "def update_only_emailed_clients(recipient_list, file=\"db.json\") -> None:\n\n with TinyDB(file) as db:\n for client in recipient_list:\n query = Query()\n db.update(add(\"times contacted\", 1), (query[\"first name\"].matches(client.get_first_name())\n & (query[\"last name\"].matches(client.get_last_name()))))\n db.update(set_val(\"rem date\", datetime_to_string(default_rem_date)),\n (query[\"first name\"].matches(client.get_first_name())\n & (query[\"last name\"].matches(client.get_last_name())\n )))" ]
[ "0.6209503", "0.6202445", "0.588594", "0.588594", "0.5883504", "0.5845821", "0.5795876", "0.5775629", "0.55796677", "0.5562263", "0.5562263", "0.5559013", "0.5479643", "0.5460591", "0.53779536", "0.5320598", "0.53073066", "0.5305575", "0.52854943", "0.5274175", "0.5192591", "0.5121968", "0.5113486", "0.50693697", "0.5029035", "0.5010891", "0.50096", "0.5007492", "0.49756786", "0.49653065", "0.49653065", "0.4957423", "0.49395514", "0.4926072", "0.49036422", "0.48886657", "0.48883614", "0.4827787", "0.48177895", "0.48177895", "0.47843698", "0.47793344", "0.47782195", "0.4772784", "0.4772784", "0.47690243", "0.47679976", "0.47509006", "0.47490525", "0.4745818", "0.47420013", "0.4738187", "0.47350198", "0.4732864", "0.47066173", "0.4680452", "0.46777913", "0.46690828", "0.46402803", "0.46320206", "0.46003407", "0.45983198", "0.45887333", "0.45852426", "0.4579518", "0.45610544", "0.45582846", "0.455033", "0.452969", "0.45121688", "0.45078608", "0.45051056", "0.45047635", "0.44964176", "0.4493843", "0.44880277", "0.4487089", "0.44746184", "0.4472952", "0.44660527", "0.4462495", "0.44624025", "0.44611588", "0.4439524", "0.44338965", "0.44322392", "0.44172618", "0.44003206", "0.43955433", "0.43901408", "0.4382922", "0.4382315", "0.4381088", "0.43688384", "0.43652815", "0.43610385", "0.43610385", "0.43607998", "0.43484876", "0.4348318" ]
0.83265656
0
Gets the dnc_lists of this MessagingCampaign. The dnc lists to check before sending a message for this messaging campaign.
def dnc_lists(self):
    return self._dnc_lists
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_dmarc_messages(self):\n messages = []\n try:\n if self.opt_use_ssl:\n self.server = poplib.POP3_SSL(self.opt_pop3_server)\n self.server.user(self.opt_global_account[\"username\"])\n self.server.pass_(self.opt_global_account[\"password\"])\n else:\n self.server = poplib.POP3(self.opt_pop3_server)\n self.server.user(self.opt_global_account[\"username\"])\n self.server.pass_(self.opt_global_account[\"password\"])\n except Exception as e:\n raise Exception(\n \"Error connecting to %s with exception %s\" %\n (self.opt_pop3_server, str(e)))\n else:\n self.helper.log_debug(\n 'get_dmarc_messages: successfully connected to %s' %\n self.opt_pop3_server)\n messages = self.byte2str(self.server.uidl()[1])\n self.helper.log_info(\n 'get_dmarc_messages: %d messages' %\n len(messages))\n return messages", "def contact_list(self):\n return self._contact_list", "def get_drip_campaigns(self):\n return list(DripCampaign.objects(user_id=self.user_id))", "def get_message_list(self):\n \n result = requests.get(\n url = root_url + '/{}'.format(\"message\"),\n headers = { 'Authorization': api_key },\n )\n\n message_list = result.json()\n\n self.message_list = message_list", "def get_list_of_campaigns(self, limit=0, offset=0):\n logger.info(\"Function call: get_list_of_campaigns\")\n return self.__handle_result(self.__send_request('campaigns', 'GET', {'limit': limit or 0, 'offset': offset or 0}))", "def get_recipients(self) -> List[Client]:\n\n index_list = [i for i in range(len(self.int_var_list)) if self.int_var_list[i].get() == 1]\n return [self.client_list[i] for i in index_list]", "def get_campaign_name_list(self):\n campaigns = self.find('campaigns', {})\n campaign_names = []\n for campaign in campaigns:\n if 'name' in campaign:\n campaign_names.append(campaign['name'])\n return campaign_names", "def contact_lists(self):\n from hubspot3.contact_lists import ContactListsClient\n\n return ContactListsClient(**self.auth, **self.options)", "def _get_campaigns(self, params):\n return self._api.account.get_campaigns(params={**params, **self._state_filter()}, fields=[self.state_pk])", "def get_dns_list(self):\n return self.get_ipv4_dns_list()", "def dnc_lists(self, dnc_lists):\n \n self._dnc_lists = dnc_lists", "def get_message_list(self):\n count = 0\n for msg in self.mbox:\n if msg['From'].find(self.config['tgt_email']) > -1:\n dtime = arrow.get(msg['Date'], 'ddd, D MMM YYYY HH:mm:ss ZZ')\n message = dict({'from': msg['From'],\n 'date': dtime,\n 'subject': msg['Subject']})\n # boundary = msg.get_boundary()\n # if boundary is not None:\n # bounds = [m.start() for m\n # in re.finditer(boundary, str(msg))]\n # else:\n # bounds = list()\n # if len(bounds) > 2:\n # message['text'] = str(msg)[bounds[1]:bounds[2]]\n # else:\n # message['text'] = None\n pl = None\n if msg['Subject'].find(\":\") == -1:\n finished = False\n pl = msg.get_payload()\n while finished is False:\n if isinstance(pl, str):\n finished = True\n elif isinstance(pl, list):\n pl = pl[0].get_payload()\n else:\n raise ValueError(\"Non-list, non-str payload?\")\n break\n message['text'] = self.clean_text(str(pl))\n\n if message['text'] is not None:\n self.messages.append(message)\n count += 1\n # print count\n self.messages.sort(key=lambda item: item['date'])", "def GetListDoctors(self):\n\t\treturn self.ClientsMap.values()", "def get_mailchimp_lists(mc_api_key, server):\n try:\n client = MailchimpMarketing.Client()\n client.set_config({\n \"api_key\": mc_api_key,\n \"server\": server\n })\n\n response = client.lists.get_all_lists()\n 
print(response)\n except ApiClientError as error:\n print(\"Error: {}\".format(error.text))", "def _get_receivers_list(self):\n\n # TODO: document what this plugin expects to be in Dockerfile/where it gets info from\n global_component = self._get_component_label()\n # this relies on bump_release plugin configuring source.git_commit to actually be\n # branch name, not a commit\n if not isinstance(self.workflow.source, GitSource):\n raise PluginFailedException('Source is not of type \"GitSource\", panic!')\n git_branch = self.workflow.source.git_commit\n try:\n r = requests.get(urljoin(self.pdc_url, 'rest_api/v1/release-component-contacts/'),\n headers={'Authorization': 'Token %s' % self._get_pdc_token()},\n params={'global_component': global_component,\n 'dist_git_branch': git_branch,\n 'role': self.pdc_contact_role},\n verify=self.pdc_verify_cert)\n except requests.RequestException as e:\n self.log.error('failed to connect to PDC: %s', str(e))\n raise RuntimeError(e)\n\n if r.status_code != 200:\n self.log.error('PDC returned status code %s, full response: %s',\n r.status_code, r.text)\n raise RuntimeError('PDC returned non-200 status code (%s), see referenced build log' %\n r.status_code)\n\n contacts = r.json()\n\n if contacts['count'] == 0:\n self.log.error('no %s role for the component', self.pdc_contact_role)\n raise RuntimeError('no %s role for the component' % self.pdc_contact_role)\n\n send_to = []\n for contact in contacts['results']:\n send_to.append(contact['contact']['email'])\n\n return send_to", "def tracking_domain_list(self):\r\n params = base.get_params(None, locals())\r\n return self._get('tracking_domain_list', params)", "def get_dms_list(self, start_index, list_count):\n response = self.get(COMMAND_UIC, 'GetDmsList', [\n ('liststartindex', int(start_index)),\n ('listcount', int(list_count)),\n ])\n\n if not int(response['listcount']):\n return []\n\n return response_list(response['dmslist']['dms'])", "async def fetch_dm_channels(self):\n data = await self.http.get_dm_channels()\n channels = []\n for dm_channel_data in data.get('channels', data):\n dm_channel = self.http.create_channel(data=dm_channel_data)\n channels.append(dm_channel)\n\n return channels", "def GetContactList(self):\n\t\tfeeds = []\n\t\tfeed = self.client.GetContacts()\n\t\tfeeds.append(feed)\n\t\tnext = feed.GetNextLink()\n\t\twhile next:\n\t\t\tfeed = self.client.GetContacts(uri=next.href)\n\t\t\tfeeds.append(feed)\n\t\t\tnext = feed.GetNextLink()\n\t\t\n\t\tcontacts = []\n\t\tfor feed in feeds:\n\t\t\tif not feed.entry:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tfor i, entry in enumerate(feed.entry):\n\t\t\t\t\tcontacts.append(entry)\n\t\treturn contacts", "def get_contacts(self, count=-1, excluded_guid=None):\n current_len = len(self._contacts)\n if current_len == 0 or count == 0:\n return []\n\n if count < 0:\n count = current_len\n else:\n count = min(count, current_len)\n\n if excluded_guid is None:\n # Get the last `count` contacts.\n contact_list = self._contacts[-count:]\n else:\n contact_list = []\n for contact in reversed(self._contacts):\n if contact.guid == excluded_guid:\n continue\n contact_list.append(contact)\n if len(contact_list) >= count:\n break\n return contact_list", "def retrievelist(self,listofsharesofmessages):\n \n messageslist = []\n for shareslist in listofsharesofmessages :\n message = self.retrieve(shareslist)\n messageslist.append(message)\n return messageslist", "def get_sent_messages(self):\n return self.sent_messages", "def _messages_list(self, queue):\n\n return 
queue.messages()", "def sent_messages(self):\n return self._get_messages_from_folder_name('SentItems')", "def get_contacts(self):\n\n\t\treturn self.__contacts", "def detectionlists(self):\n return self._sdk_dependencies.detection_lists_module", "def get_conversations(self):\n\t\treturn self.conversations", "def getIpv4DnsList(self):\n with self.status._dhcp_status_mutex:\n if self.status.ipv4_lease_valid is None:\n return [None]\n else:\n return self.status.ipv4_dnslist", "def AllowedDomains(self)->list:\n return self._allowedDomains", "def email_ml_list(self):\n return self._request('email/ml/list', inspect_args_func(inspect.currentframe()), method='get')", "def get_list_of_senders(self):\n logger.info(\"Function call: get_senders\")\n return self.__handle_result(self.__send_request('senders'))", "def getDeviceList(self):\n return defer.succeed(self.discovered)", "def get_campaigns(self, uuid=None):\n params = self._build_params(uuid=uuid)\n return self._get_query('campaigns', params, Campaign)", "def get_list_of_ltcdatasink_mc_addrs(mdl=None, my_roleid=None):\n\n mcaddrs = []\n msg_refs = []\n\n mdl_parser = etree.XMLParser(remove_blank_text=True)\n root = etree.parse(mdl, mdl_parser)\n\n roleids = root.xpath(\"//mdl:RoleID\", namespaces=ns)\n\n for r in roleids:\n if r.text == my_roleid:\n ltc_sink_msg_refs = next(r.iterancestors()).findall(\"mdl:TmNSLTCDataSink/mdl:MessageDefinitionRefs/\"\n \"mdl:MessageDefinitionRef\", namespaces=ns)\n for ref in ltc_sink_msg_refs:\n msg_refs.append(ref.attrib[\"IDREF\"])\n\n for mref in msg_refs:\n msg = root.xpath(\"//mdl:MessageDefinition[@ID='{}']\".format(mref), namespaces=ns)\n addr = msg[0].find(\"mdl:DestinationAddress\", namespaces=ns).text\n mcaddrs.append(addr)\n\n print(\"Found {} Multicast Groups to subscribe to.\".format(len(mcaddrs)))\n return mcaddrs", "def getList(self):\n return self.list_", "def get_patient_list(self):\n return self._patient_list", "def get_datacomponent_list():\n global datacomponent_list\n\n if not datacomponent_list:\n datacomponent_list = stixhelpers.get_datacomponents(get_srcs())\n\n return datacomponent_list", "def cc_emails(self):\n return self._cc_emails", "def discovered_devices(self) -> List[BLEDevice]:\n return [d for d, _ in self._backend.seen_devices.values()]", "def get(self) -> List[Conversation]:\n return get_all_conversations(), 200", "def getCacheMessages(self):\n return self.db.getCacheMsgs()", "def get_mail_list(self) -> List[int]:\n response = self.IMAP.select(self.mailconfig.folderInbox)\n if response[0] != \"OK\":\n log.error(\"Error accessing Folder '%s': %s\" % (self.mailconfig.folderInbox, response[1][0].decode()))\n emailcount: int = int(response[1][0])\n if not emailcount > 0:\n return []\n log.info(\"%s email(s) in inbox\" % emailcount)\n\n response = self.IMAP.uid(\"search\", None, \"(ALL)\")\n if response[0] != \"OK\":\n log.error(\"Failed to retrieve mails from inbox: %s\" % response[1][0].decode())\n return []\n # TODO: Raise exception?\n indices: List[bytes] = response[1][0].split()\n return [int(x) for x in indices]", "def messages(self):\n return MessageNotification.messages", "def connected_components(self) -> List[list]:\n for n in self.dw_graph.get_all_v().values():\n n.distance=0.0\n mega_list = []\n for n in self.dw_graph.get_all_v().values():\n if n.distance!=-10:\n mega_list.append(self.connected_component(n.node_id))\n return mega_list", "def getSubscriptionList(self):\r\n return self.feeds", "def getList(self):\n return self.list", "def __get_msg_list(self):\n msg_list 
= []\n eld_msg_group = ELD_msg_group()\n with open(self.simulation_source_file, 'r') as sim_file:\n for line in sim_file:\n group_complete = False\n if line.startswith('#'):\n eld_msg_group.description = self.__get_description(line)\n elif line.startswith('speed'):\n # Normal line: parsing signal values for J1939 messages\n eld_msg_group.vehicle_speed = self.__get_int_value_from_line(line, 'speed')\n eld_msg_group.vehicle_distance = self.__get_int_value_from_line(line, 'distance')\n eld_msg_group.engine_speed = self.__get_int_value_from_line(line, 'engine_rpm')\n eld_msg_group.engine_hours = self.__get_engine_hours_from_line(line)\n elif line.startswith('duration'):\n eld_msg_group.duration = self.__get_duration_from_line(line)\n group_complete = True\n else:\n print('Error: Unknown simulation line format: {0}'.format(line))\n\n if group_complete:\n msg_list.append(eld_msg_group)\n eld_msg_group = ELD_msg_group()\n return msg_list", "def get_messages(character):\n mail = character.db.mail\n try:\n messages = [item for item in mail if item[TIMESTAMP] <= item[MESSAGE].date_sent]\n # Let's clean up mail storage for this user while we're at it.\n character.db.mail = messages\n except TypeError:\n messages = []\n return messages", "def logged_messages(self):\n return self._logged_messages", "def get_contacts(self):\n contacts = Membership.objects.filter(entity = self, key_contact = True).order_by('importance_to_entity')\n return contacts", "def company_lists(self):\n return self.client.get('company/named-lists')", "def check_domains(self, service_id, service_version):\n domain_list = self.fastly_cache[service_id]['domain_list']\n\n return domain_list", "def get_messages(self):\n return self.messages_received", "def get_messages(self):\n return self.messages_received", "def get_messages(self):\n return self.messages_received", "def get_contacts_list(self):\n contacts = self.driver.find_elements_by_class_name(\"_1wjpf\")\n s= [contact.text for contact in contacts] #extracts chats and last messsages\n print (\"get contacts: \"+str(s)) #print only chat names\n return s[::2] #returns only chat names", "def messages(self):\n return self._messages", "def messages(self):\n return self._messages", "def messages(self):\n return self._messages", "def __get_loaded_messages(self):\n messages = []\n for message in self.chat.find_elements(By.XPATH, \"\"\"//*[@id=\"main\"]/div[3]/div/div/div[3]/*\"\"\"):\n messages.append(MessageElement(message))\n return messages", "def listDomains(self):\n reply = self.rpc.getDomains(self.username,\n self.password)\n if reply[0] == 'UNKNOWN_ERROR':\n raise Exception(\"RPC returned error: \" + reply[0])\n return reply", "def list_contacts(self):\n return self.contacts", "def get_group_list(self):\n return [(item[0], item[1][0]) for item in self.contacts_by_group_list]", "def get_device_configs(self) -> List[dict]:\n device_config_list = self._internal_get_device_configs()\n\n return device_config_list", "def list(self) -> List[ListClientDto]:\n raise NotImplementedError", "def get_adcampaign_list(self, account_id):\n fields = 'id, name, campaign_status, start_time, end_time, ' \\\n 'daily_budget, lifetime_budget, budget_remaining'\n batch = [\n self.get_adaccount(account_id, ['currency'], batch=True),\n self.get_adcampaigns(account_id, fields, batch=True),\n self.get_stats_by_adcampaign(account_id, batch=True),\n ]\n return self.make_batch_request(batch)", "def foundInChannels(self, cls:\"Client\") -> List[\"Channel\"]:\n\n\t\tret:List[\"Channel\"] = []\n\n\t\tfor 
channel_name in self.found_in:\n\n\t\t\tCh:\"Channel\" = cls.channels.get(channel_name, None)\n\t\t\tif Ch: ret.append(Ch)\n\n\t\treturn ret", "def checklists(self):\n return self.pods.all().checklists", "def get_messages(self):\r\n return self.messages", "def get_diagnostic_list(self):\n return _get_diagnostic_list(self.run_dir)", "def get_contacts_list(self):\n return [(id + 1, contact) for id, contact in enumerate(self.contact_list)]", "def get_lists(self):\n return [{\"id\": lst[\"list_id\"], \"name\": lst[\"name\"]}\n for lst in List.objects(user_id=self.user_id, active=True)]", "async def get_discovered_devices(self) -> List[BLEDevice]:\n warn(\n \"This method will be removed in a future version of Bleak. Use the `discovered_devices` property instead.\",\n FutureWarning,\n stacklevel=2,\n )\n return self.discovered_devices", "def get_domains(self):\n\n response = self.call(method='getDomains')\n domains = []\n for d in response:\n domain = self.domain(domain=d['domain'])\n domains.append(domain)\n return domains", "def get_status_messages(self):\n return self.data[\"allMessagesForFrontend\"][\"messages\"]", "def list_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def get_messages(self):\n return self.addresses", "def get_directory_list(self):\r\n lines = []\r\n self.ftp.retrlines('LIST', lines.append)\r\n return lines", "def getDisciplinesList():\n return Gw2Spidy._request('disciplines')['results']", "def get_cached_contacts(self):\n return list(self._replacement_cache)", "def get_queryset(self):\n contact_data = Contact.objects.filter(contact_groups__in=Member.objects.filter(\n user=self.request.user).values('group_id').distinct())\n\n return contact_data", "def get_persisted_checklists(self):\r\n modulestore = get_modulestore(self.course.location)\r\n return modulestore.get_item(self.course.location).checklists", "def get_domain_whitelist(self):\n whitelist = self.charm_config[\"federation-domain-whitelist\"]\n return list(filter(None, whitelist.split(\",\")))", "def get_append_messages(self):\n\t\treturn self.checkAppendMessages.get_active()", "def get_msgs(self):\n msgs = []\n while True:\n try:\n msgs.append(self.get_msg(block=False))\n except Empty:\n break\n return msgs", "def get_msgs(self):\n msgs = []\n while True:\n try:\n msgs.append(self.get_msg(block=False))\n except Empty:\n break\n return msgs", "def get_notification_groups(self):\n if self.notification_group:\n return [self.notification_group]\n if self.profile.notification_group:\n return [self.profile.notification_group]\n ng = SystemNotification.get_notification_group(\"dns.change\")\n if ng:\n return [ng]\n else:\n return []", "def GetAvailableWordLists(self) :\r\n\t\ttry :\r\n\t\t\tself.DB_Cursor.execute(self.SQLCMDs['SelectWordLists'])\r\n\t\t\tWordLists = self.DB_Cursor.fetchall()\r\n\t\texcept Exception as detail :\r\n\t\t\tlogging.error(\"Failed to return word lists: %s\"%detail)\r\n\t\treturn WordLists", "def get_dns(self) -> Set:\n if self.dn_set.should_update():\n contacts_data = self.get_contacts_data()\n self.dn_set.update(set(contacts_data.get_dns()))\n return self.dn_set.data", "def _channels_list(self):\n result = self.slack.api_call(\"channels.list\")\n\n if not result.get(\"ok\"):\n logging.error(result['error'])\n return None\n\n return result['channels']", "def _get_listings(self):\n listing_ids = self.config.get('listing_ids', [])\n if len(listing_ids) == 0:\n return\n listing_ids = [lid.lower() for lid in listing_ids]\n params = {\n 
'limit': 0,\n 'offset': 0,\n 'lang': self.portal_state.language(),\n }\n params.update(self.config)\n params['listing_ids'] = listing_ids\n params = prepare_search_params(params)\n results = search(params, batching=False, context=self.context)\n if results is None or len(results) == 0:\n return\n\n # sort the results based on the listing_ids\n results = [(item['id']['value'], item) for item in results]\n results = dict(results)\n self._listings = [\n results.get(id) for id in listing_ids if id in results\n ]", "def get_public_messages(self):\n messages = []\n for message in self.messages:\n if message.message_id != None:\n messages.append(message)\n return messages", "def _get_threats_lists(self):\n # response is googleapiclient.discovery.Resource object\n response = self.service.threatLists()\n \n # response is googleapiclient.http.HttpRequest object\n response = response.list()\n \n # response is a dict file\n response = response.execute()\n \n return response['threatLists']", "def get_all_msgs(self):\n data = self.database.select(self.tname)\n msgs = []\n for item in data:\n msgs.append((item[0], self.data_to_msg(item)))\n return msgs", "def get_communities(self):\n return self._communities.values()", "def _mdList(self):\n\t\treturn [\n\t\t\t\t\tself.mdProcessingStatus, \n\t\t\t\t\tself.mdOpenChCurrent,\n\t\t\t\t\tself.mdNStates, \n\t\t\t\t\tself.mdCurrentStep,\n\t\t\t\t\tself.mdBlockDepth,\n\t\t\t\t\tself.mdBlockSTD,\n\t\t\t\t\tself.mdEventStart,\n\t\t\t\t\tself.mdEventEnd,\n\t\t\t\t\tself.mdEventDelay,\n\t\t\t\t\tself.mdStateResTime,\n\t\t\t\t\tself.mdResTime,\n\t\t\t\t\tself.mdAbsEventStart,\n\t\t\t\t\tself.mdThreshold\n\t\t\t\t]", "def get_dcm(self):\n control_list = []\n for control in self.__control_list:\n if (control[0] != 'control'):\n control_list.append(control)\n return control_list", "def draft_messages(self):\n return self._get_messages_from_folder_name('Drafts')", "def get_recipients(self):\n recipients = set([user.email for user in self.users.all()])\n for group in self.groups.all():\n recipients.update([user.email for user in group.user_set.all()])\n return list(recipients)", "def get_received_messages(self):\n return self.received_messages" ]
[ "0.5988404", "0.58297056", "0.56870914", "0.55929554", "0.5453402", "0.5412666", "0.5395899", "0.5304845", "0.5249858", "0.5206307", "0.5206069", "0.5170666", "0.5170228", "0.51614743", "0.5143177", "0.51181716", "0.5111123", "0.50970674", "0.50523174", "0.504655", "0.5045429", "0.5012835", "0.4995541", "0.49917287", "0.4958403", "0.4955061", "0.49537343", "0.49376455", "0.49335316", "0.4913977", "0.4909252", "0.48825094", "0.487554", "0.48727974", "0.48708555", "0.486287", "0.4860228", "0.48595718", "0.48576418", "0.4845943", "0.48332742", "0.4811996", "0.48109338", "0.4807813", "0.4804231", "0.48040554", "0.4798178", "0.47940373", "0.47917774", "0.47844315", "0.47775075", "0.47710437", "0.47663006", "0.47663006", "0.47663006", "0.4764703", "0.47642818", "0.47642818", "0.47642818", "0.4760693", "0.47552946", "0.47536173", "0.47499433", "0.47449118", "0.4739136", "0.473834", "0.4729681", "0.4725961", "0.47191736", "0.47175956", "0.4710586", "0.47060516", "0.47003755", "0.46699363", "0.4669816", "0.46650657", "0.4658992", "0.4648106", "0.46469885", "0.46385852", "0.46360734", "0.46354565", "0.46331933", "0.46311298", "0.4624152", "0.4624152", "0.46168628", "0.46114305", "0.46052736", "0.4591488", "0.4589839", "0.45890298", "0.45871234", "0.4584502", "0.4577948", "0.45764482", "0.4569397", "0.4569007", "0.455989", "0.45547643" ]
0.70566237
0
Sets the dnc_lists of this MessagingCampaign. The dnc lists to check before sending a message for this messaging campaign.
def dnc_lists(self, dnc_lists):
    self._dnc_lists = dnc_lists
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetDomainsList(self, domainsList) :\n\t\t...", "def checklists(self, checklists):\n\n self._checklists = checklists", "def fdsid_list(self, fdsid_list):\n\n self._fdsid_list = fdsid_list", "def contact_list(self, contact_list):\n \n self._contact_list = contact_list", "def dnc_lists(self):\n return self._dnc_lists", "def do_send_list( self, a_list ):\r\n # --- this needs to be moved to task some set up here then on there\r\n self.logger.info( \"turn on sendList\" )\r\n self.send_list_ix = 0\r\n\r\n #self.send_list = [ 180, 920, 160, 1740, 160, 780, 160, 2840, 160, 1320, 160, 1340, 160, ] # 1180, 160, 2700, 160, 12780, 200, 920, \\\r\n #160, 2680, 160, 780, 160, 800, 160, 780, 160, 920, 160, 800, 140, 800, \\\r\n # 160 ]\r\n self.send_list = a_list\r\n self.com_driver.send( \"z\\n\" )\r\n self.list_send = True # if we were mult-threaded this would have to be here\r\n\r\n return", "def _set_scrolls(self, listOfScrolls):\n self._scrolls = listOfScrolls", "def forward_messages(self, message_list):\n\n def find_ports(destination):\n \"\"\"\n Return a list of the ports that according to the forwarding table\n lead to 'destination'.\n\n Arguments:\n destination: an instance of class NetworkDevice or an iterable\n of NetworkDevice instances.\n\n Returns:\n A set of the ports that lead to the devices in 'destination'.\n\n \"\"\"\n output_ports = set()\n if isinstance(destination, collections.Iterable):\n for device in destination:\n # ports leading to device\n ports_towards_device = self.forwarding_table.get(\n device, self.ports)\n output_ports.update(ports_towards_device)\n else:\n output_ports.update(\n self.forwarding_table.get(destination, self.ports))\n return output_ports\n\n for message in message_list:\n destinations = message.destination\n output_ports = find_ports(destinations)\n for port in output_ports:\n new_message = Message.from_message(message)\n self.env.process(\n self.instruct_transmission(new_message, port))", "def set_hosts(self, host_list: t.List[str]) -> None:\n if isinstance(host_list, str):\n host_list = [host_list.strip()]\n if not isinstance(host_list, list):\n raise TypeError(\"host_list argument must be a list of strings\")\n if not all(isinstance(host, str) for host in host_list):\n raise TypeError(\"host_list argument must be list of strings\")\n # TODO check length\n if self.batch:\n if hasattr(self, \"batch_settings\") and self.batch_settings:\n self.batch_settings.set_hostlist(host_list)\n\n if self.launcher == \"lsf\":\n for db in self.dbnodes:\n db.set_hosts(host_list)\n else:\n for host, db in zip(host_list, self.dbnodes):\n if isinstance(db.run_settings, AprunSettings):\n if not self.batch:\n db.run_settings.set_hostlist([host])\n else:\n db.run_settings.set_hostlist([host])\n\n if db.is_mpmd and hasattr(db.run_settings, \"mpmd\"):\n for i, mpmd_runsettings in enumerate(db.run_settings.mpmd):\n mpmd_runsettings.set_hostlist(host_list[i + 1])", "def set(self, varbindlist):\n comm = self.sessiondata.get_community(RW)\n if not comm:\n raise SNMPBadCommunity(\"No community!\")\n mo = CommunityBasedMessage(comm, SetRequestPDU() , self.sessiondata.version )\n for vb in varbindlist:\n mo.add_varbind(vb)\n resp = self._send_and_receive(mo)\n if resp.pdu.error_status:\n raise EXCEPTION_MAP[resp.pdu.error_status](resp.pdu.error_index)\n else:\n return resp.pdu.varbinds", "def send_message_list(message_lst: list, reciever: str, receiver_data: dict,\n users_to_remove: list) -> None:\n new_prev_mes: list = []\n final_message = ''\n for message in message_lst:\n if 
len(new_prev_mes) == 5:\n break\n if message not in receiver_data['usr_prevs_mes']:\n receiver_data['mes_limit'] -= 1\n final_message += f'\\n{message}'\n new_prev_mes.append(message)\n receiver_data['usr_prevs_mes'] = new_prev_mes\n final_message += '\\nReply stop to stop these notifications.'\n if len(new_prev_mes) != 0:\n send_message(reciever,\n 'New Vaccine Locations Detected!',\n final_message,\n receiver_data['carrier'])\n if receiver_data['mes_limit'] <= 0:\n users_to_remove.append(reciever)", "def setup_ncfile_list(self):\n self.ncfilelist = []\n for file in os.listdir(self.dirpath_netcdf):\n if file.endswith('.nc'):\n self.ncfilelist.append(osp.join(self.dirpath_netcdf, file))", "def set_dns_servers(self, hDnsServersList):\n\t\tcall_sdk_function('PrlVmDevNet_SetDnsServers', self.handle, conv_handle_arg(hDnsServersList))", "def set_blists(self, blists):\n self.blists = blists[:]", "def _create_mailing_list(cls):\n cls.mailing_list_1 = cls.env['mailing.list'].with_context(cls._test_context).create({\n 'name': 'List1',\n 'contact_ids': [\n (0, 0, {'name': 'Déboulonneur', 'email': 'fleurus@example.com'}),\n (0, 0, {'name': 'Gorramts', 'email': 'gorramts@example.com'}),\n (0, 0, {'name': 'Ybrant', 'email': 'ybrant@example.com'}),\n ]\n })\n cls.mailing_list_2 = cls.env['mailing.list'].with_context(cls._test_context).create({\n 'name': 'List2',\n 'contact_ids': [\n (0, 0, {'name': 'Gilberte', 'email': 'gilberte@example.com'}),\n (0, 0, {'name': 'Gilberte En Mieux', 'email': 'gilberte@example.com'}),\n (0, 0, {'name': 'Norbert', 'email': 'norbert@example.com'}),\n (0, 0, {'name': 'Ybrant', 'email': 'ybrant@example.com'}),\n ]\n })", "def update_emails_with_dlcs(dlcs, liaison=None):\n for dlc in dlcs:\n EmailMessage.objects.filter(\n record__author__dlc=dlc,\n date_sent__isnull=True).update(_liaison=liaison)", "def setDetectLists(self, value):\n return self._set(detectLists=value)", "def set_target_stocks_list(self, list_of_stocks):\n self.target_stocks = list_of_stocks", "def DistanceMatrices(self, dms):\r\n if not isinstance(dms, ListType):\r\n raise TypeError(\"The item passed in as the new list was not a \"\r\n \"list data type.\")\r\n if self._num_dms >= 0 and len(dms) != self._num_dms:\r\n raise ValueError(\"Cannot set %d distance matrices. 
Must provide \"\r\n \"exactly %d distance matrices.\" % (len(dms),\r\n self._num_dms))\r\n for dm in dms:\r\n if not isinstance(dm, DistanceMatrix):\r\n raise TypeError(\r\n 'Invalid type (%s); expected DistanceMatrix' %\r\n dm.__class__.__name__)\r\n if self._min_dm_size >= 0 and dm.shape[0] < self._min_dm_size:\r\n raise ValueError(\"Distance matrix of size %dx%d is smaller \"\r\n \"than the minimum allowable distance matrix \"\r\n \"size of %dx%d for this analysis.\" %\r\n (dm.shape[0], dm.shape[0], self._min_dm_size,\r\n self._min_dm_size))\r\n self._dms = dms", "def _setsenders_correspondent_53D(self, val):\n self.swift_obj.SendersCorrespondent_D = val\n self.swift_obj.SendersCorrespondent_D.swiftTag = \"53D\"", "def security_list_ids(self, security_list_ids):\n self._security_list_ids = security_list_ids", "def domains(self, domains):\n\n self._domains = domains", "def sync_all_lists(self):\r\n print(\"Started syncing influencer master lists with DB\")\r\n screen_names_on_lists = []\r\n self._add_or_update(screen_names_on_lists)\r\n print(\"Removing entries which are no longer on any list\")\r\n self._delete_entries_not_in_list(screen_names_on_lists) # remove entries from DB if they are on no list\r\n print(\"Sync complete\")", "def set_search_domains(self, hSearchDomainsList):\n\t\tcall_sdk_function('PrlVmDevNet_SetSearchDomains', self.handle, conv_handle_arg(hSearchDomainsList))", "def setListDoc(self, list):\n if list is None: list__o = None\n else: list__o = list._o\n libxml2mod.xmlSetListDoc(list__o, self._o)", "def test_set_multiple_ca_list(self):\n secert = load_certificate(FILETYPE_PEM, server_cert_pem)\n clcert = load_certificate(FILETYPE_PEM, server_cert_pem)\n\n sedesc = secert.get_subject()\n cldesc = clcert.get_subject()\n\n def multiple_ca(ctx):\n L = [sedesc, cldesc]\n ctx.set_client_ca_list(L)\n return L\n\n self._check_client_ca_list(multiple_ca)", "def fields_in_list(self, fields_in_list):\n\n self._fields_in_list = fields_in_list", "def fields_in_list(self, fields_in_list):\n\n self._fields_in_list = fields_in_list", "def set_search_domains(self, hSearchDomainsList):\n\t\tcall_sdk_function('PrlVmCfg_SetSearchDomains', self.handle, conv_handle_arg(hSearchDomainsList))", "def cc_emails(self, cc_emails):\n\n self._cc_emails = cc_emails", "def setListPunctCmplx(self, list):\n\t\tself.list_punct_cmplx = list", "def set_label_list(self, label_lists):\n\n if isinstance(label_lists, annotations.LabelList):\n label_lists = [label_lists]\n\n for label_list in label_lists:\n if label_list.idx is None:\n label_list.idx = 'default'\n\n label_list.utterance = self\n self.label_lists[label_list.idx] = label_list", "def collectibles(self, collectibles):\n\n self._collectibles = collectibles", "def collectibles(self, collectibles):\n\n self._collectibles = collectibles", "def setConflictList(self, conflict_list):\n return\n # if conflict_list is None or conflict_list == []:\n # self.resetConflictList()\n # else:\n # self.conflict_list = conflict_list ", "def SetLists(self,IndList,ObjList):\n \n if not len(IndList)==len(ObjList):\n raise ValueError(\"IndList and ObjList must be of the same length\")\n \n if not prod([type(x)==tuple for x in IndList]):\n IndFormatError=1\n elif not prod([len(x)==3 for x in IndList]):\n IndFormatError=1\n \n IndFormatError=0\n \n if IndFormatError:\n raise ValueError(\"IndList must be list of 3-tuples\")\n \n if not prod([shape(x) == self.__shape for x in ObjList])==1:\n print([shape(x) for x in ObjList])\n raise ValueError(\"ObjList must contain 
arrays of the same shape as the BZO (shape %s)\"%str(self.__shape))\n \n \n self.__IndList=IndList\n self.__ObjList=ObjList\n \n self.__Set_NumList() \n self.__SortLists()", "def update_lists():\n global donor_totals_list\n global donor_donations_list\n global donor_names_list\n global last_donation_list\n donor_totals_list = get_all_donor_totals()\n donor_donations_list = get_list_of_donations()\n donor_names_list = get_list_of_donors()\n last_donation_list = get_max_donation_date_list()", "def send_mass_messages(self, recipient_list, sender, message=\"\", subject=\"\"):\n try:\n for s in recipient_list:\n self.send_message(to=s, sender=sender, message=message, subject=subject)\n except TypeError:\n return -1\n return 1", "def contact_lists(self):\n from hubspot3.contact_lists import ContactListsClient\n\n return ContactListsClient(**self.auth, **self.options)", "def list(self, list):\n if list is None:\n raise ValueError(\"Invalid value for `list`, must not be `None`\") # noqa: E501\n\n self._list = list", "def setup_lists(self):\n pass", "def update_device_list(self):\n\n # Update devices via HTTP request (basic device data - no status)\n self.__http_update_device_list()\n\n # Fetch status for each known device via MQTT\n for gdev in self.__devices.values():\n gdev.request_status()", "def synonyms(self, synonyms: List[str]):\n\n self._synonyms = synonyms", "def receive(self, command_list):\n for cmd in command_list:\n self._send_cmd_with_mapped_ids(cmd)", "def send_lists(self, content):\n self.send(text_data=json.dumps(content))", "def setDelays(self, d):\n raise NotImplementedError", "def email_ml_set_can_send_on_behalf(self, maillist=None, maillist_uid=None, subscriber=None, subscriber_uid=None, can_send_on_behalf=None):\n if not maillist and not maillist_uid:\n raise ValueError('Maillist or uid required')\n if not subscriber and not subscriber_uid:\n raise ValueError('Subscriber or uid required')\n return self._request('email/ml/set_can_send_on_behalf', inspect_args_func(inspect.currentframe()))", "def set_device_rules(self, rules, rule_objs):\n self.logger.debug(\"set_device_rules: rules: {}\".format(rules))\n self._load_device_rules(rules, rule_objs=rule_objs)\n self._determine_cli_command_list()\n self._determine_get_method_list()", "def setDnsNameservers(self, nameservers):\n # type: (tp.Union[str, tp.List[str]])->None\n if isinstance(nameservers, str):\n nameservers = nameservers.split()\n self._validator.validate_one(\n 'dns-nameservers', VALID_OPTS['dns-nameservers'], nameservers)\n self._ifAttributes['dns-nameservers'] = nameservers", "def network_ids(self, network_ids):\n\n self._network_ids = network_ids", "def nsg_ids(self, nsg_ids):\n self._nsg_ids = nsg_ids", "def set_device_ids(self, device_ids):\n if not all(isinstance(device_id, str) for device_id in device_ids):\n raise ApiError(\"One or more invalid device IDs\")\n self._update_criteria(\"device.id\", device_ids)\n return self", "def SetCannoLinks( self, cannotLinks ):\n\t\tself.cannotLinkConstraints = [ frozenset(constraint) for constraint in cannotLinks ]", "def __set_port_list(self):\n\n self._coms = [str(i.device) for i in sorted(self.ports)]", "def dedicated_hosts(self, dedicated_hosts):\n\n self._dedicated_hosts = dedicated_hosts", "def scheduled_builds(self, scheduled_builds):\n\n self._scheduled_builds = scheduled_builds", "def setRoomList(self,roomList):\n self.roomList = roomList", "def setRoomList(self,roomList):\n self.roomList = roomList", "def SetImageList(self, imageList):\r\n\r\n self._imageList = 
imageList", "def refreshLists(self):\n \n contacts = sorted(contact.getContacts(self.db), key = lambda contact: unicode(contact))\n messages = sorted(message.getMessages(self.db), key = lambda message: unicode(message))\n self.userList.replaceList([(unicode(c), c) for c in contacts])\n self.messageList.replaceList([(unicode(m), m) for m in messages])", "def _send_batch(self, service_checks: list):\n for service_check in service_checks:\n self._send(service_check)", "def test_POST_send_list(self):\n\t\tself.POST_list()\n\t\tlist = self.GET_data('/api/list/' + self.list_id)\n\t\tself.POST_data('/api/list/' + self.list_id + '/send', data=list)", "def member_list(self, member_list):\n\n self._member_list = member_list", "def AssignImageList(self, imageList):\r\n\r\n self.SetImageList(imageList)", "def receiveContactList(self, contactList):", "def setAllowedCookies(self, list_):\n if not self.__loaded:\n self.__load()\n \n self.__exceptionsAllow = list_[:]\n self.__exceptionsAllow.sort()\n self.__saveTimer.changeOccurred()", "def set_event_list(self):\n self.eventList = self.get_event_list()", "def AssignImageList(self, imageList):\r\n\r\n self.SetImageList(imageList)\r\n self._ownsImageListNormal = True", "def set_xList(self, *xList):\n assert len(xList) == self.__nx\n self.__x = xList\n self.__xshape = xList[0].shape # Reset the shape of the input.", "def _fit_content_lists(self, requested_lists):\n raise requested_lists > 0 or AssertionError\n if requested_lists != self._num_contents:\n while requested_lists < self._num_contents:\n self._pop_content_list()\n\n while requested_lists > self._num_contents:\n self._push_content_list()", "def set_values(self, value_list, chat_id):\n coin, value, action = value_list\n self.clients[chat_id][0].criptos[coin] = []\n self.clients[chat_id][0].criptos[coin].append(float(value))\n self.clients[chat_id][0].criptos[coin].append(action)\n\n if coin not in self.clients[chat_id][0].pairs:\n self.clients[chat_id][0].pairs.append(coin)", "def set_metric_variables(self, metric_variable_values: List[Any]) -> None:\n with self._lock:\n self._set_metric_variables(metric_variable_values)", "def dc_coupled(self, dc_coupled):\n\n self._dc_coupled = dc_coupled", "def set_workflows(self, wf_list):\n self._data_dict[self.KEY_BI_WORKFLOWS] = wf_list", "def test_DistanceMatrices_setter(self):\r\n dms = [self.overview_dm, self.overview_dm]\r\n self.mc.DistanceMatrices = dms\r\n self.assertEqual(self.mc.DistanceMatrices, dms)", "async def set_contact_group_contacts(dbcon: DBConnection,\n contact_group_id: int, contact_ids: Iterable[int]) -> None:\n\n async def _run(cur: Cursor) -> None:\n q = \"\"\"delete from contact_group_contacts where contact_group_id=%s\"\"\"\n await cur.execute(q, (contact_group_id,))\n for contact_id in contact_ids:\n q = \"\"\"insert into contact_group_contacts (contact_group_id, contact_id) values (%s, %s)\"\"\"\n q_args = (contact_group_id, contact_id)\n await cur.execute(q, q_args)\n\n if not await contact_group_exists(dbcon, contact_group_id):\n raise errors.InvalidArguments('contact group does not exist')\n await dbcon.transact(_run)", "def createListMessageClients(lista, query_id, status,\n client_id, specialist_id, queries_list=None):\n data_obj = lista[0]\n\n node_specialist = Params.PREFIX['specialist'] + str(specialist_id)\n node_client = Params.PREFIX['client'] + str(client_id)\n\n if status >= 4:\n data_obj['isQueryActive'] = False\n else:\n data_obj['isQueryActive'] = True\n\n data_obj['queries'] = queries_list\n qpending = 
get_queries_pending_to_solve(specialist=specialist_id,\n client=client_id)\n query_current = {\n \"status\": status,\n \"title\": data_obj['title'],\n \"date\": str(data_obj['date']),\n \"message\": data_obj['message'],\n \"id\": data_obj['id'],\n \"specialist_id\": data_obj['specialist']\n }\n data_obj['queryCurrent'] = query_current\n del data_obj['specialist']\n del data_obj['message']\n del data_obj['title']\n del data_obj['date']\n del data_obj['id']\n # import pdb; pdb.set_trace()\n db.child(\"messagesList/specialist/\").child(\n node_specialist).child(node_client).set(data_obj)\n\n db.child(\"messagesList/specialist/\").child(\n node_specialist).child(node_client).update(\n {\"pending_queries_to_solve\": qpending})", "def set_documents_names(cls, input_list_names: List[str]) -> None:\n cls.documents_names = input_list_names", "def update_recipients_list(recipient_list, id_data):\n\n email_sock = s.socket(s.AF_INET, s.SOCK_DGRAM)\n email_sock.settimeout(1)\n request = json.dumps({'type': 'retrieve', 'device_id': id_data})\n email_address = \"\"\n try:\n email_sock.sendto(bytes(request, 'utf-8'), (c.DEVICE_MANAGER, 3210))\n reply, _ = email_sock.recvfrom(1024)\n reply_json = json.loads(reply)\n email_address = reply_json['email']\n except (s.gaierror, s.timeout) as e:\n logging.error(f'Failed to retrieve email for {id_data}. E:{str(e)}')\n finally:\n email_sock.close()\n\n if len(email_address) != 0:\n if not recipient_list:\n recipient_list = [email_address]\n else:\n recipient_list.append(email_address)\n\n return recipient_list", "def origin_list(self, origin_list: List[Origin]):\n\n self._origin_list = origin_list", "def set_checklists_status(auth, args):\n global checklists_on\n\n if auth['checklists'] == \"true\":\n checklists_on = True\n else:\n checklists_on = False\n\n # reverse the config setting if specified by the CLI option\n# if args['--checklists']:\n# checklists_on = not checklists_on\n\n return", "def fill_ads_pool(self, ads_list, append=True):\n for ad in ads_list:\n if not isinstance(ad, Ad):\n raise RuntimeError(\"Only Ad objects can be stored with the method fill_ads_pool\")\n if ad.ad_category not in self.categories:\n raise RuntimeError(\"Only News objects with category stored in the Agent categories can be stored\"\n \"with the method fill_news_pool. 
An News with category \" + ad.ad_category +\n \" is trying to be stored, but only categories \" + str(self.categories) + \" can\"\n \"be handled by the Agent.\")\n if append:\n for ad in ads_list:\n self.ads_pool.append(ad)\n else:\n self.ads_pool = ads_list\n\n for ad in ads_list:\n cat_index = self.categories.index(ad.ad_category)\n if ad.exclude_competitors:\n ex_index = 1\n else:\n ex_index = 0\n self.ads_per_category[cat_index][ex_index].append(ad)", "def send_reminders(self, send_reminders):\n\n self._send_reminders = send_reminders", "def send_list(self, frequencies=[1e9,2e9,3e9,4e9], powers=[-10,-5,-2,0], dwell=0.01, delay=0):\r\n _debug('simq03b_api.send_list')\r\n \r\n # Handle integers or lists for either frequencies or powers\r\n if not _s.fun.is_iterable(frequencies): frequencies = [frequencies]\r\n if not _s.fun.is_iterable(powers): powers = [powers]\r\n \r\n # Handle numpy arrays\r\n if not type(frequencies) == list: frequencies = list(frequencies)\r\n if not type(powers) == list: powers = list(powers)\r\n \r\n # Handle length-1 arrays:\r\n if len(frequencies) == 1: frequencies = frequencies*len(powers)\r\n if len(powers) == 1: powers = powers *len(frequencies)\r\n \r\n # Poop if the lengths don't match\r\n if not len(frequencies) == len(powers): \r\n print(\"ERROR: Lengths must match!\")\r\n return\r\n \r\n #The mode switch to Fixed when we write a power and dwell list. \r\n #So I track the initial mode to put it back at the end. \r\n initial_mode = self.get_mode()\r\n \r\n # Let's choose a list. \r\n #To know the available list, the query is 'SOUR1:LIST:CAT?'\r\n self.write('SOUR:LIST:SEL /VAR/USE') \r\n \r\n #Prepare the strings for the list command\r\n str_freq = 'SOUR:LIST:FREQ ' + str(frequencies[0]) #String for the frequency list command\r\n str_pow = 'SOUR:LIST:POW ' + str(powers[0]) #String for the power list command\r\n str_dwell = 'SOUR:LIST:DWEL '+str(dwell) #String for the dwell list command\r\n for i in range(1,len(frequencies)):\r\n str_freq += ', ' + str(frequencies[i])\r\n str_pow += ', ' + str(powers[i])\r\n \r\n # For debugging\r\n print(str_freq)\r\n print(str_pow)\r\n print(str_dwell)\r\n \r\n self.write(str_freq)\r\n self.write(str_pow)\r\n self.write(str_dwell)\r\n \r\n # In SMIQ manual, it says:\r\n # Caution: This command has to be given after every creating and changing of a list.\r\n self.write('SOUR:LIST:LEARn')\r\n \r\n #Apparently the SMA change to Fixed mode after the power and the Dwell list is send... \r\n #So I just switch back to the initial mode to make sure we end up in the same state. 
\r\n self.set_mode(initial_mode)", "def _set_cluster_list(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"cluster-list\", rest_name=\"cluster-list\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"cluster_list must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"cluster-list\", rest_name=\"cluster-list\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__cluster_list = t\n if hasattr(self, '_set'):\n self._set()", "async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:\n new_contacts = await self._client.set_certificate_contacts(\n vault_base_url=self.vault_url,\n contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),\n **kwargs\n )\n return [\n CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list\n ]", "def email_ml_subscribers(self, maillist=None, maillist_uid=None):\n if not maillist and not maillist_uid:\n raise ValueError('Maillist or uid required')\n return self._request('email/ml/subscribers', inspect_args_func(inspect.currentframe()), method='get')", "def cloudflare_waf_ip_lists_list_request(self, list_id: str = None, page: int = None,\n page_size: int = None) -> Dict[str, Any]:\n params = remove_empty_elements({\n 'page': page,\n 'per_page': page_size\n })\n\n ip_list = f'/{list_id}' if list_id else ''\n\n return self._http_request(\n method='GET',\n url_suffix=f'accounts/{self.account_id}/rules/lists{ip_list}',\n params=params)", "def requested_object_list(self, requested_object_list):\n\n self._requested_object_list = requested_object_list", "def setCSLabels(self, labels):\n\n if isinstance(labels, list):\n if len(labels) == self._n_csets:\n if all((lbl is None or isinstance(lbl, str))\n for lbl in labels):\n self._cslabels = list(labels)\n else:\n raise ValueError('all items of labels must be strings')\n else:\n raise ValueError('length of labels must be equal to the '\n 'number of coordinate sets')\n else:\n raise TypeError('labels must be a list')", "def set_all(self, host_names, ip_address):\n for host_name in host_names:\n self.set_one(host_name, ip_address)", "def selected_field_list(self, selected_field_list):\n\n self._selected_field_list = selected_field_list", "def setSyncData( self, bSerial, rgsUserList ):\n\n\t\ttry:\n\t\t\toServer = self._dbServerList.getServer( bSerial=bSerial )\n\t\t\tif not oServer.checkHasAuth():\n\t\t\t\traise Exception, 'auth not supported for this serial'\n\n\t\t\tdbgMsg( 'syncing user list with serial-[%s]' % bSerial )\n\n\t\t\tself._deleteUsers( bSerial )\n\n\t\t\tfor rgsUser in rgsUserList:\n\t\t\t\toUser = self._thawUser( rgsUser )\n\t\t\t\tself._addUser( bSerial, oUser )\n\n\t\t\tdbgMsg( 'finished syncing user list with serial-[%s]' % bSerial )\n\n\t\t\treturn True\n\n\t\texcept Exception, e:\n\t\t\terrMsg( 'error while syncing user list with 
serial-[%s] [%s]' % ( bSerial, e ) )\n\t\t\treturn False", "def set_gnb_search_list(self, gnb_addresses: list) -> None:\n self.config[\"gnbSearchList\"] = gnb_addresses", "def set_targets(self, targets: List[List[float]]):\n assert len(self.data) == len(targets)\n for i in range(len(self.data)):\n self.data[i].set_targets(targets[i])", "def set_color_list(self, new_list):\n self.__clr_list = itertools.cycle(new_list)", "def setBlockedCookies(self, list_):\n if not self.__loaded:\n self.__load()\n \n self.__exceptionsBlock = list_[:]\n self.__exceptionsBlock.sort()\n self.__saveTimer.changeOccurred()", "def setListDoc(self, doc):\n if doc is None: doc__o = None\n else: doc__o = doc._o\n libxml2mod.xmlSetListDoc(self._o, doc__o)", "def set_servers(self, servers):\n if isinstance(servers, six.string_types):\n servers = [servers]\n\n assert servers, \"No memcached servers supplied\"\n self._servers = [Protocol(\n server=server,\n username=self.username,\n password=self.password,\n compression=self.compression,\n socket_timeout=self.socket_timeout,\n pickle_protocol=self.pickle_protocol,\n pickler=self.pickler,\n unpickler=self.unpickler,\n tls_context=self.tls_context,\n ) for server in servers]", "def _set_neighs_list_list_list(self, key):\n self.ks = list(range(len(key))) if self.ks is None else self.ks\n if self._constant_neighs:\n self.idxs = np.array(key)\n else:\n self.idxs = key\n if len(self.idxs[0]) != len(self.iss):\n self.iss = list(range(len(self.idxs[0])))\n if self.staticneighs:\n self.idxs = self.idxs[0]\n self._setted = True" ]
[ "0.5641915", "0.55063945", "0.52784514", "0.5267238", "0.51484704", "0.5096262", "0.48523757", "0.48513708", "0.48345873", "0.48015624", "0.47823006", "0.47243136", "0.47188637", "0.47175246", "0.4708469", "0.47014678", "0.46647304", "0.466445", "0.46205124", "0.45986927", "0.45935085", "0.4573834", "0.45719153", "0.45717672", "0.455192", "0.45004776", "0.44919387", "0.44919387", "0.44664395", "0.4448866", "0.44473332", "0.4428426", "0.44201046", "0.44201046", "0.44155473", "0.43859795", "0.43855578", "0.4372903", "0.4368643", "0.436406", "0.4356721", "0.43398795", "0.43344036", "0.43164104", "0.43059745", "0.42992765", "0.42981118", "0.42816058", "0.428092", "0.4279513", "0.42657226", "0.42647147", "0.42618057", "0.42581975", "0.42503527", "0.42427388", "0.42269108", "0.42269108", "0.42240494", "0.4211851", "0.420612", "0.41992122", "0.41878766", "0.41668662", "0.41556183", "0.4155268", "0.41419953", "0.41407818", "0.41394356", "0.41358578", "0.41262758", "0.41229546", "0.41220844", "0.41171154", "0.41170382", "0.41126", "0.4112068", "0.41021562", "0.4095897", "0.4093921", "0.409206", "0.4085907", "0.40773618", "0.40769047", "0.4076609", "0.40727633", "0.40724432", "0.4071307", "0.40678778", "0.40676087", "0.40659407", "0.40652975", "0.40604064", "0.40588072", "0.40585646", "0.40508288", "0.40384403", "0.40384328", "0.40383902", "0.40383658" ]
0.75755775
0
Gets the always_running of this MessagingCampaign. Whether this messaging campaign is always running
def always_running(self): return self._always_running
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def IsRunning(self):\n return self.running", "def running(self):\n return self.scheduler.running", "def is_running(self):\n return self._running", "def is_running(self):\n return self._running", "def is_running(self):\n return self._running.is_set()", "def running(self):\n\n return self._running", "def is_running(self):\n return self._is_running", "def is_running(self):\n return self._is_running", "def is_running(self):\n return self.running", "def is_running(self):\n return self.running", "def is_running(self):\n return self.running", "def is_running(self) -> bool:\n return self._running.is_set()", "def isScheduleRunning(self):\n if DPxIsDinSchedRunning() == 0:\n schedule_running = False\n else:\n schedule_running = True\n return schedule_running", "def is_running(self):\n\t\treturn self._running", "def always_running(self, always_running):\n \n self._always_running = always_running", "def Running(self):\n return self.Timer.IsRunning()", "def running(self):\n info = self.info()\n return info['running']", "def running(self): # type: () -> bool\n return self.state['Running']", "def is_running(self):\n data = self._poll()\n return data.get('building', False)", "def is_running(self):\n return self.type_id == STATE_RUNNING", "def is_running(self) -> bool:\n return self._is_running", "async def is_running(self, **kwargs: Any) -> bool:\n return self._enabled", "def running(self):\n return self._state == RUNNING_STATE", "def running(self) -> bool:\n return self._running", "def _is_running(self):\n return self._run_state.is_running()", "def is_running(self) -> Awaitable[bool]:\n return self.instance.is_running()", "def is_running(self) -> bool:\r\n return self.__running", "def is_running(self):\n return self.current_state == self.States.RUNNING", "def is_running(self):\n self.__condition.acquire()\n result = self.__is_running\n self.__condition.release()\n return result", "def is_running(self):\n\n return self._state == \"RUNNING\"", "def running(self):\n return self._lifetime_state in {\"starting\",\"running\",\"finishing\"}", "def _is_running(self):\n # Public interface is given by get_status instead.\n self._update()\n return True if self.running_mode else False", "def is_running(self):\n return self._task.running()", "def is_running(self):\n return self._task.running()", "def is_started(self):\n return self._started", "def is_running(self):\n # type: () -> bool\n return self._run_state.is_running()", "def running(self):\n\t\treturn self._start is not None", "def running(self):\n return bool(self.proc and self._running())", "def is_running(self):\n\t\treturn self in _running", "def running(self):\n return self.sub_process and self.sub_process.is_alive()", "def running(self) -> float:\n return self._running", "def is_chiller_running(self) -> bool:\n\n return self.send(self.cmd.GET_COOLING_RUNNING)", "def is_started(self):\n return bool(self._processes)", "def running(self):\n with self._done_condition:\n return self._state == RUNNING", "def Enabled(self):\n return self._get_attribute('enabled')", "def is_running(self):\n return all(p.status == 'running' for p in self.values())", "def is_always_active(self) -> bool:\n if len(self.active_periods) == 0:\n return True\n\n if len(self.active_periods) == 1:\n period = self.active_periods[0]\n if period.lower == 0 and period.upper == 24000:\n return True\n\n return False", "def is_game_started(self):\r\n\t\treturn self._is_game_started", "def running(self):\n if self.done() and self._is_running:\n self._is_running = False\n return self._is_running", 
"def is_started(self):\n self.get_state()\n return self._is_started()", "def started(self):\n return self._started", "def _isrunning(self):\n return self.dp.state()==PyTango.DevState.RUNNING", "def is_running(self):\n # do we have a job ID to work with?\n if self.jobid == None:\n return False\n else:\n q_status = self.queue.get_status(self.jobid)\n\n if q_status == self.queue.state[\"active\"]:\n self.meta[\"status\"] = 'PENDING'\n return True\n else:\n return False", "def is_running(self):\n return self.action_thread and self.action_thread.is_alive()", "def is_container_running(self):\n return self._is_container_running", "def IsRunning(self):\n current_urn = self.Get(self.Schema.CURRENT_FLOW_URN)\n if current_urn:\n current_flow = aff4.FACTORY.Open(urn=current_urn,\n token=self.token, mode=\"r\")\n runner = current_flow.GetRunner()\n return runner.context.state == rdfvalue.Flow.State.RUNNING\n return False", "def running(self):\n return not self._kill_event.is_set()", "def started(self):\n return self._started.get()", "def enabled(self):\n return self._get('enabled')", "def running(self):\n return self.input_processing_running", "def running(self):\n return self.input_processing_running", "def is_active(self):\n return self._is_active", "def is_active(self):\n return self._is_active", "def is_active(self):\n return self._is_active", "def has_started(self) -> bool:\n return self._started", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_on(self):\n return self._is_on", "def is_running(self):\n return self._event_loop is not None and self._event_loop.is_running()", "def enabled(self):\n\n return self._enabled", "def is_running(self):\n return self.style['position'][0] == 'running()'", "def is_on(self):\n return self.state == WORKING_STATE", "def is_on(self):\n return self.state == WORKING_STATE", "def is_on(self):\n return self.state == WORKING_STATE", "def is_reminder_on(self):\n return self.__is_reminder_on", "def is_active(self):\n return self._active", "async def is_running(self, **kwargs: Any) -> bool:\n return True", "def isActive(self):\n return self.data.active", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def enabled(self):\n return self._enabled", "def _is_started(self):\n self.started = self.state == 1\n return self.started", "def active(self):\n return self.starting == 0 and self.stopped == 0", "def is_running(self):\n if self._thread and self._thread.is_alive:\n return True\n\n return False", "def isActive(self):\n return self._timerID is not None", "def is_running(self) -> bool:\n return False", "def active(self) -> bool:\n return pulumi.get(self, \"active\")" ]
[ "0.684747", "0.68215805", "0.6736479", "0.6736479", "0.67310226", "0.67029005", "0.6684114", "0.6684114", "0.6658457", "0.6658457", "0.6658457", "0.6624625", "0.6622321", "0.6609278", "0.6596", "0.6578254", "0.65689", "0.65341485", "0.6479026", "0.6433435", "0.6432685", "0.6391873", "0.63847125", "0.63835776", "0.63794404", "0.6378868", "0.637607", "0.63648105", "0.63579166", "0.6352973", "0.6340525", "0.6330799", "0.63215274", "0.63215274", "0.63129836", "0.62704176", "0.62557244", "0.6243694", "0.6165981", "0.6106241", "0.60784847", "0.6044979", "0.6040178", "0.60159457", "0.5995344", "0.59924376", "0.59808916", "0.5978786", "0.59742856", "0.59702384", "0.59609634", "0.59421283", "0.594202", "0.59412324", "0.59271747", "0.59201264", "0.59173316", "0.5904309", "0.5888048", "0.58874726", "0.58874726", "0.58623695", "0.58623695", "0.58623695", "0.5856148", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.5852767", "0.58501285", "0.5849389", "0.58373845", "0.58348095", "0.58348095", "0.58348095", "0.5834559", "0.582827", "0.5807588", "0.58058727", "0.58021283", "0.58021283", "0.58021283", "0.58021283", "0.58021283", "0.58021283", "0.58021283", "0.58021283", "0.5801343", "0.57689977", "0.57669944", "0.57504636", "0.5743137", "0.57395804" ]
0.8078336
0
Sets the always_running of this MessagingCampaign. Whether this messaging campaign is always running
def always_running(self, always_running): self._always_running = always_running
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def always_running(self):\n return self._always_running", "def set_as_running(self):\n with self._running_condition:\n assert self._state == PENDING_STATE\n self._state = RUNNING_STATE\n self._running_condition.notify()", "def set_running(self):\n with self._done_condition:\n if self._state == PENDING:\n self._state = RUNNING\n return True\n return False", "def running(self, running):\n\n self._running = running", "def running(self, running):\n\n self._running = running", "def running(self, running):\n\n self._running = running", "def is_always(self, is_always):\n\n self._is_always = is_always", "def isScheduleRunning(self):\n if DPxIsDinSchedRunning() == 0:\n schedule_running = False\n else:\n schedule_running = True\n return schedule_running", "def is_running(self) -> bool:\n return self._running.is_set()", "async def is_running(self, **kwargs: Any) -> bool:\n return self._enabled", "def mark_running(self):\r\n self.status = RUNNING", "def is_running(self):\n return self._running.is_set()", "def is_running(self):\n return self._is_running", "def is_running(self):\n return self._is_running", "def is_running(self):\n return self.type_id == STATE_RUNNING", "def is_running(self) -> bool:\n return self._is_running", "def is_running(self):\n\n return self._state == \"RUNNING\"", "def is_running(self):\n return self.running", "def is_running(self):\n return self.running", "def is_running(self):\n return self.running", "def is_running(self):\n return self._running", "def is_running(self):\n return self._running", "def IsRunning(self):\n return self.running", "def is_running(self) -> bool:\r\n return self.__running", "def is_running(self):\n return self.current_state == self.States.RUNNING", "def is_running(self):\n\t\treturn self._running", "def set_running_behavior(self, behavior: Behavior) -> None:", "def running(self) -> bool:\n return self._running", "def is_always_active(self) -> bool:\n if len(self.active_periods) == 0:\n return True\n\n if len(self.active_periods) == 1:\n period = self.active_periods[0]\n if period.lower == 0 and period.upper == 24000:\n return True\n\n return False", "def whenRunning(self):\n whenRunning = self.options.get(RunnerOptions.whenRunning)\n if whenRunning is not None:\n whenRunning(self.options)", "def mark_running(self):\n LOGGER.debug('Marking current_state as: %s', self.States.RUNNING)\n self.current_state = self.States.RUNNING", "def running(self): # type: () -> bool\n return self.state['Running']", "def running(self):\n\n return self._running", "def running(self):\n return self._state == RUNNING_STATE", "def _is_running(self):\n # Public interface is given by get_status instead.\n self._update()\n return True if self.running_mode else False", "def _isrunning(self):\n return self.dp.state()==PyTango.DevState.RUNNING", "def running(self):\n return self._lifetime_state in {\"starting\",\"running\",\"finishing\"}", "def running(self, running: float):\n if running is None:\n raise ValueError(\"Invalid value for `running`, must not be `None`\") # noqa: E501\n \n self._running = running", "def is_running(self) -> bool:\n return False", "def running(self):\n return self.scheduler.running", "def mark_started(self):\n self.started = datetime.now()\n self.save()", "def is_running(self):\n # do we have a job ID to work with?\n if self.jobid == None:\n return False\n else:\n q_status = self.queue.get_status(self.jobid)\n\n if q_status == self.queue.state[\"active\"]:\n self.meta[\"status\"] = 'PENDING'\n return True\n else:\n return False", "def 
_is_running(self):\n return self._run_state.is_running()", "def always_send(self):\n\n return self._always_send", "async def is_running(self, **kwargs: Any) -> bool:\n return True", "def is_running(self):\n # type: () -> bool\n return self._run_state.is_running()", "def is_running(self):\n return self.style['position'][0] == 'running()'", "def is_running(self):\n return all(p.status == 'running' for p in self.values())", "async def set_playing(self, value: bool):\n await self._pytheos.api.player.set_play_state(self.id, models.player.PlayState.Playing if value else models.player.PlayState.Stopped)", "async def async_turn_on_when_active(self, **kwargs: Any) -> None:\n await self._data.controller.programs.start(self.entity_description.uid)\n self._update_activities()", "def is_started(self, is_started):\n\n self._is_started = is_started", "def is_container_running(self, is_container_running):\n\n self._is_container_running = is_container_running", "def is_running(self):\n\t\treturn self in _running", "def turn_on(self, **kwargs):\n if not self.is_on:\n _LOGGER.debug(\"Sending START command to: %s\", self._name)\n self._api.control('START')\n self._mower_status = STATUS_EXECUTING_START\n self.schedule_update_ha_state()", "def running(self):\n\t\treturn self._start is not None", "def is_started(self):\n return self._started", "def set_job_started(self, job_id):\n try:\n self._session.query(JobEntity).\\\n filter(JobEntity.id == job_id).\\\n update({'started': datetime.datetime.now()})\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def running(self) -> float:\n return self._running", "def messaging(self, value: bool):\n if type(value) is not bool:\n raise TypeError(\"Value must be of type 'bool' ('{}' given)\".format(type(value)))\n\n self._messaging = value", "def is_forced_run(self):\n try:\n v = environment.get(\"Run\")\n return v.lower() == \"force\"\n except KeyError:\n return False", "def is_running(self) -> Awaitable[bool]:\n return self.instance.is_running()", "async def async_turn_on_when_active(self, **kwargs: Any) -> None:\n # 1. Use duration parameter if provided from service call\n duration = kwargs.get(CONF_DURATION)\n if not duration:\n if (\n self._entry.options[CONF_USE_APP_RUN_TIMES]\n and ATTR_ZONE_RUN_TIME in self._attr_extra_state_attributes\n ):\n # 2. Use app's zone-specific default, if enabled and available\n duration = self._attr_extra_state_attributes[ATTR_ZONE_RUN_TIME]\n else:\n # 3. 
Fall back to global zone default duration\n duration = self._entry.options[CONF_DEFAULT_ZONE_RUN_TIME]\n await self._data.controller.zones.start(\n self.entity_description.uid,\n duration,\n )\n self._update_activities()", "def turn_on(self, **kwargs):\n self._is_on = True", "def is_running(self):\n data = self._poll()\n return data.get('building', False)", "async def autoplay(self, ctx: commands.Context) -> Optional[bool]:\n\n self.queue[ctx.guild.id].autoplay = not self.queue[ctx.guild.id].autoplay\n return self.queue[ctx.guild.id].autoplay", "def Running(self):\n return self.Timer.IsRunning()", "def set_status_running(self) -> None:\n if self._is_aborted():\n return\n assert self._status == self.Status.WAITING_FOR_TEST_START\n self._status = self.Status.RUNNING\n self.notify_update()", "def __arm(self):\n self._running = True", "def mark_datarun_running(self, datarun_id):\n datarun = self.get_datarun(datarun_id)\n if datarun.status == RunStatus.PENDING:\n datarun.status = RunStatus.RUNNING\n datarun.start_time = datetime.now()", "def isSetPersistent(self):\n return _libsbml.Trigger_isSetPersistent(self)", "def _is_started(self):\n self.started = self.state == 1\n return self.started", "def running(self):\n info = self.info()\n return info['running']", "def is_reminder_on(self):\n return self.__is_reminder_on", "def is_on(self) -> bool:\n return self._is_on", "def turn_on(self) -> None:\n\n chromecast = self._get_chromecast()\n if not chromecast.is_idle:\n # Already turned on\n return\n\n if chromecast.app_id is not None:\n # Quit the previous app before starting splash screen or media player\n chromecast.quit_app()\n\n # The only way we can turn the Chromecast is on is by launching an app\n if chromecast.cast_type == pychromecast.const.CAST_TYPE_CHROMECAST:\n app_data = {\"media_id\": CAST_SPLASH, \"media_type\": \"image/png\"}\n quick_play(chromecast, \"default_media_receiver\", app_data)\n else:\n chromecast.start_app(pychromecast.config.APP_MEDIA_RECEIVER)", "def running(self):\n if self.done() and self._is_running:\n self._is_running = False\n return self._is_running", "def is_running_manager(self) -> bool:\n return self.get_value(self._manager_running_attribute) == '1'", "def autoplay(self):\n # type: () -> bool\n return self._autoplay", "def start_running_manager(self) -> None:\n self.add_value(self._manager_running_attribute, '1')", "def is_running(self):\n self.__condition.acquire()\n result = self.__is_running\n self.__condition.release()\n return result", "def is_active(self, is_active):\n if self.local_vars_configuration.client_side_validation and is_active is None: # noqa: E501\n raise ValueError(\"Invalid value for `is_active`, must not be `None`\") # noqa: E501\n\n self._is_active = is_active", "def running(self):\n return bool(self.proc and self._running())", "def isStarting(self ):\n if self._starting :\n self._starting = False\n return True\n return False", "def start(self):\n return StateRunning(created_at=self.created_at)", "def get_running_campaign(self):\n kwargs = {}\n kwargs['status'] = 1\n tday = datetime.utcnow().replace(tzinfo=utc)\n kwargs['startingdate__lte'] = datetime(tday.year, tday.month, tday.day,\n tday.hour, tday.minute, tday.second, tday.microsecond).replace(tzinfo=utc)\n kwargs['expirationdate__gte'] = datetime(tday.year, tday.month, tday.day,\n tday.hour, tday.minute, tday.second, tday.microsecond).replace(tzinfo=utc)\n\n s_time = \"%s:%s:%s\" % (\n str(tday.hour), str(tday.minute), str(tday.second))\n kwargs['daily_start_time__lte'] = 
datetime.strptime(s_time, '%H:%M:%S')\n kwargs['daily_stop_time__gte'] = datetime.strptime(s_time, '%H:%M:%S')\n\n # weekday status 1 - YES\n # self.model._meta.get_field(tday.strftime(\"%A\").lower()).value()\n kwargs[tday.strftime(\"%A\").lower()] = 1\n\n return Campaign.objects.filter(**kwargs)", "def auto_scaling_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"auto_scaling_enabled\")", "def autoprovisioned(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"autoprovisioned\")", "def active(self):\n return self.starting == 0 and self.stopped == 0", "def set_automatic(self, mode):\n self.slam.controlled = not mode\n if mode:\n self.slam.resume()", "def get_prog_runatstart(self):\n #en = self._get_prop(\"runAtStartup\")\n #return bool( en == \"true\" )\n return bool(self._mydict['runAtStartup'] == \"true\")", "def set_is_org_active(self, is_org_active):\n self.is_org_active = is_org_active", "def handleNewIsRunning(self, isRunning):\n assert self.notify.debugStateCall(self)\n if isRunning:\n self.startIfNeeded()\n else:\n self.request('Off')", "def is_on(self):\n return self.state == WORKING_STATE", "def is_on(self):\n return self.state == WORKING_STATE", "def is_on(self):\n return self.state == WORKING_STATE", "def is_running(self):\n return self._task.running()", "def is_running(self):\n return self._task.running()", "def is_active(self, is_active):\n \n self._is_active = is_active", "def force_run_ingestion(self):\n # Set this value when you want to run the ingestion forcefully (ignores runtime).\n return os.environ.get('SNYK_INGESTION_FORCE_RUN', 'false').lower() in ('1', 'yes', 'true')", "def set_auto_start_import(self, flag):\n\t\tself.checkAutoStartImport.set_active(flag)" ]
[ "0.70217943", "0.6201809", "0.61069536", "0.5851402", "0.5851402", "0.5851402", "0.568576", "0.5624877", "0.5585618", "0.5526143", "0.5486297", "0.54765916", "0.5464183", "0.5464183", "0.5460435", "0.5454445", "0.5423037", "0.5418796", "0.5418796", "0.5418796", "0.53875816", "0.53875816", "0.53815275", "0.53335404", "0.5295837", "0.5268198", "0.52670527", "0.52579874", "0.525458", "0.5237627", "0.52345955", "0.5217852", "0.520496", "0.5188681", "0.5175929", "0.5120197", "0.5109754", "0.510729", "0.50889903", "0.50837934", "0.50744134", "0.50683177", "0.50610334", "0.50422966", "0.50382066", "0.5021832", "0.50118357", "0.4982654", "0.49768403", "0.49745166", "0.49744755", "0.49591273", "0.4957428", "0.49535522", "0.49501774", "0.4940855", "0.49377084", "0.4928402", "0.49281093", "0.49209327", "0.4894559", "0.4893366", "0.4880842", "0.48743552", "0.48727125", "0.4871007", "0.48708022", "0.48631325", "0.48618892", "0.4844362", "0.48439708", "0.48361427", "0.48302102", "0.48164034", "0.48115477", "0.48113203", "0.481026", "0.4803012", "0.4785486", "0.47734803", "0.4766373", "0.47657424", "0.4760868", "0.47542202", "0.47452262", "0.47435936", "0.47343794", "0.47338206", "0.47336748", "0.4719461", "0.47138846", "0.47125632", "0.4710111", "0.4710111", "0.4710111", "0.47097692", "0.47097692", "0.47062963", "0.47059888", "0.47022083" ]
0.8169366
0
Gets the contact_sorts of this MessagingCampaign. The order in which to sort contacts for dialing, based on up to four columns.
def contact_sorts(self): return self._contact_sorts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getSorted(self):\n return sorted(self.contacts)", "def contact_sorts(self, contact_sorts):\n \n self._contact_sorts = contact_sorts", "def get_sort_columns(self):\n col_sort_orders = self.gridpreference_sort.all().values_list('column__id', flat=True)\n return GridColumn.objects.select_related().all().filter(id__in=col_sort_orders)", "def sort_list(self, key_):\n options = {\n 'index': 0,\n 'name' : 1,\n 'surname': 2,\n 'email': 3,\n 'phone': 4,\n }\n if key_ in options.keys():\n key_ = options.get(key_)\n\n return(sorted(self.contacts, key = lambda x: x[key_]))", "def get_contacts(self):\n contacts = Membership.objects.filter(entity = self, key_contact = True).order_by('importance_to_entity')\n return contacts", "def get_sort_columns_raw(self):\n col_sort_orders = self.gridpreference_sort.all().select_related()\n \n return [x.sort_display for x in col_sort_orders]", "def sort_contacts(contacts):\n \n key_list = list(contacts.keys()) #get keys\n key_list.sort() #sort key_list\n sorted_list = [] #initialize sorted list\n for key in key_list:\n contact = (key, contacts[key][0], contacts[key][1]) #create tuple\n sorted_list += [contact] #add tuple to list\n \n return(sorted_list)", "def get_sorted_fields(cls):\n return sorted(\n cls.get_fields(), key=lambda x: (x._primary and 1 or 2, x._order))", "def get_sorted_activities(self):\n return helpers.get_sorted_activities(self)", "def sort_contacts(self, method, order):\n \n method_l = method.lower()\n order_l = order.lower()\n \n if method_l == 'name' and order_l == 'asc':\n name_sort = sorted(self.contacts, key=lambda x: x[0])\n for x in name_sort:\n print(x)\n return name_sort\n elif method_l == 'name' and order_l == 'desc':\n name_sort = sorted(self.contacts, key=lambda x: x[0], reverse=True)\n for x in name_sort:\n print(x)\n return name_sort \n \n elif method_l == 'zipcode' and order_l == 'asc':\n zip_sort = sorted(self.contacts, key=lambda y: y[3])\n for x in zip_sort:\n print(x)\n return zip_sort\n elif method_l == 'zipcode' and order_l == 'desc':\n zip_sort = sorted(self.contacts, key=lambda y: y[3],reverse=True)\n for x in zip_sort:\n print(x)\n return zip_sort", "def get_all_contacts(self):\n self.init_db(self._testing)\n\n query = \"SELECT {} FROM {} ORDER BY id;\".format(\", \".join(Contact.columns_with_uid), Contact.table_name)\n\n data = self.db.conn.execute(query)\n\n return [Contact(*item) for item in data]", "def get_contacts_list(self):\n return [(id + 1, contact) for id, contact in enumerate(self.contact_list)]", "def get_contacts(self):\n\n\t\treturn self.__contacts", "def contact_list(self):\n return self._contact_list", "def cols_sorted(self, cmp=None, key=None, reverse=False):\n return self.select(*sorted(self.names, cmp, key, reverse))", "def _get_order_bys(self, record_class, sorts, convert_key_names_func):\n result = list()\n for sort in sorts:\n attr_name = convert_key_names_func(sort.attr)\n if attr_name is not None and hasattr(record_class, attr_name):\n if sort.direction == \"ASC\":\n result.append(getattr(record_class, attr_name).asc())\n else:\n result.append(getattr(record_class, attr_name).desc())\n else:\n raise AttributeError(\"Invalid attribute.\")\n return result", "def _sort(self, groups):\n return sorted(groups, key=lambda group: (group.name.lower(), group.pubid))", "def get_order_columns(self):\n return self.order_columns", "def GetContactList(self):\n\t\tfeeds = []\n\t\tfeed = self.client.GetContacts()\n\t\tfeeds.append(feed)\n\t\tnext = feed.GetNextLink()\n\t\twhile next:\n\t\t\tfeed = 
self.client.GetContacts(uri=next.href)\n\t\t\tfeeds.append(feed)\n\t\t\tnext = feed.GetNextLink()\n\t\t\n\t\tcontacts = []\n\t\tfor feed in feeds:\n\t\t\tif not feed.entry:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tfor i, entry in enumerate(feed.entry):\n\t\t\t\t\tcontacts.append(entry)\n\t\treturn contacts", "def sort(self):\n # Sort here actually uses the tuple comparison we defined in the Card class\n self.cards.sort()", "def sort_orders(self) -> Dict[int, SortOrder]:\n return {sort_order.order_id: sort_order for sort_order in self.metadata.sort_orders}", "def list_contacts(self):\n return self.contacts", "def get_all_comments_ascending(self):\n try:\n return self.comments.order_by('commented_date')\n except(ValueError, IntegrityError, OperationalError):\n return []", "def get_sort_by(self):\n\n\t\treturn self.__sort_by", "def _sort_data(self, sort_data_by='position'):\n all_mutants = iter(self)\n if sort_data_by=='position':\n sorted_data = sorted(all_mutants, key = lambda m: (m.position, m.IB))\n # x.position here is an Insertion_position object and has a sensible cmp function\n # TODO do unaligned/multi-aligned/unknown positions sort sensibly here?\n elif sort_data_by=='read_count':\n if self.multi_dataset: \n raise MutantError(\"Sorting by readcount in print_data not implemented for multi-datasets!\")\n sorted_data = sorted(all_mutants, key = lambda m: (m.total_read_count, m.perfect_read_count, m.position, m.IB), \n reverse=True)\n else:\n raise MutantError(\"Can't sort mutants by %s - only position or readcount are implemented!\"%sort_data_by)\n return sorted_data", "def sort_cards(self):\n self.cards.sort(key=operator.attrgetter('persona', 'rank'))\n self.update_position()", "def sortedFields(cls):\n return [\n i[0] for i in sorted(cls._nameToValue.items(), key=lambda item: item[1])\n ]", "def sort(self):\n self.cards.sort()", "def sort(self):\n self.cards.sort()", "def find_top_five_most_contacted_listings_per_month(self):\n available_dates = self.contacts.quer_distinct_components(\"contact_date\", \"Contacts\")\n available_listing_ids = self.contacts.quer_distinct_components(\"listing_id\", \"Contacts\")\n \n self.ordered = {}\n for date in available_dates:\n listing = []\n month = self.contacts.quer_component_using_column(\"listing_id\", \"contact_date\", \"Contacts\", date)\n new = [month[0] for month in month]\n for listing_id in available_listing_ids:\n count = new.count(listing_id)\n listing.append([count, listing_id])\n listing = sorted(listing, key=lambda x: x[0], reverse=True)\n\n self.ordered[date] = listing[:5]", "def get_ordered_resources(self):\n \n return self.resources.visible().order_by('members__ordering')", "def getSorted(self,column,reverse):\n data = self.data\n items = data.keys()\n if column == 'Package':\n items.sort(reverse=reverse)\n elif column == 'Files':\n items.sort(key=lambda x: len(data[x].fileSizeCrcs),reverse=reverse)\n else:\n items.sort()\n attr = column.lower()\n if column in ('Package','Group'):\n getter = lambda x: object.__getattribute__(data[x],attr).lower()\n items.sort(key=getter,reverse=reverse)\n else:\n getter = lambda x: object.__getattribute__(data[x],attr)\n items.sort(key=getter,reverse=reverse)\n #--Special sorters\n if settings['bash.installers.sortStructure']:\n items.sort(key=lambda x: data[x].type)\n if settings['bash.installers.sortActive']:\n items.sort(key=lambda x: not data[x].isActive)\n if settings['bash.installers.sortProjects']:\n items.sort(key=lambda x: not isinstance(data[x],InstallerProject))\n return items", "def 
ordering(self, qs):\n request = self.request\n # Number of columns that are used in sorting\n try:\n i_sorting_cols = int(request.REQUEST.get('iSortingCols', 0))\n except ValueError:\n i_sorting_cols = 0\n\n order = []\n order_columns = self.get_order_columns()\n for i in range(i_sorting_cols):\n # sorting column\n try:\n i_sort_col = int(request.REQUEST.get('iSortCol_%s' % i))\n except ValueError:\n i_sort_col = 0\n # sorting order\n s_sort_dir = request.REQUEST.get('sSortDir_%s' % i)\n\n sdir = '-' if s_sort_dir == 'desc' else ''\n\n sortcol = order_columns[i_sort_col]\n if isinstance(sortcol, list):\n for sc in sortcol:\n order.append('%s%s' % (sdir, sc))\n else:\n order.append('%s%s' % (sdir, sortcol))\n if order:\n return qs.order_by(*order)\n return qs", "def get_fields(self):\n \n fields = []\n for order in self.order_lst:\n fields += order.get_fields()\n \n fields = list(set(fields))\n \n out_fields = self.eod.sort_fields(fields)\n \n return out_fields", "def get_queryset(self):\n contact_data = Contact.objects.filter(contact_groups__in=Member.objects.filter(\n user=self.request.user).values('group_id').distinct())\n\n return contact_data", "def get_contacts(self, count=-1, excluded_guid=None):\n current_len = len(self._contacts)\n if current_len == 0 or count == 0:\n return []\n\n if count < 0:\n count = current_len\n else:\n count = min(count, current_len)\n\n if excluded_guid is None:\n # Get the last `count` contacts.\n contact_list = self._contacts[-count:]\n else:\n contact_list = []\n for contact in reversed(self._contacts):\n if contact.guid == excluded_guid:\n continue\n contact_list.append(contact)\n if len(contact_list) >= count:\n break\n return contact_list", "def sort_keys_generate(self, mail):\n\t\t# Reset sort keys for every mail:\n\t\tself.sort_keys = []\n\t\tfor key, form in self.selectors:\n\t\t\t# Sort by filter matches only (1:1):\n\t\t\tif key in self.filter_matches.keys():\n\t\t\t\tself.sort_keys_add(key, form, self.filter_matches[key])\n\t\t\t# Sort by all header parts (1:N):\n\t\t\telse:\n\t\t\t\tself.sort_keys_add(key, form, header_values(key, mail))\n\t\treturn len(self.sort_keys)", "def sort(self):\n sorted_entries = [] # type: list[MSBModel]\n for entry_subtype in MSBModelSubtype:\n sorted_entries += list(sorted(self.get_entries(entry_subtype), key=lambda m: m.name))\n self._entries = sorted_entries", "def get_contacts(self, uuids=None, urns=None, groups=None, before=None, after=None, deleted=None, pager=None):\n params = self._build_params(uuid=uuids, urns=urns, group_uuids=groups, before=before, after=after,\n deleted=deleted)\n return Contact.deserialize_list(self._get_multiple('contacts', params, pager))", "def _sort_cards(self):\n if not self.has_to_sort or not self.sort_by:\n return\n lst = sorted(self.cards,\n key=attrgetter(self.sort_by),\n reverse=self.cmp_reverse)\n if lst != self.cards:\n self._clear_cards()\n self.cards = lst\n for card in self.cards[\n (self.current_page - 1) * self.max_cards:\n self.current_page * self.max_cards]:\n if not self._add_card(card):\n return", "def sorted(self):\n sorted_list = [x for x in self.events.iteritems()]\n sorted_list.sort(key=lambda x: x[1])\n return sorted_list", "def sorted_events(self):\n return sorted(self._events, key=Event.report_sort_key)", "def sort(self):\n self.fragment_list.sort()", "def standard_sorting(cls, zmat):\n if zmat is None:\n return None\n nats = len(zmat)\n ncoords = 3*nats - 6\n if nats < 4:\n return None\n else:\n r_coords = [0, 1, 3]\n a_coords = [2, 4]\n t_coords = [5]\n if 
nats > 4:\n extra = np.arange(6, ncoords+1)\n r_coords += extra[::4].tolist()\n a_coords += extra[1::4].tolist()\n t_coords += extra[2::4].tolist()\n return np.argsort(np.concatenate([r_coords, a_coords, t_coords]))", "def dmc_order(self):\n return sorted(self.lookup_table, key=lambda clr: int(clr.id) if clr.id.isdigit() else 0)", "def get_all_sorted(self):\n self.sort_and_reduce()\n return self.data", "def sort(self):\n self.chain_list.sort()\n for chain in self.chain_list:\n chain.sort()", "def get_vendors_grid_column_names_by_order(self):\n self.column_name_list = self.get_grid_column_names_by_order(self.vendors_div_id)\n return self.column_name_list", "def sortby(self):\n return self._sortby", "def get_sort_order(self):\n\n\t\treturn self.__sort_order", "def get_sorted_students(self):\n results = self.__create_student_and_grade_dto()\n results.sort(self.__compare_dtos_on_grade)\n return results", "def sort_results(self):\n pass", "def get_contacts(self, uuid=None, urn=None, group=None, deleted=None, before=None, after=None):\n params = self._build_params(uuid=uuid, urn=urn, group=group, deleted=deleted, before=before, after=after)\n return self._get_query('contacts', params, Contact)", "def _sort_columns(self, order):\n unknown = set(self._columns) - set(order)\n if unknown:\n names = \", \".join(str(name) for name in unknown)\n raise ValueError(f\"Unknown columns: {names}\")\n\n cols = [self.column_location(column) for column in order]\n\n self._columns = [self._columns[col] for col in cols]\n self._data = [[row[col] for col in cols] for row in self._data]", "def get_group_list(self):\n return [(item[0], item[1][0]) for item in self.contacts_by_group_list]", "def get_country_groups_grid_column_names_by_order(self):\n self.column_name_list = self.get_grid_column_names_by_order(self.country_groups_grid_div_id)\n return self.column_name_list", "def ordering(self):\r\n if hasattr(self, \"queryset\"):\r\n aliases = {}\r\n for bound_column in self.table.columns:\r\n aliases[bound_column.order_by_alias] = bound_column.order_by\r\n try:\r\n return next(segment(self.queryset.query.order_by, aliases))\r\n except StopIteration:\r\n pass", "def sort(self):\r\n\t\treturn sorted(self.sample)", "def object_list(self):\n\n def _sort(ob, ol):\n reverse = ob.startswith(\"-\")\n ob = ob[1:] if reverse else ob\n for column in self.columns:\n if column.sort_key_fn is not None and column.name == ob:\n return sorted(ol, key=column.sort_key_fn, reverse=reverse)\n if self._meta.order_by and hasattr(ol, \"order_by\"):\n return ol.order_by(*self._meta.order_by.split(\"|\"))\n return ol\n\n ol = self._object_list\n ob = self._meta.order_by\n if not ob: return ol\n if isinstance(ob, basestring):\n return _sort(ob, ol)\n elif isinstance(ob, list):\n ob.reverse()\n for fn in ob:\n ol = _sort(fn, ol)\n return ol", "def sort(self):\n sort_key = self.data.chromosome.apply(sorter_chrom)\n self.data = (\n self.data.assign(_sort_key_=sort_key)\n .sort_values(by=[\"_sort_key_\", \"start\", \"end\"], kind=\"mergesort\")\n .drop(\"_sort_key_\", axis=1)\n .reset_index(drop=True)\n )", "def ListAllContacts(self):\n feed = self.gd_client.GetContacts()\n self.contacts = self.CleanPhoneNumbers(self.GetContactsInfo(feed))\n return self.contacts", "def get_orderings(self):\n if self._orderings is Undefined:\n self._orderings = self.normalize_orderings(self.ordering)\n return self._orderings", "def _sort_hybrid_meta_cols(self):\n self.__hybrid_meta_cols = sorted(\n [c for c in self._hybrid_meta.columns\n if not 
c.startswith(self._INTERNAL_COL_PREFIX)],\n key=self._column_sorting_key\n )", "def sort(self):\r\n self.candidates.sort(key=self.sortFitness)\r\n return", "def sort(points):\n if len(points) == 0:\n return []\n \n starting_vertex = min(points)\n reference_point = starting_vertex + Point2D(0, 1)\n \n return sorted(points, key=partial(\n get_angle_and_distance, point_2=starting_vertex, point_3=reference_point\n ))", "def sortby(self):\n ...", "def ordering(self):\n value = []\n for i in self:\n if isinstance(i, PQ):\n value.extend(i.ordering())\n else:\n value.append(i)\n\n return value", "def make_order_by_combination(self):\n order_by_list = []\n order_by = self.request.GET.get(\"order_by\", None)\n\n if order_by:\n order_by_list = [SORT_BY_REFERENCE_DICT[i.strip()]\n for i in order_by.split(\",\")]\n\n return order_by_list", "def get_re_analysis_grid_column_names_by_order(self):\n self.column_name_list = self.get_grid_column_names_by_order(self.re_analysis_grid_div_id)\n return self.column_name_list", "def sort(self, value_key=None, ascending=True):\r\n\t\tsorted_indexes = MultiPointData.sort(self, value_key=value_key, ascending=ascending)\r\n\t\tself.sdr = np.array(self.sdr)[sorted_indexes]\r\n\t\treturn sorted_indexes", "def get_destinations_grid_column_names_by_order(self):\n self.column_name_list = self.get_grid_column_names_by_order(self.destinations_grid_div_id)\n return self.column_name_list", "def sort_by_position(self):\n sorted_indx = np.argsort(self.vehicles.get_absolute_position(self.ids))\n sorted_ids = np.array(self.ids)[sorted_indx]\n return sorted_ids, None", "def order(self):\n\n return np.array([bond.order for bond in self])", "def _sorting_order_columns(\n self,\n m,\n byRow=0,\n usingCellValue=0,\n descending=True,\n file_name=None,\n client_name=None):\n\n def _get_list_without_excluded_items(client_name, _keep_at_end):\n\n if _keep_at_end != []:\n return [str(c[usingCellValue].Value) + \" \" + \\\n c.TopMember.Label for c in m[byRow] if not \\\n c.TopMember.Label in _keep_at_end]\n\n elif client_name is not None:\n return [str(c[usingCellValue].Value) + \" \" + \\\n c.TopMember.Label for c in m[byRow] if \\\n c.TopMember.Label != client_name]\n\n else:\n return [str(c[usingCellValue].Value) + \" \" +\n c.TopMember.Label for c in m[byRow]]\n\n _keep_at_end = self._get_keep_at_end(file_name)\n\n _lst = _get_list_without_excluded_items(client_name, _keep_at_end)\n\n # reverse the list to make ascending order\n if descending:\n _lst_incl_cells = list(reversed(self._sorted_nicely(_lst)))\n else:\n _lst_incl_cells = list(self._sorted_nicely(_lst))\n\n return self._rank_position(_lst_incl_cells)", "def GetSortImages(self):\n\n return self.sort_down, self.sort_up", "def _sort_compounds(self):\n self.sorted_molecules = sorted(self.values(), key=operator.attrgetter('criterion'))", "def getTableOrderFields():\n return [\"report__reporter__org_name\",\n \"report__domain\",\n \"dkim\",\n \"spf\",\n \"disposition\",\n \"\", # raw dkim domains/results are not ordered\n \"\", # raw dkim domains/results are not ordered\n \"count\",\n \"source_ip\",\n \"country_iso_code\",\n \"report__date_range_begin\",\n \"report__date_range_end\",\n \"report__report_id\"]", "def get_all_contacts(self,\n hook,\n resource,\n data=None,\n headers=None,\n extra_options=None):\n all_pages = []\n total_contacts = -1\n next_token = None\n\n while len(all_pages) != total_contacts:\n if not next_token:\n result = hook.run('{}/contacts'.format(resource),\n data,\n headers,\n extra_options).json()\n else:\n 
result = hook.run('{}/contacts/{}'.format(resource, next_token),\n data,\n headers,\n extra_options).json()\n\n all_pages += result.get('contacts', None)\n\n total_contacts = result.get('total_contacts', None)\n\n if 'bookmark' in result:\n next_token = result.get('bookmark', None)\n\n return all_pages", "async def get_contacts_for_contact_group(dbcon: DBConnection, contact_group_id: int) -> Iterable[object_models.Contact]:\n q = \"\"\"select\n contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active\n from contact_group_contacts, contacts\n where contact_group_contacts.contact_group_id = %s\n and contact_group_contacts.contact_id = contacts.id\"\"\"\n return [object_models.Contact(*row) for row in await dbcon.fetch_all(q, (contact_group_id,))]", "def reorder( self ):\n self.sorted.sort(self.compareFunction)", "def sort(self, objects, reverse=True):\n return sorted(objects,\n key=lambda objlink: objlink[self],\n reverse=reverse)", "def sort(self):\n\n self.models.sort(key=methodcaller('get_age'))", "def get_rates_grid_column_names_by_order(self):\n self.column_name_list = self.get_grid_column_names_by_order(self.rates_grid_div_id)\n return self.column_name_list", "def get_sorted():\n return sorted(country_list, key=get_pop_and_name)", "def _sort_records(self):\n self.records.sort(reverse=True, key=lambda record: record.timestamp)", "def __SortLists(self): \n\n \n AS=argsort(self.__NumList)\n\n self.__IndList=[self.__IndList[i] for i in AS]#list(self.__IndList[AS])\n self.__ObjList=[self.__ObjList[i] for i in AS]#list(self.__IndList[AS])\n self.__NumList=[self.__NumList[i] for i in AS]", "def mech_tuples_sorted(self):\n return sorted(self.mech_tuples, key=lambda mech_tuple: mech_tuple[0].name)", "def sortChoices(self):\n self.formatList.sort()", "def sorted_cities(seed):\n\n db = generate_db(seed, cities, sales_people, sales_range, stay_range, success_rate, total_visits)\n sorted_list = get_sorted_cities(db)\n return sorted_list", "def orderList(dataSource,**kwargs):\n\treturn sorted(dataSource)", "def get_ordering(self, request, queryset, view):\n ordering = []\n params = get_datatables_ordering(request.query_params)\n if params:\n fields = [param.strip() for param in params.split(',')]\n ordering = self.remove_invalid_fields(queryset, fields, view, request)\n if ordering:\n return ordering\n\n # No ordering was included, or all the ordering fields were invalid\n return self.get_default_ordering(view)", "def sort(self):\n tmp = list(zip(self.user_points, self.user_ids));\n tmp = sorted(tmp, reverse=True);\n self.user_points, self.user_ids = list(zip(*tmp));\n \n self.user_points = list(self.user_points);\n self.user_ids = list(self.user_ids);", "def sort_facility(self):\n self.entries.sort(key=lambda x: x.severity)\n self.entries.sort(key=lambda x: x.facility)", "def sort_decls(self):\n\n #import simplecssbuilder\n #SHORTHAND_REL = simplecssbuilder.SHORTHAND_REL\n\n def decl_key(decl):\n \"\"\"key for sorting declarations\"\"\"\n prop = decl.split(':')[0] # get property name\n if str(prop) in SHORTHAND_REL_inv:\n return SHORTHAND_REL_inv[str(prop)]\n else:\n return str(prop)\n\n def sort_decls_clique(clique):\n \"\"\"Sort the declarations in clique\"\"\"\n (_,ps) = clique\n ps.sort(key=decl_key)\n\n # compute the inverse of SHORTHAND_REL\n SHORTHAND_REL_inv = dict()\n for k,vs in SHORTHAND_REL.iteritems():\n for v in vs:\n SHORTHAND_REL_inv[v] = k\n\n #print 'PRINTING CLIQUES'\n for clique in self.cliques:\n #print clique\n sort_decls_clique(clique)\n #print clique", 
"def sort(self):\n self.deckcards.sort()", "def auth_contact_methods(self):\n return self._auth_contact_options", "def get_dialed_digits_grid_column_names_by_order(self):\n self.column_name_list = self.get_grid_column_names_by_order(self.dialed_digits_grid_div_id)\n return self.column_name_list", "async def get_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.get_certificate_contacts(\n vault_base_url=self._vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def get_vendor_price_list_detail_dial_digits_grid_column_names_by_order(self):\n self.wait_for_ajax_spinner_load(300)\n self.column_name_list = self.get_grid_column_names_by_order(self.vendor_price_list_detail_dial_digits_grid_div_id)\n return self.column_name_list", "def get_all(self):\n total_contacts = []\n get_count = {\n 'query': {\n 'object': 'CONTACT',\n 'select': {\n 'field': 'RECORDNO'\n },\n 'pagesize': '1'\n }\n }\n\n response = self.format_and_send_request(get_count)\n count = int(response['data']['@totalcount'])\n pagesize = 2000\n offset = 0\n for i in range(0, count, pagesize):\n data = {\n 'query': {\n 'object': 'CONTACT',\n 'select': {\n 'field': [\n 'RECORDNO',\n 'CONTACTNAME',\n 'COMPANYNAME',\n 'FIRSTNAME',\n 'LASTNAME',\n 'INITIAL',\n 'PRINTAS',\n 'TAXABLE',\n 'MAILADDRESS.ADDRESS1'\n ]\n },\n 'pagesize': pagesize,\n 'offset': offset\n }\n }\n contacts = self.format_and_send_request(data)['data']['CONTACT']\n total_contacts = total_contacts + contacts\n offset = offset + pagesize\n return total_contacts" ]
[ "0.6996267", "0.6311992", "0.60650474", "0.60044813", "0.5959529", "0.5943762", "0.5862135", "0.57969254", "0.5660018", "0.5624674", "0.5591094", "0.55369407", "0.52994704", "0.52872974", "0.52799004", "0.5243885", "0.5241674", "0.5228304", "0.5202736", "0.51900417", "0.5163623", "0.5146807", "0.5140924", "0.51158285", "0.50962734", "0.5085119", "0.50788325", "0.5068385", "0.5068385", "0.50534034", "0.5047913", "0.5047846", "0.50441617", "0.50414646", "0.5041407", "0.50269544", "0.5015454", "0.49980253", "0.49952057", "0.49664444", "0.49531704", "0.49190608", "0.4902544", "0.4896536", "0.48882434", "0.48791003", "0.48786145", "0.4865455", "0.48602858", "0.48544568", "0.48501918", "0.4845632", "0.48425314", "0.4838493", "0.4818483", "0.48134315", "0.4793284", "0.47806332", "0.47737807", "0.4773751", "0.47731295", "0.47708702", "0.47672823", "0.4765094", "0.476465", "0.47638273", "0.4761922", "0.47560275", "0.473108", "0.47257927", "0.4723661", "0.47140506", "0.4703703", "0.4700238", "0.46968478", "0.46866348", "0.468496", "0.4683311", "0.46819076", "0.46682334", "0.46672207", "0.46656257", "0.4653696", "0.46536666", "0.46494108", "0.4647414", "0.4643485", "0.46428916", "0.46347624", "0.46339834", "0.4633218", "0.4628451", "0.46270838", "0.4622138", "0.4621548", "0.46115243", "0.46111012", "0.46109602", "0.46082383", "0.4607261" ]
0.8388801
0
Sets the contact_sorts of this MessagingCampaign. The order in which to sort contacts for dialing, based on up to four columns.
def contact_sorts(self, contact_sorts): self._contact_sorts = contact_sorts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def contact_sorts(self):\n return self._contact_sorts", "def set_sorts(self, sorts: List[DataGridSort]):\n self.sorts = sorts", "def set_contacts(self, contacts):\n\n\t\tif contacts is not None and not isinstance(contacts, list):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: contacts EXPECTED TYPE: list', None, None)\n\t\t\n\t\tself.__contacts = contacts\n\t\tself.__key_modified['Contacts'] = 1", "def sort(self, sort):\n\n self._sort = sort", "def contacts(self, contacts):\n\n self._contacts = contacts", "def contacts(self, contacts):\n\n self._contacts = contacts", "def getSorted(self):\n return sorted(self.contacts)", "def sort_order(self, sort_order: int):\n\n self._sort_order = sort_order", "def sort_order(self, sort_order):\n\n self._sort_order = sort_order", "def _sort_columns(self, order):\n unknown = set(self._columns) - set(order)\n if unknown:\n names = \", \".join(str(name) for name in unknown)\n raise ValueError(f\"Unknown columns: {names}\")\n\n cols = [self.column_location(column) for column in order]\n\n self._columns = [self._columns[col] for col in cols]\n self._data = [[row[col] for col in cols] for row in self._data]", "def sort_contacts(self, method, order):\n \n method_l = method.lower()\n order_l = order.lower()\n \n if method_l == 'name' and order_l == 'asc':\n name_sort = sorted(self.contacts, key=lambda x: x[0])\n for x in name_sort:\n print(x)\n return name_sort\n elif method_l == 'name' and order_l == 'desc':\n name_sort = sorted(self.contacts, key=lambda x: x[0], reverse=True)\n for x in name_sort:\n print(x)\n return name_sort \n \n elif method_l == 'zipcode' and order_l == 'asc':\n zip_sort = sorted(self.contacts, key=lambda y: y[3])\n for x in zip_sort:\n print(x)\n return zip_sort\n elif method_l == 'zipcode' and order_l == 'desc':\n zip_sort = sorted(self.contacts, key=lambda y: y[3],reverse=True)\n for x in zip_sort:\n print(x)\n return zip_sort", "def sort_list(self, key_):\n options = {\n 'index': 0,\n 'name' : 1,\n 'surname': 2,\n 'email': 3,\n 'phone': 4,\n }\n if key_ in options.keys():\n key_ = options.get(key_)\n\n return(sorted(self.contacts, key = lambda x: x[key_]))", "def reorder( self ):\n self.sorted.sort(self.compareFunction)", "def sort_contacts(contacts):\n \n key_list = list(contacts.keys()) #get keys\n key_list.sort() #sort key_list\n sorted_list = [] #initialize sorted list\n for key in key_list:\n contact = (key, contacts[key][0], contacts[key][1]) #create tuple\n sorted_list += [contact] #add tuple to list\n \n return(sorted_list)", "def sort(self):\n self.cards.sort()", "def sort(self):\n self.cards.sort()", "def sort_cards(self):\n self.cards.sort(key=operator.attrgetter('persona', 'rank'))\n self.update_position()", "def sortChoices(self):\n self.formatList.sort()", "def sort(self):\n sort_key = self.data.chromosome.apply(sorter_chrom)\n self.data = (\n self.data.assign(_sort_key_=sort_key)\n .sort_values(by=[\"_sort_key_\", \"start\", \"end\"], kind=\"mergesort\")\n .drop(\"_sort_key_\", axis=1)\n .reset_index(drop=True)\n )", "def set_sort_by(self, sort_by):\n\n\t\tif sort_by is not None and not isinstance(sort_by, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: sort_by EXPECTED TYPE: str', None, None)\n\t\t\n\t\tself.__sort_by = sort_by\n\t\tself.__key_modified['sort_by'] = 1", "def contact_list(self, contact_list):\n \n self._contact_list = contact_list", "def set_sort_order(self, sort_order):\n\n\t\tif sort_order is not None and not isinstance(sort_order, str):\n\t\t\traise 
SDKException(Constants.DATA_TYPE_ERROR, 'KEY: sort_order EXPECTED TYPE: str', None, None)\n\t\t\n\t\tself.__sort_order = sort_order\n\t\tself.__key_modified['sort_order'] = 1", "def sort(self):\n # Sort here actually uses the tuple comparison we defined in the Card class\n self.cards.sort()", "async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:\n new_contacts = await self._client.set_certificate_contacts(\n vault_base_url=self.vault_url,\n contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),\n **kwargs\n )\n return [\n CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list\n ]", "def sort(self):\n tmp = list(zip(self.user_points, self.user_ids));\n tmp = sorted(tmp, reverse=True);\n self.user_points, self.user_ids = list(zip(*tmp));\n \n self.user_points = list(self.user_points);\n self.user_ids = list(self.user_ids);", "def set_as_sort1(self):\n if self.is_sort1:\n #if self.analysis_code == 1:\n #self.nonlinear_factor = np.nan\n #print(self.data_code)\n #print(self._times, type(self._times))\n #aaa\n return\n #print(f'{self.class_name}-{self.table_name}')\n self.table_name = SORT2_TABLE_NAME_MAP[self.table_name]\n self.sort_bits[1] = 0 # sort1\n self.sort_method = 1\n assert self.is_sort1 is True, self.is_sort1\n self._update_time_word()", "def sort(self, cmp=None, key=None, reverse=False):\n o = order(list(self), cmp, key, reverse)\n # Modify the table in place, more than one variable may be referencing it:\n r=list(self._table); [self._table.__setitem__(i2, r[i1]) for i2, i1 in enumerate(o)]", "def sort_by_default(self):\n self.data.sort()", "def sort(self):\n self.chain_list.sort()\n for chain in self.chain_list:\n chain.sort()", "def sort_columns(self):\n extra_cols = []\n for col in self.data.columns:\n if col not in self._required_columns:\n extra_cols.append(col)\n sorted_colnames = list(self._required_columns) + sorted(extra_cols)\n assert len(sorted_colnames) == len(self.data.columns)\n self.data = self.data.reindex(columns=sorted_colnames)", "def refreshContacts(self):\n contact_ids = self._getAllContactIds()\n contacts = self._getContacts(contact_ids)\n\n self.contacts = [LineContact(self, contact) for contact in contacts]\n\n self.contacts.sort()", "def sort(self):\n sorted_entries = [] # type: list[MSBModel]\n for entry_subtype in MSBModelSubtype:\n sorted_entries += list(sorted(self.get_entries(entry_subtype), key=lambda m: m.name))\n self._entries = sorted_entries", "def sorting(self, name, on='@', within=False, between=False, fix=None,\n ascending=False, sort_by_weight='auto'):\n for n in name:\n is_array = self.is_array(n)\n collection = 'masks' if is_array else 'columns'\n if on != '@' and not is_array:\n msg = \"Column to sort on can only be changed for array summaries!\"\n raise NotImplementedError(msg)\n if on == '@' and is_array:\n for source in self.sources(n):\n self.sorting(source, fix=fix, within=within,\n between=between, ascending=ascending,\n sort_by_weight=sort_by_weight)\n else:\n if 'rules' not in self._meta[collection][n]:\n self._meta[collection][n]['rules'] = {'x': {}, 'y': {}}\n if not is_array:\n n_fix = self._clean_codes_against_meta(n, fix)\n else:\n n_fix = self._clean_items_against_meta(n, fix)\n rule_update = {'ascending': ascending,\n 'within': within,\n 'between': between,\n 'fixed': n_fix,\n 'sort_on': on,\n 'with_weight': sort_by_weight}\n self._meta[collection][n]['rules']['x']['sortx'] = 
rule_update\n return None", "def sortby(self, sortby):\n self._sortby = sortby", "def sort(self):\n\t\twith self.AutoSplitlines():\n\t\t\tself.lines = sorted(self.lines)", "def sort_results(self, sort_option):\r\n self.model.sort_data(sort_option)", "def contact_points(self, contact_points: object):\n\n self._contact_points = contact_points", "def update_contacts(self, contacts):\n\n if contacts.time.size != 1:\n raise IndexError(\"Contacts should be from one frame only\")\n if contacts.channel.size != self.contacts.channel.size:\n self.new_contact_set(contacts)\n return # Prevent calling update_contacts recursively\n self.contacts = contacts\n contacts = np.array(contacts)\n\n for i, actor in enumerate(self.contacts_actors):\n # mapper = actors.GetNextActor().GetMapper()\n mapper = actor.GetMapper()\n self.contacts_actors[i].GetProperty().SetColor(self.contacts_color)\n self.contacts_actors[i].GetProperty().SetOpacity(self.contacts_opacity)\n source = vtkSphereSource()\n source.SetCenter(contacts[0:3, i])\n source.SetRadius(self.contacts_size)\n mapper.SetInputConnection(source.GetOutputPort())", "def sort(self):\n self.notes.sort()", "def sort_keys_generate(self, mail):\n\t\t# Reset sort keys for every mail:\n\t\tself.sort_keys = []\n\t\tfor key, form in self.selectors:\n\t\t\t# Sort by filter matches only (1:1):\n\t\t\tif key in self.filter_matches.keys():\n\t\t\t\tself.sort_keys_add(key, form, self.filter_matches[key])\n\t\t\t# Sort by all header parts (1:N):\n\t\t\telse:\n\t\t\t\tself.sort_keys_add(key, form, header_values(key, mail))\n\t\treturn len(self.sort_keys)", "def sort(self, *args, **kwargs):\n self._sequence.sort(*args, **kwargs)", "def ordering(self, qs):\n request = self.request\n # Number of columns that are used in sorting\n try:\n i_sorting_cols = int(request.REQUEST.get('iSortingCols', 0))\n except ValueError:\n i_sorting_cols = 0\n\n order = []\n order_columns = self.get_order_columns()\n for i in range(i_sorting_cols):\n # sorting column\n try:\n i_sort_col = int(request.REQUEST.get('iSortCol_%s' % i))\n except ValueError:\n i_sort_col = 0\n # sorting order\n s_sort_dir = request.REQUEST.get('sSortDir_%s' % i)\n\n sdir = '-' if s_sort_dir == 'desc' else ''\n\n sortcol = order_columns[i_sort_col]\n if isinstance(sortcol, list):\n for sc in sortcol:\n order.append('%s%s' % (sdir, sc))\n else:\n order.append('%s%s' % (sdir, sortcol))\n if order:\n return qs.order_by(*order)\n return qs", "def _sort_cards(self):\n if not self.has_to_sort or not self.sort_by:\n return\n lst = sorted(self.cards,\n key=attrgetter(self.sort_by),\n reverse=self.cmp_reverse)\n if lst != self.cards:\n self._clear_cards()\n self.cards = lst\n for card in self.cards[\n (self.current_page - 1) * self.max_cards:\n self.current_page * self.max_cards]:\n if not self._add_card(card):\n return", "def sortValues(self, cmp=None, key=None, reverse=False):\n self._sortValues = dict(cmp=cmp, key=key, reverse=reverse)\n return self", "def _sort_compounds(self):\n self.sorted_molecules = sorted(self.values(), key=operator.attrgetter('criterion'))", "def sort(self, column, order=Qt.AscendingOrder):\n if(column == Columns.Date):\n self.sorting = Sorting.Date\n elif(column == Columns.Code):\n self.sorting = Sorting.Code\n elif(column == Columns.User):\n self.sorting = Sorting.User\n elif(column == Columns.Tags):\n self.sorting = Sorting.Priviledges\n elif(column == Columns.TimesRequested):\n self.sorting = Sorting.TimesRequested\n\n if(order == Qt.DescendingOrder):\n self.sorting |= Sorting.Reversed\n\n 
self._reset_view()", "def sort(self):\n self.model_list.sort()\n for model in self.model_list:\n model.sort()", "def sort(\n self, sort_order: list = None, value_order: str = None, suit_order: list = None\n ) -> None:\n # Set default arguments\n sort_order = [\"value\", \"suit\"] if not sort_order else sort_order\n suit_order = (\n [\"clubs\", \"diamonds\", \"spades\", \"hearts\", \"none\"]\n if not suit_order\n else suit_order\n )\n value_order = \"asc\" if not value_order else value_order\n\n for sort_option in sort_order:\n sort_fx = getattr(self, f\"_sort_by_{sort_option}\")\n order_params = locals().get(f\"{sort_option}_order\")\n self.cards = sort_fx(self.cards, order_params)", "def sort_by(self, sort_direction: epl_imagery_pb2.SortDirection):\n # TODO if you want to sort by multiple parameters, then this class will have to have a pointer to the filter\n if self.metadata_filters.sorted_by:\n self.metadata_filters.sorted_by.query_params.sort_direction = epl_imagery_pb2.NOT_SORTED\n\n self.metadata_filters.sorted_by = self\n\n # class that contains it, and upon updating this class there is a call back to the container class to insert\n # this parameter in a list\n self.query_params.sort_direction = sort_direction\n self.b_initialized = True", "def on_combo_sort_col_names_currentIndexChanged(self, index):\n if self.ui.sort_radio_asc.isChecked():\n self.model.setSort(index, Qt.AscendingOrder)\n else:\n self.model.setSort(index, Qt.DescendingOrder)\n self.model.select()", "def change_sort(self, sorting_choice):\r\n self.message = \"place have been sorted by: {}\".format(sorting_choice)\r\n self.place_list.sort(sorting_choice)\r\n self.root.ids.entriesBox.clear_widgets()\r\n self.create_widget()\r\n sort_index = self.sort_choices.index(sorting_choice)\r\n self.current_sort = self.sort_choices[sort_index]", "def _sort_data(self, sort_data_by='position'):\n all_mutants = iter(self)\n if sort_data_by=='position':\n sorted_data = sorted(all_mutants, key = lambda m: (m.position, m.IB))\n # x.position here is an Insertion_position object and has a sensible cmp function\n # TODO do unaligned/multi-aligned/unknown positions sort sensibly here?\n elif sort_data_by=='read_count':\n if self.multi_dataset: \n raise MutantError(\"Sorting by readcount in print_data not implemented for multi-datasets!\")\n sorted_data = sorted(all_mutants, key = lambda m: (m.total_read_count, m.perfect_read_count, m.position, m.IB), \n reverse=True)\n else:\n raise MutantError(\"Can't sort mutants by %s - only position or readcount are implemented!\"%sort_data_by)\n return sorted_data", "def sort_data(self):\n\n # zips the game_list and game_Scores, sorts the result by scores, and then puts them back.\n self.game_list, self.game_scores = zip(*sorted(zip(self.game_list, self.game_scores), key=lambda pair: pair[1]))", "def new_contact_set(self, contacts):\n if contacts.time.size != 1:\n raise IndexError(\"Contacts should be from one frame only\")\n self.contacts = contacts\n\n # Remove previous actors from the scene\n for actor in self.contacts_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.contacts_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtk.vtkPoints()\n for i in range(contacts.channel.size):\n # Create a mapper\n mapper = vtkPolyDataMapper()\n\n # Create an actor\n self.contacts_actors.append(vtkActor())\n self.contacts_actors[i].SetMapper(mapper)\n\n self.parent_window.ren.AddActor(self.contacts_actors[i])\n\n # Update marker position\n 
self.update_contacts(self.contacts)", "def archive_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='archive'))", "def sortby(self):\n ...", "def set_sort_priority(self, priority):\n self.__sorting_priority = priority", "def sort(self):\n self.fragment_list.sort()", "def sort(self):\n self.deckcards.sort()", "def default_sort_column(self, default_sort_column):\n\n self._default_sort_column = default_sort_column", "def sort_index(self):\n def s(t):\n return tuple(sorted(t, key=self.clade_order))\n self.scf.index = self.scf.index.map(s)\n self.scf.index.names = [s.split('_')[0] for s in self.scf.index[0]]", "def _sort_modes(self):\n sort_idx = np.lexsort((self.modes[:, 1], self.modes[:, 0], self.modes[:, 2]))\n self._modes = self.modes[sort_idx]", "def sort(self, exprs):\n self._sort_exprs = exprs\n return self", "def find_top_five_most_contacted_listings_per_month(self):\n available_dates = self.contacts.quer_distinct_components(\"contact_date\", \"Contacts\")\n available_listing_ids = self.contacts.quer_distinct_components(\"listing_id\", \"Contacts\")\n \n self.ordered = {}\n for date in available_dates:\n listing = []\n month = self.contacts.quer_component_using_column(\"listing_id\", \"contact_date\", \"Contacts\", date)\n new = [month[0] for month in month]\n for listing_id in available_listing_ids:\n count = new.count(listing_id)\n listing.append([count, listing_id])\n listing = sorted(listing, key=lambda x: x[0], reverse=True)\n\n self.ordered[date] = listing[:5]", "def sort(self):\n self.words = set(sorted(self.words))", "def _sort_validators(self):\n self._validators = sorted_poset(\n iterable=self._validators,\n key=lambda record: record.context,\n reverse=True)\n\n self._is_sorted = True", "def __SortLists(self): \n\n \n AS=argsort(self.__NumList)\n\n self.__IndList=[self.__IndList[i] for i in AS]#list(self.__IndList[AS])\n self.__ObjList=[self.__ObjList[i] for i in AS]#list(self.__IndList[AS])\n self.__NumList=[self.__NumList[i] for i in AS]", "def _sort_hybrid_meta_cols(self):\n self.__hybrid_meta_cols = sorted(\n [c for c in self._hybrid_meta.columns\n if not c.startswith(self._INTERNAL_COL_PREFIX)],\n key=self._column_sorting_key\n )", "def _sort(self, groups):\n return sorted(groups, key=lambda group: (group.name.lower(), group.pubid))", "def sort_order(self, sort_order: int):\n if sort_order is None:\n raise ValueError(\"Invalid value for `sort_order`, must not be `None`\")\n\n self._sort_order = sort_order", "def cols_sorted(self, cmp=None, key=None, reverse=False):\n return self.select(*sorted(self.names, cmp, key, reverse))", "def sort(self, reverse=False, seq_position=False):\n if seq_position:\n self.edits = sorted(self.edits, key=lambda edit: edit.sequence_position, reverse=reverse)\n else:\n self.edits.sort(reverse=reverse)", "def set_contacts_color(self, contacts_color):\n self.contacts_color = contacts_color\n self.update_contacts(self.contacts)", "def sort(self):\r\n self.candidates.sort(key=self.sortFitness)\r\n return", "def set_contacts_size(self, contacts_size):\n self.contacts_size = contacts_size\n self.update_contacts(self.contacts)", "def _sort_records(self):\n self.records.sort(reverse=True, key=lambda record: record.timestamp)", "def sort(self, desc):\n self.__sortByIndex(0, desc)", "def order_entries(self):\n for key in self.entries.keys():\n entry_set = self.entries[key]\n if len(entry_set) > 0:\n sorted_set = self.order_entry_set(entry_set)\n self.entries[key] = sorted_set", "def 
set_sorting_enabled(self, value):\n self.tableWidget.setSortingEnabled(value)", "def _sort(self):\n self.population.sort()\n self.population.reverse()", "def sort(self, Ncol, order):\n self.emit(SIGNAL(\"layoutAboutToBeChanged()\"))\n self.arraydata = sorted(self.arraydata, key=operator.itemgetter(Ncol)) \n if order == Qt.DescendingOrder:\n self.arraydata.reverse()\n self.emit(SIGNAL(\"layoutChanged()\"))", "def order_by(self, *fields):\n self._evaluated = False\n if self._order is None:\n self._order = []\n\n for field in fields:\n direction = \"asc\"\n if field.startswith('-'):\n direction = \"desc\"\n field = field[1:]\n\n self._order.append({ field : direction })\n\n return self", "def __handle_sorts(self, df):\n for sort in self.sorts:\n ascending = True if sort.order == SortOrder.ASCENDING else False\n if sort.sortType == SortType.ABSOLUTE_VALUE:\n df = df.reindex(df[sort.columnName].abs().sort_values(ascending=ascending, na_position='last').index)\n else:\n df = df.sort_values(by=sort.columnName, ascending=ascending, na_position='last')\n return df", "def sort(self, Ncol, order):\n self.emit(SIGNAL(\"layoutAboutToBeChanged()\"))\n self.arraydata = sorted(self.arraydata, key=operator.itemgetter(Ncol)) \n if order != Qt.DescendingOrder:\n self.arraydata.reverse()\n self.emit(SIGNAL(\"layoutChanged()\"))", "def sort(self):\n self.list.sort(key=lambda x: ''.join)", "def _config_sortable(self, sortable):\n for col in self[\"columns\"]:\n command = (lambda c=col: self._sort_column(c, True)) if sortable else \"\"\n self.heading(col, command=command)\n self._sortable = sortable", "def sort(self, col, order):\r\n self.emit(SIGNAL(\"layoutAboutToBeChanged()\"))\r\n self.mylist = sorted(self.mylist,\r\n key=operator.itemgetter(col))\r\n if order == Qt.DescendingOrder:\r\n self.mylist.reverse()\r\n self.emit(SIGNAL(\"layoutChanged()\"))", "def archive_contact_messages(self, org, contact):\n pass", "def sort(self,desc):\n\tself.__sort(\"\",\"\",desc)", "def sort(self):\n\t\tself.servers = sorted(self.servers, key=lambda s: s.load)\n\t\tself.servers = sorted(self.servers, key=lambda s: s.distance_class)\n\t\tself.servers = sorted(self.servers, key=lambda s: s.country == self.locale_info.country, reverse=True)", "def sort_list(self,list_):\r\n list_.sort()", "def sort(self):\r\n self.list.sort(key=lambda x: ''.join(x))", "def _sort(self):\n self.rows.sort(key=lambda x: (x['PERC1'], x['EQ'], x['PASS'], x['W2']),\n reverse=True)\n\n rank = 0\n prev_perc = 0\n prev_rank = 0\n for row in self.rows:\n if row[\"NR\"] == 0:\n # Something has already populated NR as 0 - so we set rank as\n # 0 too\n row['_RANK'] = 0\n row['_NR'] = 0\n continue\n\n # Increment our count\n rank += 1\n if row['PERC1'] == prev_perc:\n row['NR'] = \"\"\n row['_NR'] = prev_rank # I.e. joint 6th will be 6 here\n row['_RANK'] = rank # I.e. joint 6th could be 7, or 8 etc. 
here\n else:\n row['NR'] = rank\n row['_NR'] = rank\n row['_RANK'] = rank\n prev_perc = row['PERC1']\n prev_rank = rank", "def support_contacts(self, support_contacts):\n self._support_contacts = support_contacts", "def sort(self, varnames):\n varnames = self._find_vars(varnames, unique=True, empty_ok=False)\n var_ind_list = list(map(self._varlist.index, varnames))\n new_srtlist = var_ind_list + [None]*(self._nvar - len(varnames))\n if self._srtlist == new_srtlist:\n return\n sort_key = lambda row: [row[i] for i in var_ind_list]\n self._varvals.sort(key = sort_key)\n self._srtlist = new_srtlist\n self._changed = True", "def _sortHandler(self) -> None:\n response, columnIndex, ascending = self._sortDialog()\n order = Qt.AscendingOrder if ascending else Qt.DescendingOrder\n if response:\n self._mainFileView.sortByColumn(columnIndex, order)", "def sort(self, col, order):\r\n self.emit(SIGNAL(\"layoutAboutToBeChanged()\"))\r\n self.mylist = sorted(self.mylist,\r\n key=operator.itemgetter(col))\r\n if order == QtCore.Qt.DescendingOrder:\r\n self.mylist.reverse()\r\n self.emit(SIGNAL(\"layoutChanged()\"))", "def sort(self, *keys):\n s = self._clone()\n s._sort = []\n for k in keys:\n if isinstance(k, str) and k.startswith('-'):\n k = {k[1:]: {\"order\": \"desc\"}}\n s._sort.append(k)\n return s", "def sort_results(self):\n pass", "def sort(self):\r\n\t\tif ScoreOpt.isGroupVassals():\r\n\t\t\tself._playerScores.sort(lambda x, y: cmp(x.sortKey(), y.sortKey()))\r\n\t\t\tself._playerScores.reverse()\r\n\t\tmaxPlayers = ScoreOpt.getMaxPlayers()\r\n\t\tif maxPlayers > 0 and len(self._playerScores) > maxPlayers:\r\n\t\t\tself._playerScores = self._playerScores[len(self._playerScores) - maxPlayers:]" ]
[ "0.68852335", "0.62442034", "0.55268556", "0.55110604", "0.5504529", "0.5504529", "0.5466908", "0.5404668", "0.53766954", "0.5268589", "0.5230543", "0.51540035", "0.5129339", "0.51096964", "0.5105754", "0.5105754", "0.50918037", "0.5086296", "0.5057816", "0.50474155", "0.5042188", "0.5041556", "0.5036751", "0.5035697", "0.5002974", "0.50019294", "0.49803436", "0.49789461", "0.49717766", "0.49578923", "0.49491292", "0.49486026", "0.49473795", "0.48982584", "0.48805714", "0.48801833", "0.48718756", "0.4870136", "0.48625818", "0.48607174", "0.48415756", "0.4834982", "0.4823429", "0.48219877", "0.48168123", "0.4814803", "0.4805905", "0.47877848", "0.47877783", "0.47610614", "0.47608137", "0.4753627", "0.47371605", "0.4734365", "0.47293544", "0.4714441", "0.47053185", "0.46903205", "0.46881077", "0.46848482", "0.46605363", "0.4640835", "0.463668", "0.46365178", "0.46160963", "0.4615528", "0.46013612", "0.45940915", "0.45902848", "0.45811465", "0.4579352", "0.45768893", "0.45718065", "0.45682454", "0.45531368", "0.4551914", "0.45501623", "0.4547541", "0.45462838", "0.4539768", "0.45272782", "0.45258668", "0.45238617", "0.45214728", "0.45041692", "0.45007595", "0.44976404", "0.4485218", "0.4484666", "0.4483942", "0.44642183", "0.44591796", "0.44561988", "0.44523314", "0.44503754", "0.44474712", "0.44424075", "0.44384557", "0.4434525", "0.44257858" ]
0.8539102
0
Gets the messages_per_minute of this MessagingCampaign. How many messages this messaging campaign will send per minute.
def messages_per_minute(self): return self._messages_per_minute
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def messages_per_minute(self, messages_per_minute):\n \n self._messages_per_minute = messages_per_minute", "def get_words_per_minute(self):\n return self.words_per_minute", "def query_plans_per_minute(self) -> int:\n return pulumi.get(self, \"query_plans_per_minute\")", "def getNumOfMsgSend_interval(self):\n return self.MsgSendCount_interval", "def message_count(self):\n return self._message_count", "def message_count(self):\n return len(self.messages)", "def get_limit_per_second(self):\n pass", "def message_count_limit(self) -> ConfigNodePropertyInteger:\n return self._message_count_limit", "def unit_ms(self):\n return (self.time_base / 1000.0) / 60.0", "def minutes(self):\n return int(int(self) / 60)", "def length_minutes(self):\n return self._length_minutes", "def calculate_fetch_size(minutes: int):\n return round(minutes / CONF.interval) if minutes >= CONF.interval else 1", "def freq_minutes(self):\n return 5", "def _get_milleseconds(self):\n return int(round(time.time() * 1000))", "def fan_timer_duration(self) -> int:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"fan_timer_duration\"))\r\n return self._fan_timer_duration.seconds / 60", "def ticks_per_second(self):\n return self._ticks_per_second", "def kills_per_min(self):\n return self._kills_per_min", "def getDurationMs(self):\n return self.durationMs", "def count(self, page_size=10, vtimeout=10):\r\n a = self.get_attributes('ApproximateNumberOfMessages')\r\n return int(a['ApproximateNumberOfMessages'])", "def _unit_ms(self):\n return (self.time_base / 1000.0) / 60.0", "def last_seen_minutes(self):\n return (self.last_seen.seconds % 3600) / 60", "def mileage(self):\n return str(self._delivery_service.total_mileage())", "def message_length(self):\n return self._message_length", "def total_minutes(td):\n return total_seconds(td) / 60", "def get_minutes(self, datetime):\n return datetime.hour*60.0+datetime.minute+datetime.second/60", "def minute(self) -> int:\r\n return self._minute", "def getMessageCount(self):\n return 9", "def poll_interval_in_milliseconds(self):\n\n return self._poll_interval_in_milliseconds", "def calculate_wpm(self, delta_seconds: int):\n minutes = delta_seconds / 60\n return self.total_estimated_words() / minutes", "def shared_runners_minutes_limit(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"shared_runners_minutes_limit\")", "def get_total_mass(self) -> int:\n total_mass = 0\n for i_complex, i_abundance in self._complexes.items():\n total_mass += i_complex.get_size_of_complex() * i_abundance\n return total_mass", "def talk_durations(self):\n c = self.conn.cursor()\n\n c.execute('''SELECT strftime('%s', MAX(`date`)) - strftime('%s', MIN(`date`)) AS duration\n FROM messages\n WHERE gmail_labels LIKE '%Chat%'\n GROUP BY gmail_thread_id\n HAVING duration > 0;''')\n\n data = {'<= 1 min.': 0, '1 - 10 mins.': 0,\n '10 - 30 mins.': 0, '30 mins. - 1 hr.': 0,\n '> 1 hr.': 0}\n for row in c.fetchall():\n if row[0] <= 60:\n data['<= 1 min.'] += 1\n elif row[0] <= 600:\n data['1 - 10 mins.'] += 1\n elif row[0] <= 1800:\n data['10 - 30 mins.'] += 1\n elif row[0] <= 3600:\n data['30 mins. 
- 1 hr.'] += 1\n else:\n data['> 1 hr.'] += 1\n\n trace = pgo.Pie(\n labels=data.keys(),\n values=data.values(),\n marker=dict(\n colors=[\n self.config.get('color', 'primary'),\n self.config.get('color', 'secondary'),\n ]\n )\n )\n\n layout_args = plotly_default_layout_options()\n layout_args['title'] = 'Chat Durations'\n del layout_args['xaxis']\n del layout_args['yaxis']\n\n layout = pgo.Layout(**layout_args)\n\n return plotly_output(pgo.Figure(data=[trace], layout=layout))", "def duration_in_minutes(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"duration_in_minutes\")", "def getNumOfMsgSend(self):\n return self.MsgSendCount", "def _get_time_interval_in_minutes(self):\n return self.visa.get_request_interval_in_minutes()", "def mins(self):\n return self._mins", "def get_sent_messages(self):\n return self.sent_messages", "def sent_messages(self):\n return self._get_messages_from_folder_name('SentItems')", "def getNumOfMessagesToSend(self):\n return len(self.SendMessageBuffer)", "def messages_sent_by_user(self):\n self.__messages_by_user = self.__df[self.__df['message'].notna()].groupby('user')['message'].count()\n return self.__messages_by_user", "def minute_asleep_frequency(self):\n minutes_asleep = defaultdict(int)\n for shift in self.shifts:\n for minute in shift.minutes_asleep:\n minutes_asleep[minute] += 1\n return minutes_asleep", "def getNumOfMsgRec_interval(self):\n return self.MsgReceiveCount_interval", "def minutes(self):\n return int((self.end - self.start).total_seconds()) / 60", "def total_mass(self):\n return self._total_mass", "def total_mass(self):\n return self._total_mass", "def get_messages_count(khoros_object, user_settings=None, user_id=None, login=None, email=None):\n user_settings = _process_settings_and_user_id(khoros_object, user_settings, user_id, login, email)\n return _get_count(khoros_object, user_settings['id'], 'messages')", "def calculate_total_minutes_now(self):\n total_seconds = (timezone.now() - self.login_time).total_seconds()\n return total_seconds", "def TimeMinutes(self):\n return '%2.2d:%2.2d' % (self._hour, self._minute)", "def talk_times(self):\n c = self.conn.cursor()\n\n c.execute('''SELECT strftime('%H', `date`) AS hour, COUNT(message_key) AS talk_messages\n FROM messages\n WHERE gmail_labels LIKE '%Chat%'\n GROUP BY hour\n ORDER BY hour ASC;''')\n\n data = OrderedDict()\n for row in c.fetchall():\n data[row[0]] = row[1]\n\n total_messages = sum(data.values())\n percentages = OrderedDict()\n for hour in data.keys():\n percentages[hour] = str(round(float(data[hour])/float(total_messages) * 100, 2)) + '%'\n\n data_args = dict(\n x=data.keys(),\n y=data.values(),\n text=percentages.values(),\n name='Chat messages',\n marker=dict(\n color=self.config.get('color', 'primary')\n ),\n fill='tozeroy',\n )\n\n layout_args = plotly_default_layout_options()\n layout_args['title'] = 'Chat Times (UTC)'\n layout_args['xaxis']['title'] = 'Hour of day (UTC)'\n layout_args['yaxis']['title'] = 'Chat messages'\n\n trace = pgo.Scatter(**data_args)\n layout = pgo.Layout(**layout_args)\n\n return plotly_output(pgo.Figure(data=[trace], layout=layout))", "def getMessageCountPerDay(self):\n\n # NOTE: We first filter distinct record ids for this filter set\n # and then use those record ids as additional filter parameter when we\n # perform the actual query for message count by date. This workaround\n # is (?) 
required to not get duplicate record rows that we can't\n # `distinct` away when using `annotate`, due to some crazy db joins.\n # TODO: Revise the workaround\n\n # Query distinct record ids for this filter set\n distinct_records = Record.objects.filter(\n self.getQuery()).distinct().values(\"id\")\n\n\n # Query the sum of message counts per day for above filtered\n # records, ordered by date in ascending order\n return Record.objects.filter(id__in=distinct_records).values(\n \"report__date_range_begin\").annotate(\n date=TruncDay(\"report__date_range_begin\"),\n cnt=Sum(\"count\")).values(\"date\", \"cnt\").order_by(\"date\")", "def calculate_seconds_in_minutes(minutes):\n return int(minutes * 60)", "def get_messages(self):\n return self.messages_received", "def get_messages(self):\n return self.messages_received", "def get_messages(self):\n return self.messages_received", "def get_number_of_messages(queue_name):\n queue = sqs.get_queue_by_name(QueueName=queue_name)\n return queue.attributes.get('ApproximateNumberOfMessages')", "def get_mass(self):\n return self.m", "def _get_messages(self):\n try:\n messages = self.channel.get_messages(int(self.get_argument('since_timestamp', 0)))\n\n except ValueError as e:\n messages = self.channel.get_messages()\n\n return messages", "def get_message_queue_size(self):\n with self.lock:\n return len(self.message_queue)", "def get_current_minute_distribution(self):\n return self._delegate.get_current_bin().to_distribution()", "def total_millimetres(self):\n raise NotImplementedError(\"The total length property must be overriden in derived classes.\")", "def max_total_recipients(self) -> ConfigNodePropertyInteger:\n return self._max_total_recipients", "def ms(self):\n\t\treturn self._ms", "def get_messages(character):\n mail = character.db.mail\n try:\n messages = [item for item in mail if item[TIMESTAMP] <= item[MESSAGE].date_sent]\n # Let's clean up mail storage for this user while we're at it.\n character.db.mail = messages\n except TypeError:\n messages = []\n return messages", "def cooldown_minutes(self):\n return self._cooldown_minutes", "def getAllMessages(self):\n return self.db.getAllMessages()", "def talk_thread_sizes(self):\n c = self.conn.cursor()\n\n c.execute('''SELECT gmail_thread_id,\n strftime('%Y-%m-%d', `date`) AS thread_date,\n COUNT(message_key) as thread_size,\n GROUP_CONCAT(DISTINCT `from`) AS participants\n FROM messages\n WHERE gmail_labels LIKE '%Chat%'\n GROUP BY gmail_thread_id;''')\n\n messages = []\n marker_sizes = []\n dates = []\n descriptions = []\n for row in c.fetchall():\n messages.append(row[2])\n marker_sizes.append(max(10, row[2]/5))\n dates.append(row[1])\n descriptions.append('Messages: ' + str(row[2]) +\n '<br>Date: ' + str(row[1]) +\n '<br>Participants:<br> - ' + str(row[3]).replace(',', '<br> - ')\n )\n\n trace = pgo.Scatter(\n x=dates,\n y=messages,\n mode='markers',\n marker=dict(\n size=marker_sizes,\n ),\n text=descriptions\n )\n\n layout_args = plotly_default_layout_options()\n layout_args['title'] = 'Chat Thread Sizes'\n layout_args['hovermode'] = 'closest'\n layout_args['height'] = 800\n layout_args['margin'] = pgo.Margin(**layout_args['margin'])\n layout_args['xaxis']['title'] = 'Date'\n layout_args['yaxis']['title'] = 'Messages in thread'\n layout = pgo.Layout(**layout_args)\n\n return plotly_output(pgo.Figure(data=[trace], layout=layout))", "def media_duration(self):\n return self._media_duration", "def mass(self):\n return self._mass", "def mass(self):\n return self._mass", "def 
num_pending_messages(self):\n if self.connected:\n return len(self.messenger.num_pending_messages())\n return 0", "def getMinute(self):\n return _libsbml.Date_getMinute(self)", "def set_words_per_minute(self, words_per_minute):\n is_valid_wpm = 5.0 <= words_per_minute <= 60.0\n if is_valid_wpm:\n self.words_per_minute = words_per_minute\n self.dot_time_in_msec = 1200.0 / self.words_per_minute\n # Synthesizes sample data for the current dot length.\n self._cache_dot_dash_sample_data()\n self._cache_silence_sample_data()\n return is_valid_wpm", "def max_telemetry_items_per_second(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_telemetry_items_per_second\")", "def throughput(self):\n return self.cwnd_from_file * self.mss / self.rtt", "def get_per_page(self):\n\n\t\treturn self.__per_page", "def messages(self):\n return MessageNotification.messages", "def __get_loaded_messages(self):\n messages = []\n for message in self.chat.find_elements(By.XPATH, \"\"\"//*[@id=\"main\"]/div[3]/div/div/div[3]/*\"\"\"):\n messages.append(MessageElement(message))\n return messages", "def message_count(self):\n pass", "def _get_delay(self):\n delay = int(60 / self.REQUESTS_PER_MINUTE * self.delay_multiplier)\n return delay", "def _get_delay(self):\n delay = int(60 / self.REQUESTS_PER_MINUTE * len(accounts.get_all()))\n return delay", "def _get_delay(self):\n delay = int(60 / self.REQUESTS_PER_MINUTE * len(accounts.get_all()))\n return delay", "def get_message_count(self):\n return self.buffer.count()", "def messages(self, uid=0, **kwargs):\n messages = Messages(self.base_uri, self.auth)\n return self.get_subresource_instances(uid, instance=messages,\n resource=\"messages\", params=kwargs)", "def messages(self):\n return self._messages", "def messages(self):\n return self._messages", "def messages(self):\n return self._messages", "def get_messages(self):\r\n return self.messages", "def get_period_of_monitoring(self):\n return int(self.config['period_of_monitoring_seconds'])", "def distance_miles(self):\n return self._distance_miles", "def media_duration(self):\n return self._table.active_track_total_time.total_seconds()", "def get_num_messages_daily(self, date):\n cursor = self.get_cursor()\n end_date = date + relativedelta(days=1)\n query = 'SELECT count(*) AS num FROM messages WHERE created_on > %s AND created_on < %s'\n cursor.execute(query, (date, end_date))\n count = cursor.fetchall()\n return count[0]['num']", "def get_messages(self):\n res = self.conn.cursor().execute(\"SELECT * FROM messages\")\n return res.fetchall()", "def message_count(self) -> int:\n return len(self._leased_messages)", "def getPostedMessages(cls):\n with cls.messageLock:\n postedMessages = cls.postedMessages\n cls.postedMessages = \"\"\n return postedMessages", "def AMPMMinutes(self):\n return '%2.2d:%2.2d %s' % (self._pmhour, self._minute, self._pm)", "def message_box_size_limit(self) -> ConfigNodePropertyInteger:\n return self._message_box_size_limit", "def hero_damage_per_min(self):\n return self._hero_damage_per_min", "def mass(self):\n return _cantera.reactor_mass(self.__reactor_id)", "def logged_messages(self):\n return self._logged_messages", "def to_length_secs(self):\n return (self.bpm / 60.0) / self.period" ]
[ "0.686777", "0.6715453", "0.5897515", "0.57897204", "0.5508112", "0.5492951", "0.5468848", "0.5431342", "0.5427984", "0.54239345", "0.5376645", "0.5357198", "0.52036357", "0.5193324", "0.5188115", "0.51773685", "0.51770973", "0.51570386", "0.51300323", "0.512672", "0.51112086", "0.51089203", "0.51057315", "0.510142", "0.5084566", "0.50815916", "0.4983262", "0.49818733", "0.49705178", "0.49692985", "0.49594393", "0.4936416", "0.49113664", "0.49007887", "0.48960784", "0.48887283", "0.48860237", "0.48809436", "0.48794237", "0.48676345", "0.4864417", "0.48524383", "0.4846793", "0.4845425", "0.4845425", "0.4844216", "0.48425615", "0.48260707", "0.48223105", "0.4815491", "0.48086587", "0.47817585", "0.47817585", "0.47817585", "0.477398", "0.47514492", "0.47492796", "0.4745965", "0.47440556", "0.4740454", "0.47367862", "0.47350234", "0.47336626", "0.473345", "0.471222", "0.47092018", "0.47008502", "0.46873504", "0.46873504", "0.4686423", "0.46740216", "0.4661743", "0.46573666", "0.4657264", "0.46532744", "0.46348697", "0.46332628", "0.46281663", "0.46184954", "0.46183777", "0.46183777", "0.46161348", "0.46141118", "0.4603115", "0.4603115", "0.4603115", "0.45988655", "0.45950678", "0.45899814", "0.4584164", "0.4578698", "0.45768303", "0.4576068", "0.4575742", "0.45733887", "0.45726955", "0.45710167", "0.45558456", "0.45508826", "0.45503753" ]
0.8563438
0
Sets the messages_per_minute of this MessagingCampaign. How many messages this messaging campaign will send per minute.
def messages_per_minute(self, messages_per_minute): self._messages_per_minute = messages_per_minute
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def messages_per_minute(self):\n return self._messages_per_minute", "def set_words_per_minute(self, words_per_minute):\n is_valid_wpm = 5.0 <= words_per_minute <= 60.0\n if is_valid_wpm:\n self.words_per_minute = words_per_minute\n self.dot_time_in_msec = 1200.0 / self.words_per_minute\n # Synthesizes sample data for the current dot length.\n self._cache_dot_dash_sample_data()\n self._cache_silence_sample_data()\n return is_valid_wpm", "def set_limit_per_second(self, rate_limit_per_second):\n pass", "def kills_per_min(self, kills_per_min):\n\n self._kills_per_min = kills_per_min", "def set_fan_timer_duration(self, minutes: int = 5):\r\n self._fan_timer_duration = timedelta(minutes=minutes)\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"fan_timer_duration\", minutes))", "def drive_time_minutes(self, drive_time_minutes):\n\n self._drive_time_minutes = drive_time_minutes", "def message_count_limit(self, message_count_limit: ConfigNodePropertyInteger):\n\n self._message_count_limit = message_count_limit", "def cooldown_minutes(self, cooldown_minutes):\n\n self._cooldown_minutes = cooldown_minutes", "def report_minute_distribution(self):\n self.histogram_granularities.add(histogram_granularity.MINUTE)\n return self", "def run_for_mins(bot, nr_mins):\n for i in range(1, nr_mins+1):\n time.sleep(60)\n bot.send_msg('It has been {} minutes.'.format(i))", "def query_plans_per_minute(self) -> int:\n return pulumi.get(self, \"query_plans_per_minute\")", "def length_minutes(self, length_minutes):\n \n self._length_minutes = length_minutes", "def get_words_per_minute(self):\n return self.words_per_minute", "def set_minute(self, minute):\n if minute not in range(60):\n raise ValueError(\"Second value must be in range [0..59] but is {}\".format(minute))\n\n # First we separate the tens and the digit\n tens, digit = divmod(int(minute), 10)\n\n # Then we add them in a single int\n reg_value = (tens << 4) | digit\n\n # The we add it to the register\n self.__write_register(_REGISTER_MINUTE, reg_value)", "def set_custom_speed(self, bytes_per_second):\n self._custom_speed = bytes_per_second", "def time_remaining_minutes(self, time_remaining_minutes):\n\n self._time_remaining_minutes = time_remaining_minutes", "def message_count(self, message_count):\r\n\r\n self._message_count = message_count", "def message_box_size_limit(self, message_box_size_limit: ConfigNodePropertyInteger):\n\n self._message_box_size_limit = message_box_size_limit", "def set_mem_per_proc(self, mem_mb):\n QueueAdapter.set_mem_per_proc(self, mem_mb)\n #self.qparams[\"mem\"] = self.mem_per_proc", "def set_message_rate(self, msg_type, rate):\n pass", "def per_page(self, per_page):\n\n self._per_page = per_page", "def set_max_message_size(self, size: int = 1_073_741_824) -> None:\n self.set_db_conf(\"proto-max-bulk-len\", str(size))", "def rate_per_unit(self, rate_per_unit):\n\n self._rate_per_unit = rate_per_unit", "def TimeMinutes(self):\n return '%2.2d:%2.2d' % (self._hour, self._minute)", "def freq_minutes(self):\n return 5", "def every_minute(self, time, function, args=None, kwargs=None, name=None):\n if args is None:\n args = list()\n if kwargs is None:\n kwargs = dict()\n if name is None:\n name = function.__name__+(f'_{len(self.config)+1}' if function.__name__ in self.config else '')\n self.config[name] = {'mode':'every_minute', 'time':int(time), 'function':function, 'args':args, \n 'kwargs':kwargs, 'execute_num':0, 'runner':(function, args, kwargs, name),\n 'time_init':datetime.datetime.now()}\n 
self.params.tracker_dict[name] = dict()", "def cooldown_grace_period_minutes(self, cooldown_grace_period_minutes):\n\n self._cooldown_grace_period_minutes = cooldown_grace_period_minutes", "def set_Minute(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Minute', value)", "def hero_damage_per_min(self, hero_damage_per_min):\n\n self._hero_damage_per_min = hero_damage_per_min", "def setMinute(self, *args):\n return _libsbml.Date_setMinute(self, *args)", "def _setMassFlowRate(self, mdot):\n if self._verbose:\n print self._name+': setting mdot to '+`mdot`+' kg/s'\n if type(mdot) == types.InstanceType:\n self.setFunction(mdot)\n else:\n _cantera.flowdev_setMassFlowRate(self.flowdev_id(), mdot)", "def minute(self) -> int:\r\n return self._minute", "def set(self, mdot = 0.0):\n self._setMassFlowRate(mdot)", "def vms_every(self, vms_every):\n\n self._vms_every = vms_every", "def set_ticks_per_second(self, ticks_per_second: Optional[int] = None) -> None:\n if ticks_per_second is not None:\n self.ticks_per_second = ticks_per_second\n self.clock = pygame.time.Clock()\n else:\n self.ticks_per_second = None\n self.clock = None", "def AMPMMinutes(self):\n return '%2.2d:%2.2d %s' % (self._pmhour, self._minute, self._pm)", "def message_count_limit(self) -> ConfigNodePropertyInteger:\n return self._message_count_limit", "def set_update_interval(self, interval_ms):\n self._update_interval_ms = interval_ms", "def mass(self, mass):\n\n self._mass = mass", "def mass(self, mass):\n\n self._mass = mass", "def mass(self, mass):\n\n self._mass = mass", "def get_limit_per_second(self):\n pass", "def setBlockMassParams(self):\n for b in self.getBlocks():\n b.p.kgHM = b.getHMMass() / units.G_PER_KG\n b.p.kgFis = b.getFissileMass() / units.G_PER_KG\n b.p.puFrac = (\n b.getPuMoles() / b.p.molesHmBOL if b.p.molesHmBOL > 0.0 else 0.0\n )", "def get_minutes(self, datetime):\n return datetime.hour*60.0+datetime.minute+datetime.second/60", "def set_sleep_time(self, milliseconds:int):\n self.send_command(f\"configure mainLoopSleepTime {milliseconds}\")", "def xp_per_min(self, xp_per_min):\n\n self._xp_per_min = xp_per_min", "def shots_per_game(self, shots_per_game):\n\n self._shots_per_game = shots_per_game", "def goals_per_game(self, goals_per_game):\n\n self._goals_per_game = goals_per_game", "def set_mem_per_proc(self, mem_mb):\n super(SGEAdapter, self).set_mem_per_proc(mem_mb)\n self.qparams[\"mem_per_slot\"] = str(int(self.mem_per_proc)) + \"M\"", "def minutes(self):\n return int(int(self) / 60)", "def calculate_fetch_size(minutes: int):\n return round(minutes / CONF.interval) if minutes >= CONF.interval else 1", "def donations_per_week(self, donations_per_week):\n\n self._donations_per_week = donations_per_week", "def clock_speed(self, clock_speed):\n\n self._clock_speed = clock_speed", "async def _msgvote_interval(self, ctx, interval: int):\n\n if 1 <= interval <= 60:\n self.settings[\"interval\"] = interval\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"I will check each message's votes every \"\n \"{} seconds.\".format(interval))\n else:\n await self.bot.say(\"Invalid interval. 
Must be an integer \"\n \"between 1-60.\")", "def message_properties(self, message_properties: ConfigNodePropertyArray):\n\n self._message_properties = message_properties", "def set_per_page(self, per_page):\n\n\t\tif per_page is not None and not isinstance(per_page, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: per_page EXPECTED TYPE: int', None, None)\n\t\t\n\t\tself.__per_page = per_page\n\t\tself.__key_modified['per_page'] = 1", "def _broadcast_message_to_users(self, message):\n self.logger.info(f\"Broadcasting message `{message}`\")\n for id, name in self.users.items():\n time.sleep(.1) # Telegram servers does not let you send more than 30 messages per second\n try:\n self.updater.bot.sendMessage(int(id), message)\n\n except BaseException as e:\n traceback.print_exc()\n self.logger.info(f'Failed to broadcast message to {name} due to {e}')", "def gold_per_min(self, gold_per_min):\n\n self._gold_per_min = gold_per_min", "def set_mem_per_proc(self, mem_mb):\n super(PbsProAdapter, self).set_mem_per_proc(mem_mb)\n #self.qparams[\"mem\"] = self.mem_per_proc", "def send_mass_messages(self, recipient_list, sender, message=\"\", subject=\"\"):\n try:\n for s in recipient_list:\n self.send_message(to=s, sender=sender, message=message, subject=subject)\n except TypeError:\n return -1\n return 1", "def unit_ms(self):\n return (self.time_base / 1000.0) / 60.0", "def setMessage(self, message):\n self.message = str(message)\n self.app.processEvents()\n QtCore.QTimer(self).singleShot(5,self._messageDelayed)\n self.app.processEvents()", "def max_scans_per_month(self, max_scans_per_month):\n\n self._max_scans_per_month = max_scans_per_month", "def send_messages(self, bot, update, messages):\n\n for msg in messages:\n self.send_message(bot, update, msg)", "def _retry_send_messages():\n\n max_retry_value = getattr(settings, \"DJMAIL_MAX_RETRY_NUMBER\", 3)\n queryset = models.Message.objects.filter(status=models.STATUS_FAILED)\\\n .filter(retry_count__lte=max_retry_value)\\\n .order_by(\"-priority\", \"created_at\")\n\n connection = _get_real_backend()\n paginator = Paginator(list(queryset), getattr(settings, \"DJMAIL_MAX_BULK_RETRY_SEND\", 10))\n\n for page_index in paginator.page_range:\n connection.open()\n for message_model in paginator.page(page_index).object_list:\n email = message_model.get_email_message()\n sended = connection.send_messages([email])\n\n if sended == 1:\n message_model.status = models.STATUS_SENT\n message_model.sent_at = timezone.now()\n else:\n message_model.retry_count += 1\n\n message_model.save()\n\n connection.close()", "def modify_minute_job_schedule(self):\n job_schedule_modify_minute = netapp_utils.zapi\\\n .NaElement.create_node_with_children(\n 'job-schedule-cron-modify',\n **{'job-schedule-name': self.name})\n job_schedule_modify_minute.add_node_with_children(\n 'job-schedule-cron-minute',\n **{'cron-minute': str(self.job_minutes)})\n try:\n self.server.invoke_successfully(job_schedule_modify_minute,\n enable_tunneling=True)\n except netapp_utils.zapi.NaApiError as error:\n self.module.fail_json(msg='Error modifying job schedule %s: %s'\n % (self.name, to_native(error)),\n exception=traceback.format_exc())", "def setMinutesOffset(self, *args):\n return _libsbml.Date_setMinutesOffset(self, *args)", "def send_reminders(self, send_reminders):\n\n self._send_reminders = send_reminders", "def hero_healing_per_min(self, hero_healing_per_min):\n\n self._hero_healing_per_min = hero_healing_per_min", "def set_timelimit(self, timelimit):\n self._timelimit = 
timelimit", "def sms_enabled(self, sms_enabled):\n\n self._sms_enabled = sms_enabled", "def media_images_content_length_min(self, media_images_content_length_min):\n\n self._media_images_content_length_min = media_images_content_length_min", "def messages(self, messages):\n\n self._messages = messages", "def messages(self, messages):\n\n self._messages = messages", "def messages(self, messages):\n\n self._messages = messages", "def scheduled_reset_period(self, scheduled_reset_period):\n\n self._scheduled_reset_period = scheduled_reset_period", "def mass_per_bin(self, time_edges, sample_rate=25):\n\n return mass_per_bin(self._sfh_calculator, time_edges, sample_rate=sample_rate)", "def com_adobe_aem_screens_player_pingfrequency(self, com_adobe_aem_screens_player_pingfrequency: ConfigNodePropertyInteger):\n\n self._com_adobe_aem_screens_player_pingfrequency = com_adobe_aem_screens_player_pingfrequency", "def set_update_rate(self, delay_ms):\n self._log_msg_start(\"Setting NMEA message update rate\")\n self._ubx.send(\"CFG-RATE\", measRate=delay_ms, navRate=1, timeRef=1)", "def media_videos_count_min(self, media_videos_count_min):\n\n self._media_videos_count_min = media_videos_count_min", "def messages(self, messages):\n self._messages = messages", "def win_lead_second_per(self, win_lead_second_per):\n\n self._win_lead_second_per = win_lead_second_per", "def media_images_count_min(self, media_images_count_min):\n\n self._media_images_count_min = media_images_count_min", "def convert_to_minutes(s):\r\n m = math.floor(s / 60)\r\n s -= m * 60\r\n return '%dm %ds' % (m, s)", "def def_mass(self,mass):\n\n self.mass=float(mass)", "def vm_templates_every(self, vm_templates_every):\n\n self._vm_templates_every = vm_templates_every", "def set_message_class(self, message_class):\r\n self.message_class = message_class", "def getNumOfMsgSend_interval(self):\n return self.MsgSendCount_interval", "def _set_message(self, value):\n self.__message = value", "def kills_per_min(self):\n return self._kills_per_min", "def com_adobe_aem_screens_device_pasword_minlength(self, com_adobe_aem_screens_device_pasword_minlength: ConfigNodePropertyInteger):\n\n self._com_adobe_aem_screens_device_pasword_minlength = com_adobe_aem_screens_device_pasword_minlength", "def set_mass(self, mass):\n _pal.lib.geometry_set_mass(self._geometry, c.c_float(mass))", "def setMessage(self, message):\n self._message = message", "def send_all(self, msg):\n self.update_chats()\n for c in self.chats:\n self.send_message(msg, c)", "def total_mass(self, total_mass):\n\n self._total_mass = total_mass", "def convert_to_minutes(s):\n m = math.floor(s / 60)\n s -= m * 60\n return '%dm %ds' % (m, s)", "def set_mem_per_proc(self, mem_mb):\n # Hack needed because abinit is still not able to estimate memory.\n # COMMENTED by David.\n # This is not needed anymore here because the \"hack\" is performed directly in select_qadapter/_use_qadpos_pconf\n # methods of TaskManager. Moreover, this hack should be performed somewhere else (this part should be\n # independent of abinit ... 
and if we want to have less memory than the average memory available per node, we\n # have to allow it!)\n #if mem_mb <= self.min_mem_per_proc: mem_mb = self.min_mem_per_proc\n self._mem_per_proc = int(mem_mb)", "async def set_timer(self, ctx: commands.Context, seconds: int = None):\n if seconds is not None:\n await ctx.cfg_guild.autopostseconds.set(seconds)\n await ctx.send(\"Auto-post timer has been set to {}\".format(seconds))\n else:\n seconds = await ctx.cfg_guild.autopostseconds()\n await ctx.send(f\"Currently posting every {seconds} seconds.\")", "def message_uris(self, message_uris):\n\n self._message_uris = message_uris", "def plural(self, plural):\n self._plural = plural" ]
[ "0.66696775", "0.58786", "0.5668301", "0.5376296", "0.5365551", "0.52839804", "0.51178545", "0.5013213", "0.49938592", "0.47783017", "0.4773923", "0.4772288", "0.4751765", "0.47004074", "0.46891505", "0.46256608", "0.460433", "0.45917523", "0.45578098", "0.45366237", "0.45331538", "0.4500748", "0.44628927", "0.44518393", "0.4441813", "0.4435244", "0.44014743", "0.43995833", "0.43952876", "0.4350517", "0.42840827", "0.42835185", "0.42611307", "0.42582124", "0.4244814", "0.42306513", "0.422961", "0.42271838", "0.42238158", "0.42238158", "0.42238158", "0.42117727", "0.42088538", "0.42005968", "0.41903552", "0.41830188", "0.41761917", "0.41705582", "0.4166886", "0.41285032", "0.41190338", "0.4111708", "0.4099543", "0.4091126", "0.40744433", "0.4070522", "0.4065923", "0.4060147", "0.40557572", "0.40530244", "0.40452552", "0.40252185", "0.40251794", "0.40191612", "0.4017807", "0.40139532", "0.40120998", "0.40059885", "0.4005041", "0.3995813", "0.39937449", "0.39851567", "0.3978182", "0.3978182", "0.3978182", "0.39714497", "0.39695466", "0.39660785", "0.3955293", "0.39539385", "0.3947131", "0.3945006", "0.39373374", "0.39330167", "0.39320448", "0.39261702", "0.39251584", "0.39226955", "0.39052388", "0.39000493", "0.38994753", "0.38965878", "0.38908586", "0.38897446", "0.38889816", "0.38884825", "0.38875988", "0.38773885", "0.3875353", "0.3873965" ]
0.87528765
0
Gets the errors of this MessagingCampaign. A list of current error conditions associated with this messaging campaign.
def errors(self): return self._errors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_errors(self):\n return [result for result in self.values() if result.outcome == Result.ERROR]", "def Errors(self):\n return self._get_attribute('errors')", "def getErrorsList(self):\n return self.__errors", "def errors (self):\n return self._errors", "def errors (self):\n return self._errors", "def getErrors(self):\n return self.errors", "def errors(self):\n return self.__errors", "def errors(self) -> List[Error]:\n return self._errors_files + list(self._errors.values())", "def errors(self):\r\n if not hasattr(self, '_errors_cache'):\r\n self._errors_cache = self.form.get_field_errors(self)\r\n return self._errors_cache", "def get_errors(self, request):\n\n value = request._get_parameter_value(self)\n return value.errors", "def Errors(self):\r\n\t\treturn self._get_attribute('errors')", "def errors(self):\n return self._properties.get(\"errors\")", "def errors(self) -> Tuple[MqexsErrorInfo, ...]:\n return self.__errors", "def retrieve_error_messages(self):\n return self.errors_seen[:]", "def validation_errors(self):\n return self._validation_errors", "def errors(self) -> pulumi.Output[Sequence['outputs.BatchAIErrorResponse']]:\n return pulumi.get(self, \"errors\")", "def get_validation_errors(self):\n return [err.to_dict() for err in self._schema.validator.validation_errors]", "def errors(self):\n _errors = {}\n # pylint: disable=no-member\n for name, field in self._fields.items():\n if field.errors:\n _errors[name] = field.errors.pop()\n\n return _errors", "def all_errors(self) -> List[XMLSchemaParseError]:\n errors = []\n for comp in self.iter_components():\n if comp.errors:\n errors.extend(comp.errors)\n return errors", "def get_errors(self):\n df = self.get_events()\n return df[df.error.notnull()]", "def errors(self) -> List[Error]:\n # May have inherited errors with a different path.\n for error in self._errors.values():\n error.path = self.path\n if self.is_removed: # Mark all of our errors as non-persistant.\n error.is_persistant = False\n return list(self._errors.values())", "def get_error_messages(self):\n\n if len(self._sensor_results_list) == 0:\n return\n\n error_msgs = []\n\n for reading in self._sensor_results_list:\n if reading.is_error():\n error_msgs.append(reading.get_error_msg())\n\n if len(error_msgs) > 0:\n return error_msgs\n else:\n return \"No Error Readings\"", "def analysis_errors(self) -> str:\n errors = []\n\n # Get any callback errors\n for cid, callback in self._analysis_callbacks.items():\n if callback.status == AnalysisStatus.ERROR:\n errors.append(f\"\\n[Analysis Callback ID: {cid}]: {callback.error_msg}\")\n\n return \"\".join(errors)", "def job_errors(self) -> str:\n errors = []\n\n # Get any job errors\n for job in self._jobs.values():\n if job and job.status() == JobStatus.ERROR:\n if hasattr(job, \"error_message\"):\n error_msg = job.error_message()\n else:\n error_msg = \"\"\n errors.append(f\"\\n[Job ID: {job.job_id()}]: {error_msg}\")\n\n # Get any job futures errors:\n for jid, fut in self._job_futures.items():\n if fut and fut.done() and fut.exception():\n ex = fut.exception()\n errors.append(\n f\"[Job ID: {jid}]\"\n \"\\n\".join(traceback.format_exception(type(ex), ex, ex.__traceback__))\n )\n return \"\".join(errors)", "def error_data(self):\n\n if not self.__settings:\n return []\n\n return self.__transaction_errors", "def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors", "def errors(self) -> List[Error]:", "def getBuildErrors(self):\n return [x for x in self.xeps 
if x.buildErrors]", "def errors(self) -> str:\n return self.job_errors() + self.analysis_errors()", "def getErrors(self) -> java.util.Collection:\n ...", "def error(self) -> list:\n return self.__err", "def errors():\n return THE_LOGGER.errors", "def report_transaction_error_messages(self):\n response = self.__get_transaction_response()\n\n # get response data from response object\n response_data = response.json()\n\n # get error messages\n response_error = response_data['Error']\n response_error_messages = response_error['messages']\n\n # add all error messages to the report\n error_messages_to_report = []\n for response_error_message in response_error_messages:\n error_description = response_error_message['description']\n error_messages_to_report.append(error_description)\n\n return error_messages_to_report", "def error_messages(self) -> List[str]:\n spatial_msgs = []\n temporal_msgs = []\n if self.spatial:\n spatial_msgs = [m for v, m in self.spatial_validations if not v(self.spatial)]\n if self.temporal:\n temporal_msgs = [m for v, m in self.temporal_validations if not v(self.temporal)]\n\n return spatial_msgs + temporal_msgs", "def getErrorLog(self):\n return _libsbml.SBMLValidator_getErrorLog(self)", "def get_errors(self):\n errors = []\n\n if not self.title:\n msg = 'Title not found: {0}'.format(self.number)\n print(msg)\n errors.append(msg)\n\n if not self.ref:\n msg = 'Ref not found: {0}'.format(self.number)\n print(msg)\n errors.append(msg)\n\n chapter_index = int(self.number) - 1\n\n # get the expected number of frames for this chapter\n expected_frame_count = chapters_and_frames.frame_counts[chapter_index]\n\n for x in range(1, expected_frame_count + 1):\n\n # frame id is formatted like '01-01'\n frame_id = '{0}-{1}'.format(self.number.zfill(2), str(x).zfill(2))\n\n # get the next frame\n frame = next((f for f in self.frames if f['id'] == frame_id), None) # type: dict\n if not frame:\n msg = 'Frame not found: {0}'.format(frame_id)\n print(msg)\n errors.append(msg)\n else:\n # check the frame img and values\n if 'img' not in frame or not frame['img']:\n msg = 'Attribute \"img\" is missing for frame {0}'.format(frame_id)\n print(msg)\n errors.append(msg)\n\n if 'text' not in frame or not frame['text']:\n msg = 'Attribute \"text\" is missing for frame {0}'.format(frame_id)\n print(msg)\n errors.append(msg)\n\n return errors", "def GetAll(self):\n return self._errors.copy()", "def getErrors(self):\n errorList = []\n\n # E0\n try:\n if not self.e0.isValid():\n errorList.append(\"Invalid first error axis in ErrorEllipse Class\")\n except (NameError, AttributeError):\n errorList.append(\"No first error axis in ErrorEllipse Class.\")\n\n # E1\n try:\n if not self.e1.isValid():\n errorList.append(\"Invalid second error axis in ErrorEllipse Class\")\n except (NameError, AttributeError):\n errorList.append(\"No second error axis in ErrorEllipse Class.\")\n\n # E2\n try:\n if not self.e2.isValid():\n errorList.append(\"Invalid third error axis in ErrorEllipse Class\")\n except (NameError, AttributeError):\n errorList.append(\"No third error axis in ErrorEllipse Class.\")\n\n # maximumHorizontalProjection\n try:\n self.maximumHorizontalProjection\n except (NameError, AttributeError):\n errorList.append(\"No MaximumHorizontalProjection in ErrorEllipse Class.\")\n\n # maximumVerticalProjection\n try:\n self.maximumVerticalProjection\n except (NameError, AttributeError):\n errorList.append(\"No MaximumVerticalProjection in ErrorEllipse Class\")\n\n # equivalentHorizontalRadius\n try:\n 
self.equivalentHorizontalRadius\n except (NameError, AttributeError):\n errorList.append(\"No EquivalentHorizontalRadius in ErrorEllipse class\")\n\n return errorList", "def get_form_errors(form):\n all_errors = []\n for field in form.errors:\n all_errors += form.errors[field]\n return all_errors", "def get_error(self) -> List[str]:\n return []", "def get_error(self) -> List[str]:\n return []", "def getNumErrors(self):\n return _libsbml.XMLErrorLog_getNumErrors(self)", "def security_errors(self):\n errors = ErrorDict()\n for f in [\"honeypot\", \"timestamp\", \"security_hash\"]:\n if f in self.errors:\n errors[f] = self.errors[f]\n return errors", "def get_render_errors(self, revision_id):\n url = DeckhandClient.get_path(\n DeckhandPaths.RENDERED_REVISION_DOCS\n ).format(revision_id)\n\n errors = []\n\n LOG.debug(\"Retrieving rendered docs checking for validation messages\")\n response = self._get_request(url)\n if response.status_code >= 400:\n err_resp = yaml.safe_load(response.text)\n errors = err_resp.get('details', {}).get('messageList', [])\n if not errors:\n # default message if none were specified.\n errors.append({\n \"error\": True,\n \"message\": (\"Deckhand has reported an error but did not \"\n \"specify messages. Response: {}\".format(\n response.text))})\n return errors", "def get_errors(response):\n errors = response.get(\"error\")\n if errors:\n return [e.get(\"message\") for e in errors]\n return None", "def errors(self):\n return self.args[1]", "def errors(self):\n raise NotImplementedError", "def getParseErrors(self):\n return [x for x in self.xeps if x.parseErrors]", "def get_field_errors(self, field):\r\n identifier = format_html('{0}.{1}', self.form_name, field.name)\r\n return self.error_class([SafeTuple((identifier, '$pristine', '$pristine', 'invalid', e))\r\n for e in self.errors.get(field.name, [])])", "def get_all_failures(self):\n return self._get_filtered_results(success=False)", "def errors(self):\n\n dict = {\"Stellar Mass Error\":[self.st_masserr1,self.st_masserr2],\n \"Stellar Radius Error\":[self.st_raderr1,self.st_raderr2]}\n\n return dict", "def get_errors(self, path: str,\n is_ancillary: bool = False,\n is_system: bool = False,\n is_removed: bool = False) -> List[str]:\n u_file = self.__api.files.get(path, is_ancillary=is_ancillary,\n is_system=is_system,\n is_removed=is_removed)\n return [e.message for e in u_file.errors]", "def get_encoding_errors(self):\n return self._encoding_errors", "def get_errors(self):\n return {'loss': self.loss.data[0]}", "def date_errors(self):\r\n try:\r\n _date_errors = self._validate_date(self.sourceDateCol)\r\n return _date_errors\r\n except:\r\n return None", "def formatErrors(self):\n errorlist = []\n xepsWithErrors = sorted(\n set(self.getParseErrors() + self.getBuildErrors()),\n key=lambda x: str(x))\n if self.getErrors() or xepsWithErrors:\n if self.getErrors():\n errorlist.append(\"********** Read errors **********\")\n for error in self.getErrors():\n errorlist.append(error)\n for xep in xepsWithErrors:\n errorlist.append(\n \"********** Error report for {} **********\".format(str(xep)))\n if xep.parseErrors:\n errorlist.append(\"********** Parsing Errors **********\")\n errors = list(set(xep.parseErrors))\n for error in errors:\n errorlist.append(error)\n if xep.buildErrors:\n errorlist.append(\"********** Build Errors **********\")\n for error in xep.buildErrors:\n if len(error.splitlines()) > 4:\n error = ''.join(error.splitlines()[:4])\n errorlist.append(error)\n return '\\n'.join(errorlist)\n else:\n return 
None", "def error_details(self):\n return self._error_details", "def errors_fatal(self) -> List[Error]:\n return self._errors_fatal_files + self._errors_fatal", "def error_map(self):\n return self._error_map", "def getErrorLog(self):\n return _libsbml.XMLInputStream_getErrorLog(self)", "def getErrorMessage(self):\n return self._errorMessage", "def failed_messages(self, namespace, queue):\n failed = []\n for m in self.messages(namespace, queue):\n if m.error:\n failed.append(m)\n return failed", "def get_aggregated_exceptions(self) -> Payload:\n return Payload(aggregated_errors=list(self._aggregated_exceptions.values()))", "def _get_resp_body_errors(self):\n\n if self._resp_body_errors and len(self._resp_body_errors) > 0:\n return self._resp_body_errors\n\n errors = []\n warnings = []\n resp_codes = []\n\n if self.verb is None:\n return errors\n\n dom = self.response.dom()\n if dom is None:\n return errors\n\n for e in dom.findall('Errors'):\n eSeverity = None\n eClass = None\n eShortMsg = None\n eLongMsg = None\n eCode = None\n\n try:\n eSeverity = e.findall('SeverityCode')[0].text\n except IndexError:\n pass\n\n try:\n eClass = e.findall('ErrorClassification')[0].text\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n except IndexError:\n pass\n\n try:\n eShortMsg = smart_encode(e.findall('ShortMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eLongMsg = smart_encode(e.findall('LongMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n if int(eCode) not in resp_codes:\n resp_codes.append(int(eCode))\n except IndexError:\n pass\n\n msg = str(\"Class: {eClass}, Severity: {severity}, Code: {code}, {shortMsg} {longMsg}\") \\\n .format(eClass=eClass, severity=eSeverity, code=eCode, shortMsg=eShortMsg,\n longMsg=eLongMsg)\n\n # from IPython import embed; embed()\n\n if eSeverity == 'Warning':\n warnings.append(msg)\n else:\n errors.append(msg)\n\n self._resp_body_warnings = warnings\n self._resp_body_errors = errors\n self._resp_codes = resp_codes\n\n if self.config.get('warnings') and len(warnings) > 0:\n log.warn(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(warnings)))\n\n if self.response.reply.Ack == 'Failure':\n if self.config.get('errors'):\n log.error(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(errors)))\n\n return errors\n\n return []", "def get_errors(self, response: response_domain_model.Response, question_code: str) -> Sequence['ValidationError']:\n ...", "def _find_errors(self, new_response: response_domain_model.Response):\n self._reset_child_state()\n for response in self._responses_to_replay:\n for question_code, conditional in self.requirements.items():\n uncaught_errors = conditional.get_errors(response, question_code=question_code)\n if uncaught_errors:\n # Should not be able to get errors on previously passed responses\n # (invalid answers should be ignored when checking conditionals)\n raise Exception('Invalid answers found in previously checked responses')\n\n new_errors = []\n for question_code, conditional in self.requirements.items():\n new_errors.extend(conditional.get_errors(new_response, question_code=question_code))\n\n if new_errors:\n # Recurse again until there are no new errors found\n new_errors.extend(self._find_errors(new_response))\n\n return new_errors", "def get_error_count(self):\n return sum(1 for outcome in (r.outcome for r in self.values()) if outcome == Result.ERROR)", "def invalid_values(self):\n\n return 
self._get_value(self.invalid_values_provider)", "def getErrorReport(self):\n return self.sError;", "def getErrorReport(self):\n return self.sError;", "def filter_draft_errors(result):\n error_messages = []\n for field, msgs in result.get('messages', {}).items():\n if msgs.get('state', None) == 'error':\n for m in msgs['messages']:\n error_messages.append(dict(\n field=field,\n message=m,\n code=error_codes['validation_error'],\n ))\n return error_messages", "def getError(self):\n \n return self.resp[\"error\"]", "def error_count(self):\n return len(self.errors)", "def _get_errors(sheet, row, col):\n field = _FIELDS['primary data']\n val = sheet.cell(row + field['row'], col + field['column']).value\n if not val:\n return []\n final_row = row + field['row']\n error = sheet.cell(final_row, col + field['column']).value\n while error:\n final_row += 1\n error = sheet.cell(final_row, col + field['column']).value\n return [sheet.cell(i, col + field['column']).value\n for i in range(row + field['row'], final_row)]", "def tasks_with_errors(self):\n errs = []\n while True:\n try:\n errs.append(self._errq.get_nowait())\n except Queue.Empty:\n break\n return errs", "def getErrorMessage(self):\n return self._message", "def CalculateErrors(self, X, D):\n Y = self.Forward(X)\n self.errors = len([Y[i] for i in range(len(Y)) if Y[i] != D[i]])\n return self.errors", "def grammarErrors(self, text, language):\n if not self.__isValidInput(text):\n return []\n\n textUnicode = unicode_str(text)\n errorList = []\n offset = 0\n for paragraph in textUnicode.split(\"\\n\"):\n errorList = errorList + self.__grammarParagraph(paragraph, offset, language)\n offset = offset + len(paragraph) + 1\n return errorList", "def sex_errors(self):\n result = {}\n disable_sex_check = app_settings.get_app_setting(\n \"variants\", \"disable_pedigree_sex_check\", project=self\n )\n if disable_sex_check:\n return result\n for case in self.case_set.all():\n result.update(case.sex_errors(disable_sex_check))\n return result", "def exception(self) -> exceptions.ErrorMessageException:\n\n return ErrorMessage.ERROR_CODES_TO_EXCEPTIONS.get(\n self.error_code,\n exceptions.GenericException\n )", "def get_error(self, idx=0):\n return self.portal.error_log.getLogEntries()[idx]", "def refined_errors(self):\r\n errs = []\r\n for err in self.errors:\r\n if err['typo'].lower() not in self.terms:\r\n errs.append(err)\r\n return errs", "def error_codes(self):\n self._sort_measurements()\n return self._error_codes", "def _pydantic_errors_to_validation_results(\n errors: list[dict | Exception] | ValidationError,\n file_path: Path,\n scope: Scope,\n) -> list[ValidationResult]:\n out = []\n for e in (\n errors.errors() if isinstance(errors, ValidationError) else cast(list, errors)\n ):\n if isinstance(e, Exception):\n message = getattr(e, \"message\", str(e))\n id = \"exception\"\n scope = Scope.FILE\n else:\n id = \".\".join(\n filter(\n bool,\n (\n \"dandischema\",\n e.get(\"type\", \"UNKNOWN\"),\n \"+\".join(e.get(\"loc\", [])),\n ),\n )\n )\n message = e.get(\"message\", e.get(\"msg\", None))\n out.append(\n ValidationResult(\n origin=ValidationOrigin(\n name=\"dandischema\",\n version=dandischema.__version__,\n ),\n severity=Severity.ERROR,\n id=id,\n scope=scope,\n path=file_path,\n message=message,\n # TODO? dataset_path=dataset_path,\n # TODO? 
dandiset_path=dandiset_path,\n )\n )\n return out", "def error_message(self):\n\n return self._error_message", "def get_errors_list(jobs_object, job_id):\n\n # Get the error keys of the concrete job ii\n jenkins_errors = jobs_object[\"jobsConfig\"][\"errorMsg\"]\n\n error_keys = jobs_object[\"jobsConfig\"][\"jenkinsJobs\"][job_id][\"errorType\"]\n\n # Get the error messages of the error keys\n error_list = []\n # We append the action to perform to the error message\n for ii in error_keys:\n if jenkins_errors[ii][\"action\"] == \"retryBuild\":\n for error in jenkins_errors[ii][\"errorStr\"]:\n error_list.append(error + \" - retryBuild\")\n elif jenkins_errors[ii][\"action\"] == \"nodeOff\":\n for error in jenkins_errors[ii][\"errorStr\"]:\n error_list.append(error + \" - nodeOff\")\n elif jenkins_errors[ii][\"action\"] == \"nodeReconnect\":\n for error in jenkins_errors[ii][\"errorStr\"]:\n error_list.append(error + \" - nodeReconnect\")\n else:\n print(\n \"Action not defined. Please define a valid action in \"\n + jobs_config_path\n )\n return error_list", "def get_errors(self, path: str,\n is_ancillary: bool = False,\n is_system: bool = False,\n is_removed: bool = False) -> List[str]:", "def field_errors(bound_field):\n seen = []\n errors = {}\n if hasattr(bound_field.field, \"fields\"):\n for idx, subfield in enumerate(bound_field.field.fields):\n key = \"%s_%d\" % (bound_field.auto_id, idx)\n subfield_errors = getattr(subfield.widget, \"errors\", [])\n errors[key] = subfield_errors\n seen.extend(subfield_errors)\n for error in bound_field.errors:\n if error not in seen:\n errors.setdefault(bound_field.auto_id, [])\n errors[bound_field.auto_id].append(error)\n return errors.items()", "def getExceptions(self):\n return self.getOrDefault(\"exceptions\")", "def GetFailures(self):\n return self._compute_client.all_failures", "def errorbars (self):\n return self._errorbars", "def warnings(self) -> List[Error]:\n return self._get_warnings()", "def check_errors(self):\n\n errors = []\n while True:\n err = self.values(\"SYST:ERR?\")\n if int(err[0]) != 0:\n errmsg = \"Agilent 5313xA: {0}: {1}\".format(err[0], err[1])\n log.error(errmsg + '\\n')\n errors.append(errmsg)\n else:\n break\n\n return errors", "def ErrorMessage(self):\n return self._errormessage", "def errorcaps (self):\n return self._errorcaps", "def get_errored_courses(self):\r\n return dict((k, self.errored_courses[k].errors) for k in self.errored_courses)", "def messages(self):\n return self._messages", "def messages(self):\n return self._messages", "def messages(self):\n return self._messages", "def query_error(self):\n return self.details[KEY_QUERY_ERROR]" ]
[ "0.7164323", "0.70850545", "0.7084039", "0.7073318", "0.7073318", "0.7032286", "0.69743216", "0.6840167", "0.6840085", "0.6816689", "0.68060446", "0.67511207", "0.66765666", "0.6626607", "0.65734386", "0.65553755", "0.65435886", "0.6522403", "0.6402078", "0.63631475", "0.6350149", "0.63170594", "0.6312729", "0.63122153", "0.6284837", "0.62512636", "0.61444694", "0.6065811", "0.60224736", "0.6016319", "0.6014446", "0.5982093", "0.5935994", "0.5854258", "0.583771", "0.5777689", "0.5770109", "0.5731137", "0.5721682", "0.570453", "0.570453", "0.566765", "0.5632113", "0.5595717", "0.5565627", "0.5503202", "0.54932886", "0.5477905", "0.53716326", "0.53676915", "0.5365297", "0.53583497", "0.53526014", "0.5341042", "0.5331032", "0.53226525", "0.5310991", "0.5303637", "0.5286214", "0.5285876", "0.5285202", "0.52813214", "0.52592516", "0.52442986", "0.52141994", "0.52115345", "0.52092403", "0.5201769", "0.5199416", "0.5199416", "0.517958", "0.51787317", "0.5171295", "0.51709306", "0.51611435", "0.5154321", "0.5152182", "0.5130895", "0.50871825", "0.5076235", "0.5074987", "0.50728893", "0.5070535", "0.50689095", "0.50684375", "0.5067607", "0.5066021", "0.50616026", "0.50598854", "0.5041734", "0.50276005", "0.5024531", "0.5020144", "0.5019323", "0.50067776", "0.5000856", "0.4998048", "0.4998048", "0.4998048", "0.4997809" ]
0.7075878
3
Sets the errors of this MessagingCampaign. A list of current error conditions associated with this messaging campaign.
def errors(self, errors):
    self._errors = errors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def errors(self, errors):\n\n self._errors = errors", "def validation_errors(self, validation_errors):\n self._validation_errors = validation_errors", "def add_errors(self, errors):\n self.errors = merge_errors(self.errors, errors)", "def errors (self):\n return self._errors", "def errors (self):\n return self._errors", "def errors(self):\n return self._errors", "def errors(self):\n return self.__errors", "def Errors(self):\n return self._get_attribute('errors')", "def errors(self):\r\n if not hasattr(self, '_errors_cache'):\r\n self._errors_cache = self.form.get_field_errors(self)\r\n return self._errors_cache", "def errors(self):\n return self._properties.get(\"errors\")", "def errors(self) -> Tuple[MqexsErrorInfo, ...]:\n return self.__errors", "def errors(self) -> pulumi.Output[Sequence['outputs.BatchAIErrorResponse']]:\n return pulumi.get(self, \"errors\")", "def getErrors(self):\n return self.errors", "def getErrorsList(self):\n return self.__errors", "def Errors(self):\r\n\t\treturn self._get_attribute('errors')", "def validation_errors(self):\n return self._validation_errors", "def errors(self):\n _errors = {}\n # pylint: disable=no-member\n for name, field in self._fields.items():\n if field.errors:\n _errors[name] = field.errors.pop()\n\n return _errors", "def error(self, msg, transfers):\n self.validation_exceptions.extend(self._create_exceptions(msg, transfers, ValidationType.ERROR))", "def errors(self) -> List[Error]:\n return self._errors_files + list(self._errors.values())", "def errors(self):\n raise NotImplementedError", "def set_limit(self, errors):\n self.limit = errors", "def errors(self) -> str:\n return self.job_errors() + self.analysis_errors()", "def on_errors(self, errors):\n log.error(\"Received errors: %s\", errors)", "def get_errors(self):\n return [result for result in self.values() if result.outcome == Result.ERROR]", "def get_errors(self, request):\n\n value = request._get_parameter_value(self)\n return value.errors", "def error_message(self, error_message):\n if (self.local_vars_configuration.client_side_validation and\n error_message is not None and len(error_message) > 500):\n raise ValueError(\"Invalid value for `error_message`, length must be less than or equal to `500`\") # noqa: E501\n\n self._error_message = error_message", "def retrieve_error_messages(self):\n return self.errors_seen[:]", "def report_transaction_error_messages(self):\n response = self.__get_transaction_response()\n\n # get response data from response object\n response_data = response.json()\n\n # get error messages\n response_error = response_data['Error']\n response_error_messages = response_error['messages']\n\n # add all error messages to the report\n error_messages_to_report = []\n for response_error_message in response_error_messages:\n error_description = response_error_message['description']\n error_messages_to_report.append(error_description)\n\n return error_messages_to_report", "def errors(self) -> List[Error]:", "def analysis_errors(self) -> str:\n errors = []\n\n # Get any callback errors\n for cid, callback in self._analysis_callbacks.items():\n if callback.status == AnalysisStatus.ERROR:\n errors.append(f\"\\n[Analysis Callback ID: {cid}]: {callback.error_msg}\")\n\n return \"\".join(errors)", "def check_set_errors(self):\n raise NotImplementedError(\"Implement it in a subclass.\")", "def get_validation_errors(self):\n return [err.to_dict() for err in self._schema.validator.validation_errors]", "def error_data(self):\n\n if not self.__settings:\n return []\n\n return 
self.__transaction_errors", "def errors(self) -> List[Error]:\n # May have inherited errors with a different path.\n for error in self._errors.values():\n error.path = self.path\n if self.is_removed: # Mark all of our errors as non-persistant.\n error.is_persistant = False\n return list(self._errors.values())", "def job_errors(self) -> str:\n errors = []\n\n # Get any job errors\n for job in self._jobs.values():\n if job and job.status() == JobStatus.ERROR:\n if hasattr(job, \"error_message\"):\n error_msg = job.error_message()\n else:\n error_msg = \"\"\n errors.append(f\"\\n[Job ID: {job.job_id()}]: {error_msg}\")\n\n # Get any job futures errors:\n for jid, fut in self._job_futures.items():\n if fut and fut.done() and fut.exception():\n ex = fut.exception()\n errors.append(\n f\"[Job ID: {jid}]\"\n \"\\n\".join(traceback.format_exception(type(ex), ex, ex.__traceback__))\n )\n return \"\".join(errors)", "def set_error(self, name, value):\n self.errors[name] = value", "def error_count(self, error_count):\n\n self._error_count = error_count", "async def flush_errors(self, errors):\n logger.error(errors)\n data = {\"output\": {\"errors\": errors}, \"state\": \"completed\", \"status\": \"error\"}\n await self.c_task.update(data)", "def errors(self):\n\n dict = {\"Stellar Mass Error\":[self.st_masserr1,self.st_masserr2],\n \"Stellar Radius Error\":[self.st_raderr1,self.st_raderr2]}\n\n return dict", "def setErrorMessage(self, errorMessage):\n self._errorMessage = errorMessage", "def mark_error(self):\r\n self.status = ERROR", "def error_message(self, error_message):\n\n self._error_message = error_message", "def _initialize_error_dictionaries(self):\n for task_id in self.task_ids.keys():\n self.training_errors[task_id] = []\n self.validation_errors[task_id] = []", "def errors_and_warnings(self, errors_and_warnings):\n\n self._errors_and_warnings = errors_and_warnings", "def error(self) -> list:\n return self.__err", "def errors_summary(self, errors_summary):\n\n self._errors_summary = errors_summary", "def all_errors(self) -> List[XMLSchemaParseError]:\n errors = []\n for comp in self.iter_components():\n if comp.errors:\n errors.extend(comp.errors)\n return errors", "def getNumErrors(self):\n return _libsbml.XMLErrorLog_getNumErrors(self)", "def setError(self,err):\n self.error = err", "def getErrors(self) -> java.util.Collection:\n ...", "def set_error(self, error):\n self._set_sub_text('error', text=str(error))\n return self", "def error_messages(self) -> List[str]:\n spatial_msgs = []\n temporal_msgs = []\n if self.spatial:\n spatial_msgs = [m for v, m in self.spatial_validations if not v(self.spatial)]\n if self.temporal:\n temporal_msgs = [m for v, m in self.temporal_validations if not v(self.temporal)]\n\n return spatial_msgs + temporal_msgs", "def error_message(self, error_message: str):\n\n self._error_message = error_message", "def _find_errors(self, new_response: response_domain_model.Response):\n self._reset_child_state()\n for response in self._responses_to_replay:\n for question_code, conditional in self.requirements.items():\n uncaught_errors = conditional.get_errors(response, question_code=question_code)\n if uncaught_errors:\n # Should not be able to get errors on previously passed responses\n # (invalid answers should be ignored when checking conditionals)\n raise Exception('Invalid answers found in previously checked responses')\n\n new_errors = []\n for question_code, conditional in self.requirements.items():\n new_errors.extend(conditional.get_errors(new_response, 
question_code=question_code))\n\n if new_errors:\n # Recurse again until there are no new errors found\n new_errors.extend(self._find_errors(new_response))\n\n return new_errors", "def email_error_report(self, digest_content, jats_content, error_messages):\n datetime_string = time.strftime(utils.DATE_TIME_FORMAT, time.gmtime())\n doi = None\n if digest_content:\n doi = digest_content.doi\n body_content = requests_provider.error_email_body_content(\n doi, jats_content, error_messages\n )\n body = email_provider.simple_email_body(datetime_string, body_content)\n author = None\n if digest_content:\n author = digest_content.author\n subject = requests_provider.error_email_subject_msid_author(\n \"digest\", digest_provider.get_digest_msid(digest_content), author\n )\n sender_email = self.settings.digest_sender_email\n\n recipient_email_list = email_provider.list_email_recipients(\n self.settings.digest_jats_error_recipient_email\n )\n\n messages = email_provider.simple_messages(\n sender_email, recipient_email_list, subject, body, logger=self.logger\n )\n self.logger.info(\n \"Formatted %d error email messages in %s\" % (len(messages), self.name)\n )\n\n details = email_provider.smtp_send_messages(\n self.settings, messages, self.logger\n )\n self.logger.info(\"Email sending details: %s\" % str(details))\n\n return True", "def add_error_tables(self, error_tables):\n self.error_distribution = error_tables", "def get_error_messages(self):\n\n if len(self._sensor_results_list) == 0:\n return\n\n error_msgs = []\n\n for reading in self._sensor_results_list:\n if reading.is_error():\n error_msgs.append(reading.get_error_msg())\n\n if len(error_msgs) > 0:\n return error_msgs\n else:\n return \"No Error Readings\"", "def error_count(self):\n return len(self.errors)", "def __set_errors_json(self, error_count_by_operation, errors_by_operation):\n message = \"{0} error/s reported.\".format(error_count_by_operation)\n log_file_path = self.logger.file_logger.log_file_path\n message += \" The latest {0} error/s are shared in detail. 
To view all errors, review this log file on the machine: {1}\".format(len(errors_by_operation), log_file_path) if error_count_by_operation > 0 else \"\"\n return {\n \"code\": Constants.PatchOperationTopLevelErrorCode.SUCCESS if error_count_by_operation == 0 else Constants.PatchOperationTopLevelErrorCode.ERROR,\n \"details\": errors_by_operation,\n \"message\": message\n }", "def security_errors(self):\n errors = ErrorDict()\n for f in [\"honeypot\", \"timestamp\", \"security_hash\"]:\n if f in self.errors:\n errors[f] = self.errors[f]\n return errors", "def GetAll(self):\n return self._errors.copy()", "def errors():\n return THE_LOGGER.errors", "def add_error(self, field, message):\n add_list_value(self.errors, field, message)", "def error_recovery_settings(self, error_recovery_settings):\n\n self._error_recovery_settings = error_recovery_settings", "def setErrorThreshold(self, threshold):\n return self._set(errorThreshold=threshold)", "def setErrorThreshold(self, threshold):\n return self._set(errorThreshold=threshold)", "def _get_resp_body_errors(self):\n\n if self._resp_body_errors and len(self._resp_body_errors) > 0:\n return self._resp_body_errors\n\n errors = []\n warnings = []\n resp_codes = []\n\n if self.verb is None:\n return errors\n\n dom = self.response.dom()\n if dom is None:\n return errors\n\n for e in dom.findall('Errors'):\n eSeverity = None\n eClass = None\n eShortMsg = None\n eLongMsg = None\n eCode = None\n\n try:\n eSeverity = e.findall('SeverityCode')[0].text\n except IndexError:\n pass\n\n try:\n eClass = e.findall('ErrorClassification')[0].text\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n except IndexError:\n pass\n\n try:\n eShortMsg = smart_encode(e.findall('ShortMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eLongMsg = smart_encode(e.findall('LongMessage')[0].text)\n except IndexError:\n pass\n\n try:\n eCode = e.findall('ErrorCode')[0].text\n if int(eCode) not in resp_codes:\n resp_codes.append(int(eCode))\n except IndexError:\n pass\n\n msg = str(\"Class: {eClass}, Severity: {severity}, Code: {code}, {shortMsg} {longMsg}\") \\\n .format(eClass=eClass, severity=eSeverity, code=eCode, shortMsg=eShortMsg,\n longMsg=eLongMsg)\n\n # from IPython import embed; embed()\n\n if eSeverity == 'Warning':\n warnings.append(msg)\n else:\n errors.append(msg)\n\n self._resp_body_warnings = warnings\n self._resp_body_errors = errors\n self._resp_codes = resp_codes\n\n if self.config.get('warnings') and len(warnings) > 0:\n log.warn(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(warnings)))\n\n if self.response.reply.Ack == 'Failure':\n if self.config.get('errors'):\n log.error(\"{verb}: {message}\\n\\n\".format(\n verb=self.verb, message=\"\\n\".join(errors)))\n\n return errors\n\n return []", "def add_error(self, message):\n self.errors.append(message)", "def setErrorMax(self, error_max):\n\t\tself.error_max = error_max", "def getBuildErrors(self):\n return [x for x in self.xeps if x.buildErrors]", "def sex_errors(self):\n result = {}\n disable_sex_check = app_settings.get_app_setting(\n \"variants\", \"disable_pedigree_sex_check\", project=self\n )\n if disable_sex_check:\n return result\n for case in self.case_set.all():\n result.update(case.sex_errors(disable_sex_check))\n return result", "def set_max_errors(self,newMaxErrors):\n previousErrors = self.errors[max(0, self._errorCursor - newMaxErrors) : self._errorCursor]\n #Too complicated fuck me\n wrappingErrors = self.errors[self._errorCursor + 
max(0, self.maxErrors - newMaxErrors) : self.maxErrors]\n del self.errors\n freeSpace = [0] * (newMaxErrors - len(previousErrors) - len(wrappingErrors))\n self._errorCursor = (len(wrappingErrors) + len(previousErrors)) % newMaxErrors\n self.errors = wrappingErrors + previousErrors + freeSpace\n self.maxErrors = newMaxErrors\n self._errorSum = sum(self.errors)\n del previousErrors\n del wrappingErrors\n del freeSpace\n gc.collect()", "def processErrors(self):\n e = self.formatErrors()\n if not self.config.nologtostdout:\n if e:\n print e\n else:\n print \"No errors\"\n if self.config.logtomail:\n if e:\n m = xeputils.mail.LogMail(self.config, e)\n m.send()\n if self.config.logtofile:\n f = open(self.config.logtofile, 'a')\n f.write(\"\\n===================\\n\")\n f.write(datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\"))\n f.write(\"\\n===================\\n\")\n if e:\n f.write(e)\n else:\n f.write(\"No errors\")\n f.close()", "def failed_replication_jobs(self, failed_replication_jobs):\n if self._configuration.client_side_validation and failed_replication_jobs is None:\n raise ValueError(\"Invalid value for `failed_replication_jobs`, must not be `None`\") # noqa: E501\n if (self._configuration.client_side_validation and\n failed_replication_jobs is not None and failed_replication_jobs < 0): # noqa: E501\n raise ValueError(\"Invalid value for `failed_replication_jobs`, must be a value greater than or equal to `0`\") # noqa: E501\n\n self._failed_replication_jobs = failed_replication_jobs", "def init_matches_errors(self) -> None:\n\n self.matches = set()\n self.ignored = set()\n self.errors = set()", "def form_invalid_add_global_errormessages(self, form):\n if self.get_selected_items_form_attribute() in form.errors:\n errormessages = form.errors[self.get_selected_items_form_attribute()]\n for errormessage in errormessages:\n messages.error(self.request, errormessage)", "def record_error(self, message, keys=None, type=None, **kwargs):\n keys = list(keys) if keys is not None else []\n self.errors.append(\n dict(\n message=message,\n keys=keys,\n type=type or EntityErrors.UNCATEGORIZED,\n **kwargs\n )\n )", "def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors", "def report_errors(errors):\n if len(errors) > 0:\n for error in errors:\n logger.debug(error)\n sys.exit(0)", "def WriteFlowErrors(self, errors):\n # Errors are similar to results, as they represent a somewhat related\n # concept. Error is a kind of a negative result. 
Given the structural\n # similarity, we can share large chunks of implementation between\n # errors and results DB code.\n self._WriteFlowResultsOrErrors(self.flow_errors, errors)", "def setError(self, index, error):\n\t\tself.membersWithErrors[index][1] = error", "def add_error(self, msg):\n self._add_message(msg, self._errors)", "def error(self, error):\n\n self._error = error", "def error(self, error):\n\n self._error = error", "def error(self, error):\n\n self._error = error", "def get_errors(self):\n df = self.get_events()\n return df[df.error.notnull()]", "def error(self, message, code='UnknownError', error_code=None, http_status=400):\n\n # Some backwards compatibility\n if error_code is not None and code == 'UnknownError':\n code = error_code\n\n self._add_message( message, self.ERROR, code=code )\n self.n_errors += 1\n self.status = 'ERROR'\n self.http_status = http_status\n self.error_code = code\n self.message = message", "def diagnostic_trouble_codes(self, diagnostic_trouble_codes):\n\n self._diagnostic_trouble_codes = diagnostic_trouble_codes", "def CalculateErrors(self, X, D):\n Y = self.Forward(X)\n self.errors = len([Y[i] for i in range(len(Y)) if Y[i] != D[i]])\n return self.errors", "def error(self, exception=None):\n self._error = exception", "def pin_errors(self):\n for m in range(self.stage_width_list[-1]):\n error, _ = rqrmilib.calculate_submodel_error(self._get_native_object(), self.probe, len(self)-1, m)\n if error < 0: error = 0\n self.error_list[m] = int(error)\n self.rqrmi_state_changed = True\n return self.error_list", "def display_errors(self):\r\n\r\n def format_name(field_name):\r\n \"\"\"Formats field names for error display\"\"\"\r\n if field_name == \"celebration_tier\":\r\n return \"{wLargesse{n\"\r\n return \"{w%s{n\" % field_name.capitalize()\r\n\r\n msg = \"Please correct the following errors:\\n\"\r\n msg += \"\\n\".join(\r\n \"%s: {r%s{n\" % (format_name(field), \", \".join(errs))\r\n for field, errs in self.errors.items()\r\n )\r\n return msg", "def setErrorLog(self, *args):\n return _libsbml.XMLInputStream_setErrorLog(self, *args)", "def error_entity(self, error_entity):\n \n self._error_entity = error_entity", "def error_map(self):\n return self._error_map", "def _update_errors_report(self, model_name, error_msg):\n errors_filename = os.path.join(self._get_results_path(), \"errors.md\")\n with open(errors_filename, \"a\") as fout:\n self.verbose_print(f\"There was an error during {model_name} training.\")\n self.verbose_print(f\"Please check {errors_filename} for details.\")\n fout.write(f\"## Error for {model_name}\\n\\n\")\n fout.write(error_msg)\n link = \"https://github.com/mljar/mljar-supervised/issues/new\"\n fout.write(\n f\"\\n\\nPlease set a GitHub issue with above error message at: {link}\"\n )\n fout.write(\"\\n\\n\")", "def set_error(cls, ekindataset, dp, col):\n if ekindataset[col][dp]['var'] == '':\n return\n if not ekindataset[col][dp].has_key('error'):\n ekindataset[col][dp]['error'] = 0\n return", "def error_reason(self, error_reason):\n\n self._error_reason = error_reason", "def _setErrorNodes(self, errorNodes):\n self._errorNodes = errorNodes", "def add_error(self, resource_type, seq, attr_name, error):\n resource_errors = self.errors.setdefault(resource_type, {})\n seq_errors = resource_errors.setdefault(seq, {})\n attr_errors = seq_errors.setdefault(attr_name, [])\n attr_errors.append(error)" ]
[ "0.7063868", "0.6316049", "0.62133807", "0.5995449", "0.5995449", "0.5993789", "0.5955976", "0.58611673", "0.5738866", "0.57359105", "0.56898946", "0.56828547", "0.56606567", "0.56536406", "0.5637317", "0.5577304", "0.5540206", "0.5453656", "0.54534554", "0.54445463", "0.5427907", "0.5297211", "0.5279484", "0.525997", "0.5244819", "0.5221872", "0.5194199", "0.51878995", "0.5151027", "0.51397586", "0.5121268", "0.5107964", "0.50943774", "0.5080314", "0.50717455", "0.506621", "0.50495", "0.50426507", "0.5004578", "0.5003011", "0.49701798", "0.4967973", "0.49484527", "0.49450395", "0.49142504", "0.49068043", "0.48854777", "0.48778656", "0.4864789", "0.48622867", "0.48615256", "0.4830137", "0.4813637", "0.48103064", "0.4775602", "0.47728068", "0.47710347", "0.47584313", "0.47584012", "0.47515887", "0.47194967", "0.47092918", "0.47020674", "0.4701304", "0.4700421", "0.4700421", "0.4686703", "0.46864355", "0.46795753", "0.46674326", "0.46588477", "0.4658053", "0.4642491", "0.46408594", "0.4626274", "0.46254867", "0.4603615", "0.45933828", "0.45904434", "0.45894766", "0.4588426", "0.45809337", "0.45782277", "0.45782277", "0.45782277", "0.45703274", "0.45577374", "0.4540596", "0.45389873", "0.45238033", "0.4520139", "0.45176798", "0.4509601", "0.45089304", "0.4501302", "0.44994268", "0.44923404", "0.44890392", "0.4487223", "0.4486627" ]
0.7172415
0
Gets the sms_config of this MessagingCampaign. Configuration for this messaging campaign to send SMS messages.
def sms_config(self):
    return self._sms_config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sms_config(self, sms_config):\n \n self._sms_config = sms_config", "def config(self):\n if self.__config is None:\n self.__config = self._get_config(self.bot)\n return self.__config", "def sms(self):\r\n return sms.SMS(self)", "def get_configuration(self) -> dict:\n return self.config", "def get_config(self):\n return self.config", "def getConfig(self):\n \n return self.config", "def config(self):\n return self[CONFIG_KEY]", "def config(self):\n annotations = IAnnotations(self.context)\n return annotations.get(CONFIGURATION_KEY, {})", "def config(self):\n if not hasattr(self,\"_config\") or self._config is None:\n self._config = {}\n \n return self._config", "def configuration(self):\n if self.integration is None:\n return None\n return self.integration.configuration", "def config(self) -> ServerConfig:\n return self._config", "def get_config(self) -> Dict[str, Any]:\n if self.config is None:\n self.config = self.load_config()\n\n return self.config", "def get_config(self) -> Dict[str, Any]:\n if self.config is None:\n self.config = self.load_config()\n\n return self.config", "def config(self):\n return self.__config", "def config(self):\n return self.__config", "def get_config(self):\n\n # make sure that the config reflects the state of the underlying logic\n self.logic_to_config()\n # and then return the config struct.\n return self._config", "def config(self) -> dict:\n return self._config", "def config(self):\n return self._config", "def config(self):\n return self._config", "def config(self):\n return self._config", "def CONFIG(self) -> misc_.Config:\n\t\treturn self._CONFIG", "def get_config(self):\n if self.allow_reco():\n return self.chs_config()\n else:\n return self.get_config_j(self.id)", "def config(self) -> ClientConfig:\n return self._config", "def configuration(self):\n return self._config", "def get_config_connection(self):\n return self.m_connection.config", "def sms_disabled(self):\n return self._sms_disabled", "def config(self):\n return self._cfg", "def config(self) -> Any:\n return self._config", "def getConfiguration(self):\n return self._configuration", "def destination_config(self):\n return self._destination_config", "def properties(self) -> Optional[pulumi.Input['SmsChannelPropertiesArgs']]:\n return pulumi.get(self, \"properties\")", "def get_config(self) -> dict:\n out = {}\n for name in self.CONFIG_DEFAULTS:\n out[name] = self.__getattribute__(name)\n return out", "def get_config(cls):\n if not cls.config:\n import aha\n cls.config = aha.Config()\n return cls.config", "def _get_config(self):\n return self._get_from_settings(settings.INTEGRATIONS_PAGER_DUTY_WEBHOOKS, 'service_key')", "def config(self) -> dict:\n return self._configs", "def get_SMS():\n\tfor msg in client.messages.list():\n\t\t#check for incoming sms and add to hash\n\n\t\t#delete prev SMS from same sender\t\n\t\tif(msg.from_ != base):\n\t\t\t#check if sender had sent SMS previously\n\t\t\tif msg.from_ in msg_received.keys():\n\t\t\t\t#if so delete the prev SMS\n\t\t\t\tcurr_sid = msg.sid\n\t\t\t\tdelete_rSMS(curr_sid)\t\t\n\t\t\telse :\n\t\t\t\t#otherwise store in hash\n\t\t\t\tmsg_received[msg.from_] = msg.body", "def config(self):\n return CurrentProject().config.config[self.key]", "def get_config(cls) -> GlobalConfig:\n if cls.config is None:\n raise ValueError(\"Config is not loaded\")\n return cls.config", "def get_service_config(self, honeypotids, serviceid):\n req = {\"type\": \"get_settings\", \n \"from\": self.network.mc_id,\n \"to\": honeypotids,\n \"service\": serviceid}\n 
expect_dict = {\"type\": \"hp_settings\"}\n msg_list = self.send_receive(req, honeypotids, expect_dict)\n answer = {}\n for msg in msg_list:\n answer[msg[\"from\"]] = msg[\"settings\"]\n return answer", "def get_config(self) -> Dict:\n if not self.config_file or not os.path.exists(self.config_file):\n logger.debug(\"No configuration file was provided to the RasaFileImporter.\")\n return {}\n\n config = rasa.shared.utils.io.read_model_configuration(self.config_file)\n return config", "def config(self) -> Config:\n if self._config is None:\n self._config = Config(Path.cwd())\n return self._config", "def config(self) -> ModelConfigDict:\n return self.config_obj.to_dict()", "def config(self) -> dict:\n if not self._config:\n self._file_path = FileConfig._find_config_file(self._file_path)\n self._config = FileConfig._load_config_from_file(self._file_path) if self._file_path else dict()\n return self._config", "def configuration(self):\n return self._configuration", "def customer_gateway_configuration(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"customer_gateway_configuration\")", "def send_sms(self, sms):\n pass", "def config(self):\n if self._config is None:\n try:\n task_conf = CONF.workflow.tasks[self.task_type]\n except KeyError:\n task_conf = {}\n self._config = CONF.workflow.task_defaults.overlay(task_conf)\n return self._config", "def send_service_config(self, honeypotids, config):\n req = {\"type\": \"set_settings\", \n \"from\": self.network.mc_id,\n \"to\": honeypotids,\n \"settings\": config}\n expect_dict = {\"type\": \"hp_settings\"}\n msg_list = self.send_receive(req, honeypotids, expect_dict)\n answer = {}\n for msg in msg_list:\n answer[msg[\"from\"]] = msg[\"settings\"]\n return answer", "def config(self) -> PipelineConfiguration:\n return self._config", "def config(self):\r\n return self._config", "def getConfig(self):\n return self.cp", "def config(self) -> 'outputs.DeviceConfigResponse':\n return pulumi.get(self, \"config\")", "def getConfiguration(self):\n # TODO: Split metadata (e.g. name and version) from configuration data.\n # Currently, we do this by selectively copying from __dict__. 
A\n # cleaner separation would require refactoring all the way through how\n # we create update objects.\n config = {}\n for key in self.__dict__:\n if key in self.CONFIG_FIELDS:\n config[key] = self.__dict__[key]\n return config", "def get_cm_config(self, view='summary'):\n return self.api_client.get_cm_config(view=view)['items']", "def get_matrix_configuration() -> Optional[Dict]:\n try:\n matrix_config_func = get_function_from_config(\"matrix_config_function\")\n except AttributeError:\n raise AttributeError(\"Not configured for Matrix support\")\n return matrix_config_func()", "def ssl_config(self) -> 'outputs.SslConfigResponse':\n return pulumi.get(self, \"ssl_config\")", "def getSettings(self):\n return self.cfg", "def insights_config(self) -> 'outputs.InsightsConfigResponse':\n return pulumi.get(self, \"insights_config\")", "def config_bot(self):\n # Return the config detail to API requests\n config_data = dict(SPARK_BOT_EMAIL=self.spark_bot_email,\n SPARK_BOT_TOKEN=\"--Redacted--\",\n SPARK_BOT_URL=self.spark_bot_url,\n SPARK_BOT_NAME=self.spark_bot_name)\n\n return json.dumps(config_data)", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config" ]
[ "0.66070634", "0.592993", "0.56343", "0.54566157", "0.5446236", "0.5412347", "0.53807044", "0.53407514", "0.5317883", "0.52791315", "0.52556473", "0.52439207", "0.52439207", "0.52175206", "0.52175206", "0.52135134", "0.5207754", "0.5194879", "0.5194879", "0.5194879", "0.51929045", "0.51494735", "0.5143155", "0.5127543", "0.5107325", "0.50860894", "0.50726235", "0.5026649", "0.5018908", "0.5013706", "0.50121075", "0.4980803", "0.49613893", "0.49281213", "0.4907883", "0.48914218", "0.48905975", "0.4888918", "0.48636135", "0.48081803", "0.48053908", "0.47825998", "0.47755703", "0.47734717", "0.4747491", "0.47451946", "0.47438625", "0.47398347", "0.47318155", "0.47254676", "0.47153658", "0.47153", "0.47104597", "0.47049296", "0.47025862", "0.46996775", "0.4699062", "0.46940023", "0.46868774", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477", "0.46682477" ]
0.8423905
0
Sets the sms_config of this MessagingCampaign. Configuration for this messaging campaign to send SMS messages.
def sms_config(self, sms_config):
    self._sms_config = sms_config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sms_config(self):\n return self._sms_config", "def sms_enabled(self, sms_enabled):\n\n self._sms_enabled = sms_enabled", "def send_sms(self, sms):\n pass", "def sms_phone_number(self, sms_phone_number):\n\n self._sms_phone_number = sms_phone_number", "def sms_disabled(self, sms_disabled):\n\n self._sms_disabled = sms_disabled", "def sms(self, phone_address, message):\n self.server.sendmail(self.username, phone_address, message)", "def send_sms(self, body):\n message = self.twilio_client.sms.messages.create(to=self.to_num, from_=self.from_num, body=body)", "def set_config(self, config):\n\n self.config = config\n\n return self", "def send_sms(self, message, to=CONTACT_NUMBER):\n try:\n pbx_alarm = PBXAlert()\n pbx_alarm.send_sms(self.tinfo['message'])\n if self.verbose:\n print(\"{} Successfully sent SMS!\".format(Timer.OK))\n return True\n except Exception as e:\n print(\"{} Caught exception in send_sms: {}\".format(Timer.FAIL, e))\n return False", "def send_sms(domain, contact, phone_number, text, **kwargs):\n if phone_number is None:\n return False\n if isinstance(phone_number, int) or isinstance(phone_number, long):\n phone_number = str(phone_number)\n phone_number = clean_phone_number(phone_number)\n\n msg = SMSLog(\n domain=domain,\n phone_number=phone_number,\n direction=OUTGOING,\n date = datetime.utcnow(),\n backend_id=None,\n text = text\n )\n if contact:\n msg.couch_recipient = contact._id\n msg.couch_recipient_doc_type = contact.doc_type\n add_msg_tags(msg, **kwargs)\n \n def onerror():\n logging.exception(\"Problem sending SMS to %s\" % phone_number)\n return queue_outgoing_sms(msg, onerror=onerror)", "def set_sms_telephone_number(self, telephone_number, email):\n ngo_user_profile = NGOUserProfile.objects.get(user__email=email)\n org_setting = OrganizationSetting.objects.get(organization__org_id=ngo_user_profile.org_id)\n smsc = SMSC(vumi_username=\"smsc\")\n smsc.save()\n outgoing_number = OutgoingNumberSetting(phone_number=telephone_number, smsc=smsc)\n outgoing_number.save()\n org_setting.sms_tel_number = telephone_number\n org_setting.outgoing_number = outgoing_number\n org_setting.save()", "def sms(self):\r\n return sms.SMS(self)", "def delete_sms(self, sms_id: int) -> SetResponseType:\n return self._connection.post_set('sms/delete-sms', {'Index': sms_id})", "def can_enable_for_sms(self, can_enable_for_sms):\n\n self._can_enable_for_sms = can_enable_for_sms", "def __init__(self, config):\n self.smsapi_username = config['smsapi_username']\n self.smsapi_password = config['smsapi_password']\n self.smsapi_recipient = config['smsapi_recipient']\n\n super(SmsApiNotifier, self).__init__(config)", "def send_sms(self,msg,to=None,long=True):\n if long:\n return self.send_msg(msg,to,\"SendCatSMS\")\n else:\n return self.send_msg(msg,to,\"SendSMS\")", "def send_service_config(self, honeypotids, config):\n req = {\"type\": \"set_settings\", \n \"from\": self.network.mc_id,\n \"to\": honeypotids,\n \"settings\": config}\n expect_dict = {\"type\": \"hp_settings\"}\n msg_list = self.send_receive(req, honeypotids, expect_dict)\n answer = {}\n for msg in msg_list:\n answer[msg[\"from\"]] = msg[\"settings\"]\n return answer", "def set_config(self, config):\r\n self._config = config\r\n self._config.dump_to_sdb(\"botoConfigs\", self.id)", "def send_transactional_sms(self, phone_number, message):\n if not settings.CAN_SEND_SMS: # So that we do not send SMS while development\n return\n if not phone_number:\n logger.warning('No phone number received for meaasge: {0}'.format(message))\n 
raise MissingPhoneNumberException('No phone number received to send the SMS to')\n request_data = self._get_request()\n request_data[\"sms\"] = [{\"message\": message, \"to\": [phone_number]}]\n logger.info('Sending SMS to {0}. SMS content {1}'.format(phone_number, request_data))\n sms_response = self._get_response_from_msg91(self.request_method, self.MSG91_SMS_URL, request_data)\n logger.info(\"sms_response {0}\".format(sms_response))\n if sms_response.get('type') != \"success\":\n raise MessageSendingFailed('The service provider failed to send the SMS')", "def config(self, config):\n self._config = config", "def tls_config(self, tls_config):\n\n self._tls_config = tls_config", "def send_sms(message):\n client.messages.create(\n body=message,\n from_=os.environ['TWILIO_NUMBER_FROM'],\n to=['TWILIO_NUMBER_TO']\n )", "def send_sms_via_api(self, mobile, text=\"\"):\n\n mobile = self.sanitise_phone(mobile)\n\n try:\n self.sms_api_post_config[self.sms_api.message_variable] = text\n self.sms_api_post_config[self.sms_api.to_variable] = str(mobile)\n query = urllib.urlencode(self.sms_api_post_config)\n request = urllib.urlopen(self.sms_api.url, query)\n output = request.read()\n return True\n except:\n return False", "def send_transactional_sms(self, phone_number, message):\n if not settings.CAN_SEND_SMS: # So that we do not send SMS while development\n return\n if not phone_number:\n logger.warning('No phone number received for meaasge: {0}'.format(message))\n raise MissingPhoneNumberException('No phone number received to send the SMS to')\n request_data = {\n 'From': self.exo_phone,\n \"To\": phone_number,\n 'Body': message\n }\n logger.info('Sending SMS to {0}. SMS content {1}'.format(phone_number, message))\n sms_response = requests.post(self.EXOTEL_SMS_URL.format(self.sid, self.token), data=request_data).json()\n logger.info(sms_response)\n if sms_response.get('RestException'):\n logger.warn('SMS sending failed. 
Rsponse from exotel - {0}'.format(sms_response))\n elif sms_response.get('SMSMessage') and sms_response['SMSMessage']['Status'] \\\n not in self.EXOTEL_SUCCESS_STATUS_LIST:\n raise MessageSendingFailed('The service provider failed to send the SMS')", "def set_config(self, config):\n self.adversarial = config.adversarial\n self.eps = config.eps\n self.probability = config.probability\n self.use_dynamics = config.use_dynamics\n self.random = config.random\n self.observable_noise = config.observable_noise\n self.use_max_norm = config.use_max_norm", "def send_sms(self, message, phone=None, client=None, send_before=None):\n if not phone:\n phone = str(client.phone)\n if not phone:\n return False\n return self.sender.send(message, phone, client, send_before)", "def send_config(self, configdict):\n self.config = configdict\n self.send_to_backend('config', configdict=configdict)", "def send_sms(message_text: str, receiver_sms=\"+27749911999\"):\n user = \"AC6925977501b11f3f5ea71105df8a4ea7\"\n twilio_client = Client(user, keyring.get_password(\"twilio\", user))\n twilio_client.messages.create(to=receiver_sms,\n from_=\"+19149964656\",\n body=message_text)", "def send_sms(self, from_number, to_number, msg, tag=None):\n # allow sending to a group in one call\n if not isinstance(to_number, list):\n to_number = [to_number]\n\n return self.sms_client.send_message(\n from_=self._as_e164(from_number),\n to=[self._as_e164(number) for number in to_number],\n text=msg,\n tag=tag,\n applicationId=settings.BW_APP_ID\n )", "def send_sms(msg, phone_number, logger=None):\n\n if logger:\n logger.debug(f\"msg: '{msg}'\")\n logger.debug(f\"phone_number: '{phone_number}'\")\n sns = boto3.client('sns')\n try:\n sns.publish(PhoneNumber=phone_number, Message=msg)\n except BaseException as e:\n if logger:\n logger.error(e)\n raise e\n if logger:\n logger.info(f'SMS with available dates was sent to {phone_number}.')", "def setConfiguration(self, config):\n raise NotImplementedError", "def test_broadcast_sms(self, TwilioRestClient_mock):\n from google.appengine.ext import ndb\n from sosbeacon.event.message import Message\n from sosbeacon.event.message import broadcast_sms\n\n message_key = Mock(spec=ndb.Key)\n message_key.kind.return_value = \"Message\"\n message = Message(key=message_key, message_type='em', message={'email': '', 'sms': 'Test sms'})\n message_key.get.return_value = message\n\n broadcast_sms('84973796065', message, 'http://google.com', 'longly', 'longly')\n self.assertEqual(1, TwilioRestClient_mock.call_count)", "def send_sms(message):\n\ttwilio = tw(AccountSID, AuthToken, MyNumber, TwilioNumber)\n\ttwilio.send_sms(message)", "def configure(self, config: dict):\n self.config.update(config)", "def send_mms(self, from_number, to_number, msg, media, tag=None):\n # allow sending to a group in one call\n if not isinstance(to_number, list):\n to_number = [to_number]\n\n return self.sms_client.send_message(\n from_=self._as_e164(from_number),\n to=[self._as_e164(number) for number in to_number],\n text=msg,\n tag=tag,\n media=media,\n applicationId=settings.BW_APP_ID\n )", "def send_sms(phone_number: str, subject: str, body: str, carrier: str) -> None:\n # creates SMTP session\n s = smtplib.SMTP('smtp.gmail.com', 587)\n receiver_email_id = f'{phone_number}@{CARRIERS_DICT[carrier]}'\n # start TLS for security\n s.starttls()\n # Authentication\n s.login(SENDER_EMAIL_ID, SENDER_EMAIL_ID_PASSWORD)\n\n message = (\"From: %s\\r\\n\" % SENDER_EMAIL_ID\n + \"To: %s\\r\\n\" % receiver_email_id\n + \"Subject: %s\\r\\n\" % 
subject\n + \"\\r\\n\"\n + body)\n\n # sending the mail\n s.sendmail(SENDER_EMAIL_ID, [receiver_email_id], message)\n # terminating the session\n s.quit()", "def set_config(self, config):\n if 'symbols' in config:\n self.symbols = self.config['symbols'] = config['symbols']\n if 'update_frequency_milliseconds' in config:\n self.update_frequency_milliseconds = self.config['update_frequency_milliseconds'] = int(\n config['update_frequency_milliseconds']\n )\n if 'elements_per_update' in config:\n self.elements_per_update = self.config['elements_per_update'] = int(config['elements_per_update'])", "def config(self, config_dict):\r\n self._cfg.config = config_dict", "def syslog_config(self, syslog_config):\n\n self._syslog_config = syslog_config", "def send_bulk_transactional_sms(self, phone_numbers, message):\n if not settings.CAN_SEND_SMS: # So that we do not send SMS while development\n return\n if not phone_numbers:\n logger.warning('No phone number received for meaasge: {0}'.format(message))\n raise MissingPhoneNumberException('No phone number received to send the SMS to')\n request_data = {\n 'From': self.exo_phone,\n \"To[]\": phone_numbers,\n 'Body': message\n }\n logger.info('Sending SMS to {0}. SMS content {1}'.format(phone_numbers, message))\n sms_response = requests.post(self.EXOTEL_SMS_URL.format(self.sid, self.token), data=request_data).json()\n logger.info(sms_response)\n for res in sms_response:\n if res.get('RestException'):\n logger.warn('SMS sending failed. Rsponse from exotel - {0}'.format(sms_response))\n elif res.get('SMSMessage') and res['SMSMessage']['Status'] \\\n not in self.EXOTEL_SUCCESS_STATUS_LIST:\n raise MessageSendingFailed('The service provider failed to send the SMS')", "def sms(self, did, dst, message):\n method = \"sendSMS\"\n\n if not isinstance(did, int):\n raise ValueError(\"DID Numbers which is sending the message needs to be an int (Example: 5551234567)\")\n\n if not isinstance(dst, int):\n raise ValueError(\"Destination Number needs to be an int (Example: 5551234568) \")\n\n if not isinstance(message, str):\n raise ValueError(\"Message to be sent needs to be a str (Example: 'hello John Smith' max chars: 160)\")\n else:\n if len(message) > 160:\n raise ValueError(\"Message to be sent can only have 160 chars\")\n\n parameters = {\n \"did\": did,\n \"dst\": dst,\n \"message\": message,\n }\n\n return self._voipms_client._get(method, parameters)", "def mms_content(self, mms_content):\n if mms_content is None:\n raise ValueError(\"Invalid value for `mms_content`, must not be `None`\") # noqa: E501\n\n self._mms_content = mms_content", "def test_custom_provider_setting(\n config,\n):\n sms = YesssSMS.YesssSMS(\n LOGIN,\n YESSS_PASSWD,\n custom_provider={\n \"LOGIN_URL\": \"https://example.com/login\",\n \"LOGOUT_URL\": \"https://example.com/logout\",\n \"KONTOMANAGER_URL\": \"https://example.com/kontomanager\",\n \"WEBSMS_FORM_URL\": \"https://example.com/send_websms\",\n \"SEND_SMS_URL\": \"https://example.com/websms\",\n },\n )\n assert sms._login_url == \"https://example.com/login\"\n assert sms._logout_url == \"https://example.com/logout\"\n assert sms._kontomanager == \"https://example.com/kontomanager\"\n assert sms._sms_form_url == \"https://example.com/send_websms\"\n assert sms._send_sms_url == \"https://example.com/websms\"", "def send_message(self, to_number, message, from_number=None):\n\n values = {'token': self._token, 'to': to_number, 'message': message}\n if from_number is not None:\n values['from'] = from_number\n else:\n values['from'] = 
'skylinesms'\n\n return self._request(self.SEND_SMS_URL, values)", "def add_sms(self, message, phone=None, client=None, send_before=None):\n if not phone:\n phone = str(client.phone)\n if not phone:\n return False\n return self.sender.add(message, phone, client, send_before)", "def send_sms(to, datas, temp_id):\n cpp = CCP()\n cpp.sendTemplateSMS(to, datas, temp_id)", "def destination_config(self, destination_config):\n self._destination_config = destination_config", "def register_config(self, config):\n\n self._config = config", "def build_config(self, config):\n config.setdefaults('Makesmith Settings', {'COMport': 'COM5', 'xPitch': 20, 'openFile': \" \"})", "def sms():\n def send_sms(number, message):\n #get session bus\n try:\n session_bus = dbus.SessionBus()\n except dbus.exceptions.DBusException:\n click.echo(chalk.red('Have a display you must'))\n return\n\n #check for kdeconnect\n try:\n devices_dbus_obj = session_bus.get_object('org.kde.kdeconnect','/modules/kdeconnect/devices')\n except dbus.exceptions.DBusException:\n click.echo(chalk.red('kdeconnect not installed it appears'))\n return\n\n #get devices ids\n devices_xml = devices_dbus_obj.Introspect(dbus_interface='org.freedesktop.DBus.Introspectable')\n devices_xml = ET.fromstring(devices_xml)\n nodes = devices_xml.findall('node')\n if(len(nodes) is 0):\n click.echo(chalk.red('Devices there are not'))\n return\n deviceIDs = list()\n for node in nodes:\n deviceIDs.append(node.get('name'))\n\n #get devices properties\n deviceID_Props = dict()\n for ID in deviceIDs:\n try:\n device = session_bus.get_object('org.kde.kdeconnect', '/modules/kdeconnect/devices/' + ID)\n deviceProps = device.GetAll('', dbus_interface='org.freedesktop.DBus.Properties')\n deviceID_Props[ID] = deviceProps\n except dbus.exceptions.DBusException:\n #don't create an entry in the dictionary if the object, or a GetAll method does not exist\n pass\n if(len(deviceID_Props) is 0):\n click.echo(chalk.red('Devices there are not'))\n return\n\n #eliminate non sms devices\n devices_no_sms = list()\n for device in deviceID_Props:\n keeping = False\n for plugin in deviceID_Props[device]['supportedPlugins']:\n if('sms' in plugin):\n keeping = True\n if(not keeping):\n devices_no_sms.append(device)\n for device in devices_no_sms:\n del deviceID_Props[device]\n\n #if there are no devices that support sms\n if(len(deviceID_Props) is 0):\n click.echo(chalk.red('Devices that support sms there are not'))\n return\n #elif only one device was found that supports sms\n elif(len(deviceID_Props) is 1):\n click.echo(chalk.yellow('Device using: ' + str(list(deviceID_Props.values())[0]['name'])))\n sendMessage = session_bus.get_object('org.kde.kdeconnect', '/modules/kdeconnect/devices/' + str(list(deviceID_Props.keys())[0]) + '/sms')\n sendMessage.sendSms(number, message, dbus_interface='org.kde.kdeconnect.device.sms')\n return\n #otherwise get user to choose device\n else:\n choice_map = dict()\n for idx, device in enumerate(deviceID_Props, start=1):\n click.echo(chalk.green(str(idx) + ': ' + deviceID_Props[device]['name']))\n choice_map[str(idx)] = device\n choice = click.prompt(chalk.blue('Device, you must select: '), default='1', type=click.Choice(choice_map.keys()))\n #click.echo('you chose: ' + choice_map[the_chosen_device] + ' with id: ' + deviceNames_IDs[choice_map[the_chosen_device]])\n sendMessage = session_bus.get_object('org.kde.kdeconnect', '/modules/kdeconnect/devices/' + choice_map[choice] + '/sms')\n sendMessage.sendSms(number, message, 
dbus_interface='org.kde.kdeconnect.device.sms')\n return\n\n click.echo(chalk.blue('For whom you want to send an sms'))\n friend_name = input().strip()\n friend_name_lower = friend_name.lower()\n if os.path.isfile(PEOPLE_CONFIG_FILE_PATH):\n with open(PEOPLE_CONFIG_FILE_PATH) as fin:\n contents = yaml.load(fin)\n entries = contents['entries']\n for entry in entries:\n if(friend_name == entry['name'] or friend_name_lower == entry['name']):\n number = entry['mobile']\n break\n if('number' not in locals()):\n click.echo(chalk.red('Friend not found.'))\n else:\n if(len(number) is not 0):\n click.echo(chalk.blue('Message, you must enter: '))\n message = input(':')\n click.echo(chalk.yellow('Device to send sms to ' + number + ' looking for: '))\n send_sms(number, message)\n else:\n click.echo(chalk.red('Friends number not in people file, run `yoda people setup` to add it.'))\n else:\n click.echo(chalk.red('The People file does not exist, run `yoda people setup` to create an entry.'))", "def set_config(self, **config_opt) -> None:\n for name, default in self.CONFIG_DEFAULTS.items():\n if name in config_opt:\n self.__setattr__(name, config_opt[name])\n elif name not in self.__dict__:\n self.__setattr__(name, default)", "def sms_disabled(self):\n return self._sms_disabled", "def channel(self, channel):\n allowed_values = [\"sms\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and channel not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `channel` ({0}), must be one of {1}\" # noqa: E501\n .format(channel, allowed_values)\n )\n\n self._channel = channel", "def send_sms_via_modem(self, mobile, text=\"\"):\n\n mobile = self.sanitise_phone(mobile)\n\n # Add '+' before country code\n mobile = \"+\" + mobile\n\n try:\n self.modem.send_sms(mobile, text)\n return True\n except:\n return False", "def set_config(self, config_name=None, optional=False, **CONFIG_VARS):\n assert self._module\n config, _ = self.make_config_params(config_name, optional, **CONFIG_VARS)\n if config:\n self.c = config", "def instant_messaging_address(self, instant_messaging_address: str):\n self._instant_messaging_address = instant_messaging_address", "def send_alert(self, name: str, monitor: Monitor) -> None:\n\n if not monitor.urgent:\n return\n\n alert_type = self.should_alert(monitor)\n if alert_type not in [AlertType.FAILURE, AlertType.SUCCESS]:\n return\n\n message = self.build_message(AlertLength.SMS, alert_type, monitor)\n\n url = \"https://gateway.sms77.io/api/sms\"\n params = {\n \"text\": message,\n \"to\": self.target,\n \"from\": self.sender,\n \"p\": self.api_key,\n }\n\n if not self._dry_run:\n try:\n r = requests.get(url, params=params)\n s = r.text\n if not s.startswith(\"100\"):\n self.alerter_logger.error(\"Unable to send SMS: status code %s\", s)\n except Exception:\n self.alerter_logger.exception(\"SMS sending failed\")\n else:\n self.alerter_logger.info(\"dry_run: would send SMS: %s\", message)", "def set_config(config):\n global _config\n logging.config.dictConfig(config)\n _configure_ulog_bridge()\n _config = config", "def register_config(self, config):\n self.config = config", "def send_sms(to, datas, temp_id):\n # celery's client only depend on the function name and the args.\n cpp = CCP()\n ret = cpp.sendTemplateSMS(to, datas, temp_id)\n # return celery async_result value\n return ret", "def multicastsourcesiteconfigs(self, site_id, multicastsourcesiteconfig_id, data, tenant_id=None, api_version=\"v2.0\"):\n\n if tenant_id is None and 
self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/sites/{}/multicastsourcesiteconfigs/{}\".format(api_version,\n tenant_id,\n site_id,\n multicastsourcesiteconfig_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)", "def send_text(self, phone_number):\n sms_params = urllib.urlencode({\n '_rnr_se': self.key,\n 'phoneNumber': phone_number,\n 'text': self.text\n })\n # Send the text, display status message \n self.response = \"true\" in self.opener.open(self.sms_url, sms_params).read()", "def configure(self, config: ConfigParams):\n parameters = config.get_section(\"parameters\")\n if len(parameters) > 0:\n self.__parameters = parameters", "def send(self):\n ret = yield Service.get('SMS').send(self)\n yield ret", "async def sendTextSMS(\n self, body: str, dest: str, source: Optional[Dict[str, str]] = None\n ) -> None:\n send_body: SendSMSBody = {\n \"mobileTerminate\": {\n \"message\": {\"content\": body, \"type\": \"text\"},\n \"destination\": {\n \"address\": dest,\n },\n },\n }\n if source:\n send_body[\"mobileTerminate\"][\"source\"] = {\n \"ton\": tonFromType(source[\"type\"]),\n \"address\": source[\"text\"],\n }\n\n username = self.sydent.config.sms.api_username\n password = self.sydent.config.sms.api_password\n\n b64creds = b64encode(b\"%s:%s\" % (username, password))\n req_headers = Headers(\n {\n b\"Authorization\": [b\"Basic \" + b64creds],\n b\"Content-Type\": [b\"application/json\"],\n }\n )\n\n # Type safety: The case from a TypedDict to a regular Dict is required\n # because the two are deliberately not compatible. See\n # https://github.com/python/mypy/issues/4976\n # for details, but in a nutshell: Dicts can have keys added or removed,\n # and that would break the invariants that a TypedDict is there to check.\n # The case below is safe because we never use send_body afterwards.\n resp, response_body = await self.http_cli.post_json_maybe_get_json(\n API_BASE_URL, cast(JsonDict, send_body), {\"headers\": req_headers}\n )\n\n headers = dict(resp.headers.getAllRawHeaders())\n\n request_id = None\n if b\"X-Request-Id\" in headers:\n request_id = headers[b\"X-Request-Id\"][0].decode(\"UTF-8\")\n\n # Catch errors from the API. 
The documentation says a success code should be 202\n # Accepted, but let's be broad here just in case and accept all 2xx response\n # codes.\n #\n # Relevant OpenMarket API documentation:\n # https://www.openmarket.com/docs/Content/apis/v4http/send-json.htm\n if resp.code < 200 or resp.code >= 300:\n if response_body is None or \"error\" not in response_body:\n raise Exception(\n \"OpenMarket API responded with status %d (request ID: %s)\"\n % (\n resp.code,\n request_id,\n ),\n )\n\n error = response_body[\"error\"]\n raise Exception(\n \"OpenMarket API responded with status %d (request ID: %s): %s\"\n % (\n resp.code,\n request_id,\n error,\n ),\n )\n\n ticket_id = None\n if b\"Location\" not in headers:\n logger.error(\"Got response from sending SMS with no location header\")\n else:\n # Nominally we should parse the URL, but we can just split on '/' since\n # we only care about the last part.\n value = headers[b\"Location\"][0].decode(\"UTF-8\")\n parts = value.split(\"/\")\n if len(parts) < 2:\n logger.error(\n \"Got response from sending SMS with malformed location header: %s\",\n value,\n )\n return\n else:\n ticket_id = parts[-1]\n\n logger.info(\n \"Successfully sent SMS (ticket ID: %s, request ID %s), OpenMarket API\"\n \" responded with code %d\",\n ticket_id,\n request_id,\n resp.code,\n )", "def set_config(self, config):\r\n if self.config:\r\n raise ValueError(\"Already configured: %s\" % self.config)\r\n file(self.config_file, \"w\").write(\"\")\r\n config = self.manifest.config_schema.validate(config)\r\n for template in self.manifest.get(\"templates\", []):\r\n print \"Applying template %s with %s\" % (template, config)\r\n EJSTemplate(self.unchroot_path(template)).apply(self.unchroot_path(template), config)\r\n file(self.config_file, \"w\").write(json.dumps(config, indent=1))", "def tx_queue_settings(self, tx_queue_settings):\n\n self._tx_queue_settings = tx_queue_settings", "def scsi_queue_settings(self, scsi_queue_settings):\n\n self._scsi_queue_settings = scsi_queue_settings", "def enable_smtp(self, enable_smtp):\n if enable_smtp is None:\n raise ValueError(\"Invalid value for `enable_smtp`, must not be `None`\")\n\n self._enable_smtp = enable_smtp", "def set_config(self):\n str_config = cmds.getAttr(\"{}.{}\".format(self.root_node,\n CONFIG[\"config_attr\"]))\n try:\n # THIS NEEDS TO BE REVISTED. 
I am adding shit from file\n stored_config = ast.literal_eval(str_config)\n self.setup_config = get_added_dicts(stored_config, CONFIG)\n except Exception:\n cmds.warning(\"Could not retrieve CONFIG stored on setup!\")\n self.setup_config = CONFIG", "def send_sms_to_verified_number(verified_number, text, **kwargs):\n backend = verified_number.backend\n msg = SMSLog(\n couch_recipient_doc_type = verified_number.owner_doc_type,\n couch_recipient = verified_number.owner_id,\n phone_number = \"+\" + str(verified_number.phone_number),\n direction = OUTGOING,\n date = datetime.utcnow(),\n domain = verified_number.domain,\n backend_id = backend._id,\n text = text\n )\n add_msg_tags(msg, **kwargs)\n\n def onerror():\n logging.exception(\"Exception while sending SMS to VerifiedNumber id \" + verified_number._id)\n return queue_outgoing_sms(msg, onerror=onerror)", "def get_SMS():\n\tfor msg in client.messages.list():\n\t\t#check for incoming sms and add to hash\n\n\t\t#delete prev SMS from same sender\t\n\t\tif(msg.from_ != base):\n\t\t\t#check if sender had sent SMS previously\n\t\t\tif msg.from_ in msg_received.keys():\n\t\t\t\t#if so delete the prev SMS\n\t\t\t\tcurr_sid = msg.sid\n\t\t\t\tdelete_rSMS(curr_sid)\t\t\n\t\t\telse :\n\t\t\t\t#otherwise store in hash\n\t\t\t\tmsg_received[msg.from_] = msg.body", "def set_config(self, aConfig):\n \n # we update the dict of the existing config with the passed\n # parameter. This means that the new config is merged with\n # the old, but all new members overwrite old one. This is\n # more robust.\n self._config.__dict__.update(aConfig.__dict__)\n # apply the config to the underlying logic\n self.config_to_logic()\n # bring it back all the way up to the view\n self.logic_to_config()\n\n # but only if we are in view mode\n if self.view_initialised:\n self.config_to_view()\n\n # the config has been set, so we assumem that the module has\n # now been modified. \n self._module_manager.modify_module(self)", "def test_env_var_settings_set(config, environment_vars_set_wowww):\n sms = YesssSMS.YesssSMS()\n assert sms._logindata[\"login_rufnummer\"] == \"03211234567\"\n assert sms._logindata[\"login_passwort\"] == \"MySecr3t\"\n assert sms._provider == \"wowww\"\n\n os.environ[\"YESSSSMS_PROVIDER\"] = \"goood\"\n sms = YesssSMS.YesssSMS(\"123456\", \"password\")\n assert sms._logindata[\"login_rufnummer\"] == \"03211234567\"\n assert sms._logindata[\"login_passwort\"] == \"MySecr3t\"\n assert sms._provider == \"goood\"\n\n del os.environ[\"YESSSSMS_PROVIDER\"]\n sms = YesssSMS.YesssSMS(\"123456\")\n assert sms._logindata[\"login_rufnummer\"] == \"03211234567\"\n assert sms._logindata[\"login_passwort\"] == \"MySecr3t\"\n assert sms._provider == \"yesss\"\n\n del os.environ[\"YESSSSMS_LOGIN\"]\n sms = YesssSMS.YesssSMS(\"123456\", \"password\")\n assert sms._logindata[\"login_rufnummer\"] == \"123456\"\n assert sms._logindata[\"login_passwort\"] == \"password\"\n assert sms._provider == \"yesss\"", "def setConfig(self, crcConfigCls):\n CrcComb.setConfig(self, crcConfigCls)", "def create_sms(phone_number, message):\n logger.debug('TWILIO-SEND-SMS to phone number ends with %s', phone_number[-4:])\n\n client = TwilioRestClient(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN)\n try:\n response = client.messages.create(\n to=phone_number,\n from_=settings.TWILIO_PHONE_NUMBER,\n body=message)\n logger.debug('TWILIO-SEND-SMS sent \"%s\" to phone nymber ending with %s. 
SMS sid %s',\n message, phone_number[-4:], response.sid)\n except TwilioException:\n logger.exception('TWILIO-SEND-SMS-FAIL to phone number ending with %s', phone_number[-4:])\n raise", "async def test_set_config(self):\n set_log_levels(logger_topics=True)\n\n disable_auto_linking = random_bool()\n monitor_mode = random_bool()\n auto_led = random_bool()\n deadman = random_bool()\n topic = f\"ack.{SET_IM_CONFIGURATION}\"\n topic_item = TopicItem(\n topic,\n {\n \"disable_auto_linking\": disable_auto_linking,\n \"monitor_mode\": monitor_mode,\n \"auto_led\": auto_led,\n \"deadman\": deadman,\n },\n 0.1,\n )\n\n modem = ModemBase()\n reset_config(modem, disable_auto_linking, monitor_mode, auto_led, deadman)\n\n send_topics([topic_item])\n await modem.async_set_configuration(\n disable_auto_linking, monitor_mode, auto_led, deadman\n )\n await asyncio.sleep(0.1)\n\n assert modem.configuration[DISABLE_AUTO_LINKING].value == disable_auto_linking\n assert modem.configuration[MONITOR_MODE].value == monitor_mode\n assert modem.configuration[AUTO_LED].value == auto_led\n assert modem.configuration[DEADMAN].value == deadman\n\n assert modem.configuration[DISABLE_AUTO_LINKING].new_value is None\n assert modem.configuration[MONITOR_MODE].new_value is None\n assert modem.configuration[AUTO_LED].new_value is None\n assert modem.configuration[DEADMAN].new_value is None", "def send_transactional_sms(self, phone_number, message):\n provider = self._pick_provider()\n return provider.send_transactional_sms(phone_number, message)", "async def send_config_req(self):\n if not self.connected:\n return\n\n data = bytearray(7)\n data[0] = M_START\n data[1] = 5 # len of msg\n data[2] = mtypes[BMTS_CONFIG_REQ][0]\n data[3] = mtypes[BMTS_CONFIG_REQ][1]\n data[4] = mtypes[BMTS_CONFIG_REQ][2]\n data[5] = 0x77 # known value\n data[6] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def set_config(self, attr, value):\n setattr(self.config, attr, value)", "def set_config(self, attr, value):\n setattr(self.config, attr, value)", "def sms_send(self, sender_name, phones, body, date=None, transliterate=False):\n\n logger.info(\"Function call: sms_send\")\n if not sender_name:\n return self.__handle_error('Seems you not pass sender name')\n if not phones:\n return self.__handle_error(\"Empty phones\")\n if not body:\n return self.__handle_error('Seems you not pass sms text')\n\n try:\n phones = json.dumps(phones)\n except:\n logger.debug(\"Phones: {}\".format(phones))\n return self.__handle_error(\"Phones list can't be converted by JSON library\")\n\n data_to_send = {\n 'sender': sender_name,\n 'phones': phones,\n 'body': body,\n 'date': date,\n 'transliterate': transliterate,\n }\n\n return self.__handle_result(self.__send_request('sms/send', 'POST', data_to_send))", "def send_reminders(self, send_reminders):\n\n self._send_reminders = send_reminders", "def set_config(config: Config):\n CurrentConfig.set(config)", "def text_cell_phone(self, sender, message):\n if self.cell_phone:\n text_message.send_sms(sender, message, self.cell_phone)", "def test_resend_sms_update_smscampaignsubscriber(self):\n result = resend_sms_update_smscampaignsubscriber.delay()\n self.assertEqual(result.successful(), True)", "def set_amount_msat(self, amount_msat: Union[int, str]) -> None:\n if amount_msat == \"!\":\n amount_sat = amount_msat\n else:\n assert isinstance(amount_msat, int), f\"{amount_msat=!r}\"\n assert amount_msat >= 0, amount_msat\n amount_sat = (amount_msat // 1000) + int(amount_msat % 1000 > 0) # round up\n if 
outputs := self.outputs:\n assert len(self.outputs) == 1, len(self.outputs)\n self.outputs = [PartialTxOutput(scriptpubkey=outputs[0].scriptpubkey, value=amount_sat)]\n self.amount_msat = amount_msat", "def mms(self, did, dst, message, media1=None, media2=None):\n method = \"sendMMS\"\n\n if not isinstance(did, int):\n raise ValueError(\"DID Numbers which is sending the message needs to be an int (Example: 5551234567)\")\n\n if not isinstance(dst, int):\n raise ValueError(\"Destination Number needs to be an int (Example: 5551234568) \")\n\n if not isinstance(message, str):\n raise ValueError(\"Message to be sent needs to be a str (Example: 'hello John Smith' max chars: 1600)\")\n \n if media1:\n if not valid_url(media1):\n raise ValueError(\"Media1 to be sent needs to be a valid url to media file (Example: 'https://voip.ms/themes/voipms/assets/img/talent.jpg?v=2' \")\n \n if media2:\n if not isBase64(media2):\n raise ValueError(\"Media2 to be sent needs to be a base 64 image encode (Example: data:image/png;base64,iVBORw0KGgoAAAANSUh...)\")\n else:\n if len(message) > 1600:\n raise ValueError(\"Message to be sent can only have 1600 chars\")\n\n if media1:\n parameters = {\n \"did\": did,\n \"dst\": dst,\n \"message\": message,\n \"media1\": media1,\n }\n elif media2:\n parameters = {\n \"did\": did,\n \"dst\": dst,\n \"message\": message,\n \"media2\": media2,\n }\n else:\n parameters = {\n \"did\": did,\n \"dst\": dst,\n \"message\": message,\n }\n\n return self._voipms_client._get(method, parameters)", "def config_setup(self, config):\n super(PushGatewayApiV1TestCase, self).config_setup(config)\n config[\"apps\"][\"com.example.spqr\"] = {\n \"type\": \"tests.test_pushgateway_api_v1.TestPushkin\"\n }", "def set_search_parameters(self, config):\n for k, v in config.items():\n self.config[k] = v", "async def mass_send(self, messages: List[Sms]) -> List[int]:\n raise NotImplementedError", "def sendConfig(self, config, filename=''):\n if self.ioLoopInst is not None:\n cmd = {'cmd': 'setConfig', 'value': config, 'filename': filename}\n self._sendMessageToWeb(cmd)\n else:\n print(\"sendConfig: \" + filename)", "def update_cm_config(self, config_list):\n return self._put(endpoint='{}/cm/config'.format(self.api_version),\n data=config_list).json()", "def __init__(self, body, **kwargs):\n super(Sms, self).__init__(**kwargs)\n self.value = body", "def sent(self, message):\n sent_mail.send(sender=self.__class__, message=message)", "def apply_user_configuration(self, config):\n self.logDisplay.set_logging_level(config['log'].get('logging_level', fallback='Verbose'))\n\n # MIDI\n self.winchMidiInputCombo.select_item(config['midi'].get('winch_midi_input', fallback='<no selection>'))\n self.midiOutputCombo.select_item(config['midi'].get('midi_output', fallback='<no selection>'))\n\n # OSC\n oscdef = config['osc']\n self.oscListenerConfig.set_OSC_port(oscdef.get('listener_addr', fallback='localhost'),\n oscdef.getint('listener_port', fallback=3751))\n\n self.oscSenderConfig.set_OSC_port(oscdef.get('sender_addr', fallback='localhost'),\n oscdef.getint('sender_port', fallback=3752))\n\n # DMX\n self.dmxSelect.select_item(config['dmx'].get('dmx_output_serial_port', fallback='<no selection>'))\n\n # winches\n for i, winchSelect in enumerate(self.winchSelects):\n key = \"winch_%d_output_serial_port\" % (i+1)\n winchSelect.select_item(config['winches'].get(key, fallback = '<no selection>'))\n return", "def send_to_mpesa(self, transaction_id, phone_number, amount: float, ):\n return 
self.send_mobile_money(transaction_id, phone_number, amount, channel=\"mpesa\")", "def gather_configuration(self, config):\n config['log']['logging_level'] = self.logDisplay.get_logging_level()\n\n # MIDI\n config['midi']['winch_midi_input'] = self.winchMidiInputCombo.current_item()\n config['midi']['midi_output'] = self.midiOutputCombo.current_item()\n\n # OSC\n addr, port = self.oscListenerConfig.get_OSC_port()\n config['osc']['listener_addr'] = addr\n config['osc']['listener_port'] = str(port)\n addr, port = self.oscSenderConfig.get_OSC_port()\n config['osc']['sender_addr'] = addr\n config['osc']['sender_port'] = str(port)\n\n # DMX\n config['dmx']['dmx_output_serial_port'] = self.dmxSelect.current_item()\n\n # winches\n for i, winchSelect in enumerate(self.winchSelects):\n key = \"winch_%d_output_serial_port\" % (i+1)\n config['winches'][key] = winchSelect.current_item()\n\n return", "def test_sending_sms(self):\n try:\n from django.conf import settings\n except ImportError:\n self.fail(msg=\"No TEST_NUMBER found in settings!\")\n\n from rapidsms.router import send\n from rapidsms.models import Connection, Backend\n from random import randint\n\n b = Backend.objects.get_or_create(name='envaya_nexmo')[0]\n c = Connection.objects.get_or_create(identity = settings.TEST_NUMBER, backend = b)[0]\n msg = \"Hey, this is a test message from NexmoOutgoingBackendTest! \\n Your Lucky number is %s\" % (randint(1,42))\n\n send(msg,[c])\n print \"Cannot actually verify whether the message was sent or not because of the limitations of rapdisms framework :-/\"", "def mobile_phone_number(self, mobile_phone_number):\n\n self._mobile_phone_number = mobile_phone_number" ]
[ "0.6710091", "0.6376342", "0.61567897", "0.5817117", "0.5528274", "0.5234292", "0.5205838", "0.515643", "0.5122757", "0.5109166", "0.51015836", "0.50929946", "0.50358784", "0.5014341", "0.49991947", "0.49899116", "0.49732202", "0.49195576", "0.4918217", "0.4918136", "0.49015227", "0.4887474", "0.4870044", "0.48350385", "0.482204", "0.47904205", "0.47790444", "0.47695473", "0.47661555", "0.4762994", "0.47483665", "0.47364718", "0.47290716", "0.47209394", "0.47155026", "0.47141844", "0.46780136", "0.46653858", "0.46191448", "0.46164057", "0.4585761", "0.45822892", "0.4570286", "0.45673665", "0.45513695", "0.44916427", "0.44863316", "0.44827527", "0.4466762", "0.4453298", "0.44422415", "0.44079548", "0.4392814", "0.43900895", "0.4389255", "0.4343499", "0.43264216", "0.43164173", "0.4301937", "0.42906383", "0.42884806", "0.42798135", "0.42730495", "0.4268654", "0.42595196", "0.42592204", "0.4249941", "0.4246669", "0.4228622", "0.42254305", "0.42247823", "0.42225227", "0.4219619", "0.42177373", "0.4215211", "0.42116323", "0.42051864", "0.4200483", "0.41884023", "0.41775042", "0.41775042", "0.4176937", "0.41532516", "0.4132909", "0.41311374", "0.4128442", "0.41173404", "0.411568", "0.41136196", "0.41117716", "0.41088176", "0.41057962", "0.41054833", "0.41022694", "0.41013297", "0.40927935", "0.40916482", "0.40914884", "0.40881965", "0.40696368" ]
0.84385973
0
Gets the self_uri of this MessagingCampaign. The URI for this object
def self_uri(self):
    return self._self_uri
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_uri(self):\n return self.__uri", "def get_uri(self):\n return self.url", "def uri(self) -> str:\n return self._uri", "def getURI(self):\n return _libsbml.SBase_getURI(self)", "def getURI(self):\n return _libsbml.SBasePlugin_getURI(self)", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def get_uri(self):\r\n return self.uri", "def uri(self) -> Optional[str]: # noqa: D401\n return self._uri", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def getUri(self):\n return _libsbml.SBMLUri_getUri(self)", "def get_url(self):\n return self.metadata['thisRecordUrl']", "def get_selfLink(self):\n if 'selfLink' in self.target_pool_config:\n return self.target_pool_config['selfLink']", "def uri(self) -> str:\n return self._host", "def get_url(self):\n return self._url", "def uri(self) -> Optional[str]:\n return pulumi.get(self, \"uri\")", "def get_url(self):\n\n return self.url", "def get_url(self):\n\n return self.url", "def get_uri(self):\n if self._uri is None:\n self._uri = \"{0}{1}/{2}\".format(\n self.session.resource_prefix,\n self.base_uri,\n self.ip_or_ifname_or_group_name,\n )\n\n return self._uri", "def getURI(self):\n return _libsbml.ASTBasePlugin_getURI(self)", "def getURI(self):\n return _libsbml.XMLToken_getURI(self)", "def URL(self):\r\n return self._URL", "def parent(self) -> 'URI':\n if self.type == URIType.SERVICE or self.suffix is not None:\n return URI(\n self.base_url,\n self.study_instance_uid,\n self.series_instance_uid,\n self.sop_instance_uid,\n self.frames,\n suffix=None)\n elif self.type == URIType.STUDY:\n return URI(self.base_url)\n elif self.type == URIType.SERIES:\n return self.study_uri()\n elif self.type == URIType.INSTANCE:\n return self.series_uri()\n else:\n return self.instance_uri()", "def url(self) -> str:\n return self._url", "def url(self) -> str:\n return self._url", "def url(self) -> str:\n return self._url", "def get_url(self):\n return self.resource.url", "def _get_self_collection(self) -> Link:\n rel = \"self\"\n href = self.api_endpoint\n return Link(href=href, rel=rel)", "def uri_reference(self):\n return self._get_uri_reference()", "def url(self):\n\n return self._url", "def url(self):\n\n return self._url", "def url(self):\n\n return self._url", "def get_url(self):\n if self.object_id is None:\n return '{0}/{1}'.format(self.parent.get_url(), self.path)\n\n return '{0}/{1}/{2}'.format(self.parent.get_url(), self.path,\n self.object_id.replace('/', '-'))", "def selflinking(self):\n return self._selflinking", "def get_reference(self):\n return self.resource.url", "def get_url(self):\n return self.url", "def get_url(self):\n return self.url", "def getFullURL(self):\n return self.FullURL", "def get_url(self) -> str:\n return urljoin(self._base_url, self.url)", "def base_uri(self) -> 
'URI':\n return URI(self.base_url)", "def url(self):\n # type: () -> string_types\n return self._url", "def self_link(self):\n return self._json['coredata'].get('link', [])[0].get('@href')", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def url(self):\n return self._url", "def uri(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"uri\")", "def url(self):\n\n if self.identifier and self.identifier != \"\":\n return self.collection.url + self.identifier + \"/\"\n else:\n return self.collection.url", "def url(self) -> str:\n return self.HTTP.url if self.HTTP else self._url", "def geturl(self):\n return self.__url", "def url(self):\n url = self.url\n return url", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def url(self):\r\n return self.urlparts.geturl()", "def getURI(self):\n return _libsbml.XMLTriple_getURI(self)", "def href(self):\n return self._href", "def href(self):\n return self._href", "def url(self):\n return (urljoin(self.lodgeit.address, self.relative_url)\n if self.relative_url else None)", "def url(self) -> str:\n return self._request.url.path", "def getParentDeviceUrl(self):\n url = \"\"\n dev = self.device()\n if dev: url = dev.absolute_url_path()\n return url", "def url(self) -> str:\n return self.url_as()", "def getURI(self):\n return _libsbml.SBMLNamespaces_getURI(self)", "def url(self):\n if self._url is None:\n self._url = self.request.url\n return self._url", "def get_tracker_uri(self):\r\n return self.tracker_uri", "def full_url(self):\n return \"%s://%s%s\" % (self.protocol, self.host, self.uri)", "def full(self):\n url = (self.scheme + ':') if self.scheme else ''\n url += '//' + self.netloc + self.relative()\n return url", "def url(self):\n return self._client.url", "def self_link_with_id(self) -> str:\n return pulumi.get(self, \"self_link_with_id\")", "def self_link_with_id(self) -> str:\n return pulumi.get(self, \"self_link_with_id\")", "def FullURL(self):\n return self._fullURL", "def url(self) -> str:\n return getattr(\n self.auth_accounts[-1], \"url\" # pylint: disable=unsubscriptable-object\n )", "def url (self):\n return Links.createURL('/')", "def url(self):\n return self.full()", "def self_uri(self, self_uri):\n \n self._self_uri = self_uri", "def self_uri(self, self_uri):\n \n self._self_uri = self_uri" ]
[ "0.72447217", "0.72073245", "0.7078981", "0.7075188", "0.6987316", "0.6909857", "0.6909857", "0.6909857", "0.6909857", "0.6909857", "0.6909857", "0.68474865", "0.6822147", "0.68092525", "0.68092525", "0.68092525", "0.68092525", "0.68092525", "0.68092525", "0.68092525", "0.68092525", "0.68092525", "0.67216885", "0.6682811", "0.6658109", "0.6622247", "0.6535778", "0.6529114", "0.6518842", "0.6518842", "0.6500256", "0.649495", "0.64694816", "0.6434615", "0.6426032", "0.64173126", "0.64173126", "0.64173126", "0.63924545", "0.63829184", "0.63697964", "0.6352315", "0.6352315", "0.6352315", "0.63373077", "0.63280845", "0.63203144", "0.631104", "0.631104", "0.62888867", "0.6288527", "0.6281937", "0.6278156", "0.62724996", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62406147", "0.62298816", "0.622144", "0.6209796", "0.62089884", "0.6177114", "0.6172847", "0.6172847", "0.6172847", "0.6172847", "0.6172847", "0.6172847", "0.6139282", "0.61330163", "0.6124238", "0.6124238", "0.61159426", "0.6104417", "0.6102288", "0.60945463", "0.6093345", "0.609108", "0.60896623", "0.6081188", "0.6054755", "0.6047914", "0.60368806", "0.60368806", "0.60318315", "0.6028935", "0.6019004", "0.6014115", "0.60132194", "0.60132194" ]
0.8186954
3
Sets the self_uri of this MessagingCampaign. The URI for this object
def self_uri(self, self_uri):
    self._self_uri = self_uri
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def self_uri(self):\n return self._self_uri", "def self_uri(self):\n return self._self_uri", "def self_uri(self):\n return self._self_uri", "def self_uri(self):\n return self._self_uri", "def uri(self) -> str:\n return self._uri", "def get_uri(self):\n return self.__uri", "def _get_self_collection(self) -> Link:\n rel = \"self\"\n href = self.api_endpoint\n return Link(href=href, rel=rel)", "def uri(self) -> Optional[str]: # noqa: D401\n return self._uri", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> str:\n return pulumi.get(self, \"self_link\")", "def set_uri(self, uri):\n self.__uri = uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def uri(self):\n return self._uri", "def set_uri(self, uri):\r\n self.uri = uri", "def get_uri(self):\r\n return self.uri", "def get_uri(self):\n return self.url", "def uri(self, uri):\n self._uri = uri", "def uri(self, uri):\n self._uri = uri", "def _set_url(self): \n self.url = self.geturl()", "def getURI(self):\n return _libsbml.SBase_getURI(self)", "def uri(self, uri):\n\n self._uri = uri", "def uri(self, uri):\n\n self._uri = uri", "def uri(self) -> str:\n return self._host", "def getURI(self):\n return _libsbml.SBasePlugin_getURI(self)", "def get_selfLink(self):\n if 'selfLink' in self.target_pool_config:\n return self.target_pool_config['selfLink']", "def get_uri(self):\n if self._uri is None:\n self._uri = \"{0}{1}/{2}\".format(\n self.session.resource_prefix,\n self.base_uri,\n self.ip_or_ifname_or_group_name,\n )\n\n return self._uri", "def uri(self) -> Optional[str]:\n return pulumi.get(self, \"uri\")", "def getUri(self):\n return _libsbml.SBMLUri_getUri(self)", "def parent(self) -> 'URI':\n if self.type == URIType.SERVICE or self.suffix is not None:\n return URI(\n self.base_url,\n self.study_instance_uid,\n self.series_instance_uid,\n self.sop_instance_uid,\n self.frames,\n suffix=None)\n elif self.type == URIType.STUDY:\n return URI(self.base_url)\n elif self.type == URIType.SERIES:\n return self.study_uri()\n elif self.type == URIType.INSTANCE:\n return self.series_uri()\n else:\n return self.instance_uri()", "def base_uri(self) -> 'URI':\n return URI(self.base_url)", "def _self(self, _self):\n\n self.__self = _self", "def _self(self, _self):\n\n self.__self = _self", "def url(self):\n\n if self.identifier and self.identifier != \"\":\n return self.collection.url + self.identifier + \"/\"\n else:\n return self.collection.url", "def set_self_ref(self, resource):\n fullname = utils.class_fullname(resource)\n if fullname not in self._models_index:\n self.send_error(400,\n message=\"Unrecognized resource type: %s\" % type(resource))\n return -1\n resource_name = self._models_index[fullname]\n resource_url = self.reverse_url(\n self._collections[resource_name][\"name\"], resource[self.Id]) \n resource[\"selfRef\"] = \"%s://%s%s\" % (\n self.request.protocol, 
self.request.host, resource_url)\n return 0", "def uri(self, uri):\n if uri is None:\n raise ValueError(\"Invalid value for `uri`, must not be `None`\")\n\n self._uri = uri", "def set_uri(self, uri):\n # Parse URI\n parsed_uri = urllib.parse.urlparse(uri)\n # Separate out the user ID for HydroShare users\n contributor_pk = os.path.basename(parsed_uri.path.strip('/'))\n # Make sure this is a HydroShare user URI\n is_hs_user_uri = False\n try:\n validate_hydroshare_user_id(contributor_pk)\n is_hs_user_uri = True\n except ValidationError:\n pass\n\n if is_hs_user_uri:\n # Set rel_uri\n self.rel_uri = parsed_uri.path\n pk = None\n try:\n pk = int(contributor_pk)\n except ValueError:\n msg = \"User ID {0} is not an integer. User URI was {1}.\"\n raise GenericResourceMeta.ResourceMetaException(msg)\n\n assert (pk is not None)\n self.id = pk\n\n self.uri = uri", "def uri(self):\n if self.fetcher:\n return self.fetcher.uri\n else:\n raise InvalidFetcherAccessPoint(\n \" Initialize an access point (%s) first.\"\n % \",\".join(self.Fetchers.keys())\n )", "def setLocationURI(self, *args):\n return _libsbml.SBMLDocument_setLocationURI(self, *args)", "def full_url(self):\n return \"%s://%s%s\" % (self.protocol, self.host, self.uri)", "def self_link(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"self_link\")", "def selflinking(self):\n return self._selflinking", "def url (self):\n return Links.createURL('/')", "def _uri(self):\n raise NotImplementedError", "def uri(cls):\n return f'{cls.app_label}.{cls.name}'", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def self_link(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"self_link\")", "def uri_reference(self):\n return self._get_uri_reference()", "def saveUri(self):\n ret = libxml2mod.xmlSaveUri(self._o)\n return ret", "def URL(self):\r\n return self._URL", "def getURI(self):\n return _libsbml.ASTBasePlugin_getURI(self)", "def instance_uri(self) -> 'URI':\n if self.type not in (URIType.INSTANCE, URIType.FRAME):\n raise ValueError(\n f'Cannot get an Instance URI from a {self.type!r} URI.')\n return URI(self.base_url, self.study_instance_uid,\n self.series_instance_uid, self.sop_instance_uid)", "def self_link(self):\n return self._json['coredata'].get('link', [])[0].get('@href')", "def on(self, o_self):\r\n self.o_self = o_self\r\n return self", "def url(self) -> str:\n return self._url", "def url(self) -> str:\n return self._url", "def url(self) -> str:\n return self._url", "def url(self):\n if self._url is None:\n self._url = self.request.url\n return self._url", "def self_assign_short_url(self):\n self.image_short_url = short_url.encode_url(self.id)\n return self.image_short_url", "def get_url(self):\n return self.metadata['thisRecordUrl']", "def _set_tracker_uri(self, 
uri):\r\n parse_result = urlparse.urlparse(uri)\r\n if (parse_result.scheme.lower() not in ['http', 'https'] or\r\n not parse_result.netloc or not parse_result.query):\r\n raise InvalidUriError('Invalid tracker URI (%s)' % uri)\r\n qdict = cgi.parse_qs(parse_result.query)\r\n if not qdict or not 'upload_id' in qdict:\r\n raise InvalidUriError('Invalid tracker URI (%s)' % uri)\r\n self.tracker_uri = uri\r\n self.tracker_uri_host = parse_result.netloc\r\n self.tracker_uri_path = '%s/?%s' % (parse_result.netloc,\r\n parse_result.query)\r\n self.server_has_bytes = 0", "def get_tracker_uri(self):\r\n return self.tracker_uri", "def set_tracking_uri(uri):\n global _tracking_uri\n _tracking_uri = uri", "def url(self) -> str:\n return self.HTTP.url if self.HTTP else self._url", "def full(self):\n url = (self.scheme + ':') if self.scheme else ''\n url += '//' + self.netloc + self.relative()\n return url", "def self_href(cls, href: HREF) -> \"Link\":\n href_str = str(os.fspath(href))\n return cls(pystac.RelType.SELF, href_str, media_type=pystac.MediaType.JSON)", "def get_url(self):\n if self.object_id is None:\n return '{0}/{1}'.format(self.parent.get_url(), self.path)\n\n return '{0}/{1}/{2}'.format(self.parent.get_url(), self.path,\n self.object_id.replace('/', '-'))", "def _base_uri(self) -> str:\n if self.use_original_uri:\n header_value = self.use_original_uri.get(\"header_value\")\n conditions = self.use_original_uri.get(\"claim_conditions\")\n if conditions.get(\"any\"):\n uri = self.request.headers.get(header_value)\n else:\n key = self.claims.get(conditions.get(\"claim_key\"))\n val = self.claims.get(conditions.get(\"claim_value\"))\n if self.claims.get(key) == val:\n uri = self.request.headers.get(header_value)\n else:\n uri = self.request.uri\n else:\n uri = self.request.uri\n if not uri:\n uri = self.request.uri\n return uri.split(\"?\")[0]", "def __init__(self, client=None, principal=None, url=None):\n super(ScheduleMailbox, self).__init__(client=client, url=url)\n self._items = None\n if not client and principal:\n self.client = principal.client\n if not principal and client:\n principal = self.client.principal\n if url is not None:\n self.url = client.url.join(URL.objectify(url))\n else:\n self.url = principal.url\n try:\n self.url = self.client.url.join(URL(self.get_property(self.findprop())))\n except:\n logging.error(\"something bad happened\", exc_info=True)\n error.assert_(self.client.check_scheduling_support())\n self.url = None\n raise error.NotFoundError(\n \"principal has no %s. %s\"\n % (str(self.findprop()), error.ERR_FRAGMENT)\n )", "def self_link_with_id(self) -> str:\n return pulumi.get(self, \"self_link_with_id\")", "def self_link_with_id(self) -> str:\n return pulumi.get(self, \"self_link_with_id\")", "def getURI(self):\n return _libsbml.XMLToken_getURI(self)", "def url(self) -> str:\n return self._connection.url + self.path_attribute", "def href(self, href):\n\n self._href = href", "def href(self, href):\n\n self._href = href", "def href(self, href):\n\n self._href = href", "def href(self, href):\n\n self._href = href", "async def async_set_media_uri(self, uri):\n self._media_uri = uri", "def full_url(self):\n return self.url + \"?channel_id=\" + self.external_id" ]
[ "0.7107602", "0.7107602", "0.7107602", "0.7107602", "0.591665", "0.58686566", "0.5748005", "0.574481", "0.57201016", "0.57201016", "0.57201016", "0.57201016", "0.57201016", "0.57201016", "0.57201016", "0.57201016", "0.57201016", "0.5711726", "0.5708703", "0.5708703", "0.5708703", "0.5708703", "0.5708703", "0.5708703", "0.56449413", "0.5601016", "0.55887675", "0.55065376", "0.55065376", "0.55002964", "0.54799885", "0.54795223", "0.54795223", "0.5475396", "0.54365766", "0.5395848", "0.5388253", "0.53637433", "0.5355462", "0.5354789", "0.5321961", "0.5272823", "0.5272823", "0.5260659", "0.52519196", "0.52369595", "0.52340925", "0.5195733", "0.514853", "0.5135691", "0.5103985", "0.5103985", "0.5103985", "0.5103985", "0.5103985", "0.50892127", "0.5088958", "0.5061763", "0.50594544", "0.5045212", "0.5045212", "0.5045212", "0.5045212", "0.5045212", "0.5045212", "0.5043418", "0.5041003", "0.503385", "0.5006703", "0.49930325", "0.49812233", "0.4978247", "0.49725136", "0.49725136", "0.49725136", "0.49701318", "0.49669003", "0.49392605", "0.49202782", "0.4914735", "0.49060288", "0.48913494", "0.48791444", "0.48773807", "0.48634657", "0.48567387", "0.48524547", "0.48414817", "0.48414817", "0.48354784", "0.48344514", "0.48326418", "0.48326418", "0.48326418", "0.48326418", "0.4823201", "0.48111427" ]
0.8014554
3
Returns the model properties as a dict
def to_dict(self):
    result = {}

    for attr, _ in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_dict(self):\n return self.properties", "def to_dict(self):\n return self.properties", "def get_properties(self):\n return self.properties", "def asdict(self):\n return self._prop_dict", "def json(self):\n rv = {\n prop: getattr(self, prop)\n for prop in self.__properties__\n if prop in vars(self)\n }\n rv.update(self._props)\n return rv", "def get_properties(self):\n return self.properties", "def get_properties():", "def getProperties():", "def properties(self):\r\n if self._properties is None:\r\n res = self._con.get(self._url, {'f':'json'})\r\n self._properties = PropertyMap(res)\r\n return self._properties", "def properties(self):\r\n if self._properties is None:\r\n res = self._con.get(self._url, {'f':'json'})\r\n self._properties = PropertyMap(res)\r\n return self._properties", "def getProperties(self):\n return self.properties", "def __properties__(self) -> dict:\r\n parameters = [\r\n d for d in dir(self) if (d[0] != \"_\") and (d.count(\"set\") == 0)\r\n and (d.count(\"_c\") == 0) and (d.count(\"_f\") == 0)\r\n ]\r\n\r\n return self.__as_json__(parameters)", "def json_properties(self):\n attributes = []\n all = vars(self)\n for var in all:\n if var[:1] != '_':\n attributes.append(var)\n if isinstance(self, db.Model):\n properties = self.properties().keys()\n for property in properties:\n if property[:1] != '_':\n attributes.append(property)\n return attributes", "def properties(self) -> Any:\n return pulumi.get(self, \"properties\")", "def _properties(self) -> dict[str, dict[str, str]]:\n schema = self.schema(by_alias=False)\n if schema.get('properties') is not None:\n return schema.get('properties', {})\n return schema.get('definitions', {}).get(self.__class__.__name__, {}).get('properties', {})", "def get_model_properties(self):\n properties = {}\n\n filename = self._get_data_filename(\"modelargs.json\")\n with open(filename, \"r\") as f:\n results = json.loads(f.read())\n properties[\"image_size\"] = results.get(\"image_size\")\n properties[\"num_classes\"] = results.get(\"num_classes\")\n properties[\"model\"] = results.get(\"model\")\n properties[\"name\"] = results.get(\"name\")\n properties[\"filter_size\"] = results.get(\"filter_size\", 3)\n properties[\"increase_factor\"] = results.get(\"increase_factor\", 0)\n self.model = properties[\"name\"] # regardless of the name of the folder, this will get the proper model name (i.e. 
<modelname>.cntk)\n\n # optional property\n properties[\"trainer\"] = results.get(\"trainer\", \"CNTK 2.2\")\n\n self._ensure_model_file()\n properties[\"size_mb\"] = round(os.path.getsize(self.model_file) / (1000 * 1000))\n\n return properties", "def as_dict(self):\n result = {}\n for attr in self.__attr:\n result[attr] = getattr(self, attr)\n return result", "def to_dict_model(self) -> dict:\n return dict((key, getattr(self, key)) for key in self.__mapper__.c.keys())", "def get_properties():\n properties = dict()\n properties['size'] = list()\n properties['color'] = list()\n properties['quality'] = list()\n u = models.Size.query.all()\n for i in u:\n properties['size'].append(i.size_name)\n u = models.Color.query.all()\n for i in u:\n properties['color'].append(i.color_name)\n u = models.Quality.query.all()\n for i in u:\n properties['quality'].append(i.quality_name)\n return make_response(jsonify(properties))", "def get_modelDict(self):\n return self.__modelDict", "def attributes(self):\n return dict(self.__attributes)", "def properties(self):\n return self._properties", "def properties(self):\n return self._properties", "def to_dict(self):\n result = {}\n for p in self.json_properties():\n value = getattr(self, p)\n if isinstance(value, datetime.datetime):\n value = value.strftime('%s%f')[:-3]\n result[Jsonifiable.transform_to_camelcase(p)] = value\n return result", "def properties(self):\n return self._props", "def properties(self):\n pass", "def to_dict(self):\n d = {}\n for attr in self.__class__.attributes:\n d[attr] = getattr(self, attr)\n return d", "def properties_get(self):\n return self._get('properties')", "def _collect_properties(self):\n properties = {\n 'userid': self.user_id,\n 'title': self.get_fullname()\n }\n if not self.ogds_user:\n return properties\n\n for attribute_name in self.ogds_user_attributes:\n value = getattr(self.ogds_user, attribute_name)\n properties[attribute_name] = value\n return properties", "def getPropertyDict(self):\n \n d = self.getChild('__properties')\n if d:\n return d.getDict()\n else:\n return {}", "def get_attributes(self):\n return dict(self.attributes) # return the attributes", "def get_attributes(self):\n return dict(self.attributes) # return the attributes", "def get_attributes(self):\n return dict(self.attributes) # return the attributes", "def to_dict(self, include=None):\n _MODEL = type(self)\n repr_dict = {}\n if include is None:\n include = []\n for name, prop in _MODEL._properties.iteritems():\n if hasattr(prop, 'public') and getattr(prop, 'public', False):\n include.append(name)\n\n for name in include:\n # check if this property is even allowed to be public\n # or has a value set\n if not hasattr(self, name):\n continue\n\n value = getattr(self, name)\n if type(getattr(_MODEL, name)) == ndb.StructuredProperty:\n if isinstance(value, list):\n items = []\n for item in value:\n items.append(item.to_dict(include=None))\n repr_dict[name] = items\n else:\n repr_dict[name] = value.to_dict(include=None)\n elif isinstance(value, date):\n repr_dict[name] = value.isoformat()\n elif isinstance(value, ndb.Key):\n repr_dict[name] = value.urlsafe()\n else:\n repr_dict[name] = value\n\n if self._key:\n repr_dict['key'] = self.get_key_urlsafe()\n return repr_dict", "def to_dict(self):\n properties = {}\n for k in self.__dict__:\n if k == 'POSSIBLE_METHODS':\n continue\n if k == 'keysamplers':\n properties[k] = [i.to_dict() for i in self.__dict__[k] if hasattr(i,'to_dict')]\n elif k in {'pooler'}:\n properties[k] = self.__dict__[k].to_dict()\n 
else:\n properties[k] = deepcopy(self.__dict__[k])\n return properties", "def get_all_properties(cls):\n return ['key', 'id'] + _.keys(cls._properties)", "def get_properties(self):\n return self.name, self.author, self.description, self.fmu_type, self.version, self.guid, self.tool, self.numStates", "def properties(self):\n\n return self._properties", "def ToDict(self):\n atributes_dictionary = {}\n for key, value in self.__dict__.iteritems():\n atributes_dictionary[key] = value\n return atributes_dictionary", "def properties(self):", "def properties(self):", "def properties(self):", "def modelPropertiesDictionary(sql_row_list):\n \n properties_dictionary = \\\n {\n \"id\": sql_row_list[0],\n \"name\": sql_row_list[1],\n \"last_deploy_timestamp\": sql_row_list[2],\n \"active_version\": sql_row_list[3],\n \"build_id\": sql_row_list[4]\n };\n\n return properties_dictionary;", "def as_dict(self):\n data = dict()\n for name in self.fields:\n val = getattr(self, name)\n if isinstance(val, Model):\n val = val.as_dict()\n elif isinstance(val, list) and val and isinstance(val[0], Model):\n val = [sub.as_dict() for sub in val]\n data[name] = val\n return data", "def to_dict(self):\n if self._dict is not None:\n return self._dict\n\n result = {}\n for key in self.ATTRIBUTES:\n value = getattr(self, key)\n if value:\n result[key] = value\n self._dict = result\n return result", "def properties(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"properties\")", "def properties(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"properties\")", "def to_dict(self):\n _dict = {}\n for f in self._meta.fields:\n if f.name == 'created':\n _dict[f.name] = str(f.value_from_object(self))\n else:\n _dict[f.name] = f.value_from_object(self)\n\n return _dict", "def to_dict(self):\r\n return self.__dict__", "def properties(self):\n return None", "def properties(self):\n return None", "def to_dict(self):\n return attr.asdict(self)", "def as_dict(self):\n return self.__dict__", "def _get_model_state(self) -> dict:\n return dict(model=self.model, kwargs=self._model_kwargs)", "def dictify(self):\n return {\n \"name\" : self.name,\n \"lastname\" : self.lastname,\n \"phone\" : self.phone,\n \"email\" : self.email\n }", "def to_dict(self):\n return {\n \"id\": self.id,\n \"name\": self.name,\n # \"created_by\": self.created_by,\n # \"created_on\": self.created_on,\n # \"modified_by\": self.modified_by,\n # \"modified_on\": self.modified_on\n }", "def properties(self):\r\n return resources.Properties(self)", "def attributes(self):\n params = self.model.param_array\n return {'parameters': params}", "def properties(self, pk):\n return JsonResponse(self._get_properties(pk))", "def to_dict(self):\n return vars(self)", "def to_dict(self):\n\n # Check if is the right instance.\n if isinstance(self, db.Model):\n # construct a dictionary from column names and values.\n dict_representation = {c.name: getattr(self, c.name) for c in self.__table__.columns}\n return dict_representation\n else:\n raise AttributeError(type(self).__name__ + \" is not instance of \" + db.Model.__name__)", "def bson_properties(self):\n return []", "def to_dict(self):\n return {\n \"id\": self.id,\n \"name\": self.name\n }", "def get_dict(self):\n return", "def to_dict(self):\n return to_dict(self.__dict__)", "def to_json(self):\n properties = self.to_dict()\n if isinstance(self, db.Model):\n properties['id'] = unicode(self.key().id())\n return json.dumps(properties)", "def to_dict(self):", "def to_dict(self):\n return 
self.__dict__", "def to_dict(self):\n return self.__dict__", "def to_dict(self):\n return self.__dict__", "def to_dict(self):\n return self.__dict__", "def to_dict(self):\n return self.__dict__", "def to_dict(self):\n return self.__dict__", "def get_attributes(self) -> Dict[str, str]:\n pass", "def config(self) -> ModelConfigDict:\n return self.config_obj.to_dict()", "def properties(self):\n return self.properties_with_uid[1:]", "def to_dict(self):\n properties = {}\n for k in self.__dict__:\n if k in {'idsSoFar'}:\n continue\n else:\n properties[k] = deepcopy(self.__dict__[k])\n return properties", "def to_dict(self):\n properties = {}\n for k in self.__dict__:\n if k in {'idsSoFar'}:\n continue\n else:\n properties[k] = deepcopy(self.__dict__[k])\n return properties", "def to_dict(self):\n properties = {}\n for k in self.__dict__:\n if k in {'idsSoFar'}:\n continue\n else:\n properties[k] = deepcopy(self.__dict__[k])\n return properties", "def serialise(self):\n return {\n 'id': self.id,\n 'category_id': self.category_id,\n 'name': self.name,\n 'description': self.description,\n 'quantity': self.quantity,\n 'price': self.price,\n 'user_id': self.user_id\n }", "def getPropertiesAll():", "def get_all_properties(self) -> dict:\n return self._request(\n \"post\",\n URL,\n json=attr.asdict(\n Body(\"getAllProperties\", API_VERSION),\n filter=attr.filters.exclude(attr.fields(Body).params),\n ),\n )", "def model_info(self):\n if not self._model_info:\n self._load_model_info()\n try:\n data = json.loads(self._model_info)\n except (TypeError, ValueError):\n data = {}\n return data", "def to_dict(self):\n return {\n 'name': self.get_name(),\n 'description': self.get_description()\n }", "def serialize(self):\n return {\n 'id' : self.id,\n 'name' : self.name,\n }", "def serialize(self):\n return {\n 'id' : self.id,\n 'name' : self.name,\n }", "def as_dict(self):\n return self.__dict__", "def to_dict(self):\r\n\r\n return {\r\n 'product_id': self.product_id,\r\n 'product_name': self.product_name\r\n }", "def serialize(self):\n return {\n 'name' : self.name,\n 'id' : self.id,\n }", "def asdict(self):\n return attr.asdict(self)", "def to_dict(self) -> dict:", "def getDict(self):\n res = {}\n for attr, value in self.__dict__.iteritems():\n if type(attr) is IntType or type(attr) is StringType or type(attr) is LongType or type(attr) is UnicodeType:\n res[attr] = value\n elif isinstance(attr, datetime.datetime):\n res[attr] = value.isoformat('-')\n \n return res", "def attributes(self):\n return self.__dict.keys()", "def dict(self):\n return self.__dict__", "def dict(self):\n return self.__dict__", "def dict(self):\n return self.__dict__", "def dict(self):\n return self.__dict__", "def dict(self):\n return self.__dict__", "def dict(self):\n return self.__dict__", "def dict(self):\n return self.__dict__", "def as_dict(self):\n return {c.key: getattr(self, c.key)\n for c in inspect(self).mapper.column_attrs}" ]
[ "0.7751993", "0.7751993", "0.73391134", "0.7334895", "0.7297356", "0.727818", "0.7159078", "0.71578115", "0.71494967", "0.71494967", "0.71283495", "0.71275014", "0.7122587", "0.71079814", "0.7060394", "0.7043251", "0.7034103", "0.70233124", "0.69635814", "0.69586295", "0.690053", "0.6881568", "0.6881568", "0.6857664", "0.68415916", "0.68122137", "0.680096", "0.67914945", "0.6757063", "0.6753585", "0.6741746", "0.6741746", "0.6741746", "0.6735291", "0.67126125", "0.6697801", "0.6695801", "0.6689893", "0.6680752", "0.66802895", "0.66802895", "0.66802895", "0.66547817", "0.66495687", "0.6633999", "0.6619567", "0.6619567", "0.66156983", "0.66049474", "0.6590706", "0.6590706", "0.6590206", "0.6587873", "0.65861845", "0.65822417", "0.65794736", "0.65792733", "0.657747", "0.6571183", "0.65662557", "0.65637356", "0.6539919", "0.65396816", "0.65283066", "0.65252614", "0.6513477", "0.65098846", "0.65077883", "0.65077883", "0.65077883", "0.65077883", "0.65077883", "0.65077883", "0.6507418", "0.6505772", "0.65015876", "0.64951885", "0.64951885", "0.64951885", "0.64857763", "0.6474329", "0.6469453", "0.64684683", "0.6453606", "0.6453024", "0.6453024", "0.6430734", "0.6429058", "0.6426903", "0.64215595", "0.64201874", "0.6417152", "0.6414739", "0.6411571", "0.6411571", "0.6411571", "0.6411571", "0.6411571", "0.6411571", "0.6411571", "0.64035517" ]
0.0
-1
Returns the model as raw JSON
def to_json(self):
    return json.dumps(sanitize_for_serialization(self.to_dict()))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_json(self) -> str:\n return json.dumps(model_to_dict(self))", "def as_json(self):", "def json(self):\n return {'id': self.id, 'name': self.name, 'description': self.description}", "def model_json(name):\n model = Model.query.filter_by(name=name).first_or_404()\n return jsonify(**model.meta)", "def json(self):\n return self.__json", "def _toJSON(self):\n\n return json.encode(self.__toJSON())", "def serialize(self):\n return json.dumps(self.as_dict())", "def json(self):\n return {\n 'id': self.id,\n 'name': self.name\n }", "def json_raw(self):\n return json.dumps(self.data, cls=ComplexEncoder)", "def to_json(self):\n pass", "def tojson(self):\n return json.dumps(self.jsonable())", "def to_json(self):\n return json.dumps(self.for_json())", "def as_json(self):\n return json.dumps(self.as_dict())", "def as_json(self):\n return json.dumps(self.as_dict())", "def as_json(self):\n return json.dumps(self.as_dict())", "def json (self):\n\n return jsonpickle.encode(self, unpicklable=False)", "def json (self):\n\n return jsonpickle.encode(self, unpicklable=False)", "def json(self):\n class ExtendedJSONEncoder(json.JSONEncoder):\n def default(self, obj):\n if isinstance(obj, datetime.date) or isinstance(obj, datetime.time):\n encoded_object = obj.isoformat()\n else:\n encoded_object = json.JSONEncoder.default(self, obj)\n return encoded_object\n\n obj = {\n 'operation': self.operation,\n 'version': self.version,\n 'language': self.language,\n 'identifiers': self.identifiers,\n 'store_execute': self.store_execute,\n 'status': self.status,\n 'lineage': self.lineage,\n 'inputs': dict((i, [inpt.json for inpt in self.inputs[i]]) for i in self.inputs),\n 'outputs': self.outputs,\n 'raw': self.raw\n }\n\n return json.dumps(obj, allow_nan=False, cls=ExtendedJSONEncoder)", "def create_json_from_model(self):\n json = {\n \"enableAutoReply\": self.enable_auto_reply,\n \"responseSubject\": self.response_subject,\n \"responseBodyPlainText\": self.response_body_plain_text,\n \"responseBodyHtml\": self.response_body_html,\n \"restrictToContacts\": self.restrict_to_contacts,\n \"restrictToDomain\": self.restrict_to_domain,\n \"startTime\": self.start_time,\n \"endTime\": self.end_time\n }\n return json", "def serialize(self):\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"detail\": self.detail,\n \"date_on\": self.date_on,\n }", "def __str__(self):\n return self.as_json_string()", "def __str__(self):\n return self.asJsonString()", "def tojson(self) -> ty.Text:\n return json.dumps(self.todict())", "def toJSON(self):\n raise NotImplementedError()", "def to_json(self):\n return json.dumps(self.serialize())", "def to_json(self):\n return json.dumps(self.to_dict())", "def to_json(self):\n return json.dumps(self.to_dict())", "def __str__(self):\n return self.AsJsonString()", "def json(self):\r\n return {\"id\": self.id, \"code\": self.code, \"description\": self.description, \"xCoor\": self.x_coor, \"yCoor\": self.y_coor, \"latitude\": self.latitude,\r\n \"longitude\": self.longitude, \"waterschapId\": self.waterschap_id, \"watertypeId\": self.watertype_id, \"watertypeKrwId\": self.watertype_krw_id}", "def to_json(self):\n return self.__dict__", "def to_json(self):\n return self.__dict__", "def to_json(self):\n return self.__dict__", "def to_json(self):\n return self.__dict__", "def toJSON(self):\n return json.dumps(self, default=lambda o: o.__dict__)", "def toJson(self):\r\n return self.__dict__", "def json(self):\n return {\n '_id' : self._id,\n 'name' : self.name,\n 'description' : self.description,\n 
}", "def json_friendly(self):", "def json(self):\n return self._json", "def to_json(self):\n return json.dumps(self._asdict())", "def save_model(self) -> bytes:\n\n return serialize_for_zippy(self.model)", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self):\n return json.dumps(self.__dict__)", "def json(self):\n return json.dumps(self, default=lambda o: o.__dict__,\n sort_keys=True, indent=4)", "def to_json(self, **kwargs):\n return dumps(self, **kwargs)", "def to_json(self) :\n return jsonpickle.encode(self)", "def to_json(self):\n return None", "def serialize(self):\n return {\n 'id' : self.id,\n 'created' : self.created.isoformat(),\n 'newCar' : self.newCar,\n 'type' : self.type,\n 'make' : self.make,\n 'model' : self.model,\n 'trim' : self.trim,\n 'year' : self.year,\n 'mileage' : self.mileage,\n 'price' : self.price,\n 'description' : self.description,\n 'dealer_id' : self.dealer_id,\n }", "def __str__(self):\n return json.dumps(self.obj)", "def serialize(self) -> str:\n return json.dumps(self.__dict__)", "def _jsonify(self):\n return self.experiment_record.to_ddb_record()", "def dump_model(self):", "def serialize(self):\n return {\n 'id' : self.id,\n 'name' : self.name,\n 'owner' : self.user.name,\n 'room' : self.room.name,\n 'description' : self.description,\n 'price' : self.price,\n }", "def convert_to_json(self):\n return self.__dict__", "def json(self):\n return {\n 'id': self.id,\n 'id_bank_data': self.id_bank_data,\n 'national_id_document': self.national_id_document,\n 'country': self.country,\n 'name': self.name,\n 'surname': self.surname,\n 'mail': self.mail,\n 'google_token': self.google_token,\n 'role': self.role\n }", "def get_data(self):\n return self.data.to_json()", "def to_json(self):\r\n return {'type': self.type, 'name': self.name}", "def to_json(self):\n\n return self.__dict__", "def to_json(self):\n return json.dumps(self, default=lambda i: i.__dict__)", "def to_json(self) -> JSON:\n pass", "def to_json(self) -> str:\n return json.dumps(asdict(self))", "def serialize(self):\n return {\n \"id\": self.id,\n \"name\": self.name,\n }", "def toJson(self):\n return json.dumps(self.toDict())", "def toJson(self):\n return json.dumps(self.toDict())", "def serialize(self):\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"currency\": self.currency,\n \"old_price\": self.old_price,\n \"price\": self.price,\n \"availability\": self.availability,\n \"url\": self.url,\n \"img_url\": self.img_url\n }", "def serialize(self):\n return {\n 'id' : self.id,\n 'description': self.description,\n 'longitude' : self.longitude,\n 'latitude' : self.latitude,\n 'created_on' : self.created_on,\n 'created_by' : self.created_by,\n 'likes' : self.likes\n }", "def to_json(self):\n properties = self.to_dict()\n if isinstance(self, db.Model):\n properties['id'] = unicode(self.key().id())\n return json.dumps(properties)", "def serialize(self):\n return {\n 'id': self.id,\n 'name': self.name,\n 'ingredients': self.ingredients,\n 'directions': 
self.directions,\n 'type': self.type,\n }", "def __repr__(self):\n return json.dumps(self.__dict__)", "def serialize(self):\n return {\n 'id' : self.id,\n 'name' : self.name,\n 'date' : str(self.date),\n 'owner_id' : self.owner_id,\n }", "def toJSON(self) -> str:\r\n try:\r\n _ = json.dumps(self.value)\r\n value = self.value\r\n except (TypeError, OverflowError):\r\n value = {}\r\n value['object_type'] = self.value.__class__.__name__\r\n if isinstance(self.value, RawData):\r\n type_str = '_{}__'.format(value['object_type'])\r\n for key, data in self.value.__dict__.items():\r\n value[key.replace(type_str, '')] = data\r\n else:\r\n type_str = '_{}'.format(value['object_type'])\r\n for key, data in self.value.__dict__.items():\r\n value[key.replace(type_str, '')] = data\r\n\r\n return json.dumps({'object_type' : 'RawData', 'version' : self.version,\r\n 'timestamp' : self.timestamp, 'label' : self.label,\r\n 'value' : value})", "def dict(self):\n\t\treturn self.json", "def serialize(self):\n return {\n 'id' : self.id,\n 'name' : self.name,\n 'phone' : self.phone,\n 'email' : self.email,\n 'address' : self.address,\n 'picture' : self.picture,\n }", "def cls2json(self):\n return json.dumps(self.__dict__)", "def cls2json(self):\n return json.dumps(self.__dict__)", "def serialize(self):\n\t\treturn {\n\t\t\t\"id\": self.id,\n\t\t\t\"name\": self.name\n\t\t}", "def serialize(self):\n\t\treturn {\n\t\t\t\"id\": self.id,\n\t\t\t\"name\": self.name\n\t\t}", "def to_string(self):\n return json.dumps(self.to_json(), cls=ObjectEncoder)", "def jsonify(self):\n jsonObject = self.getJsonObject()\n return json.dumps(jsonObject)", "def __repr__(self):\n _dict = dict()\n for name in self.fields.keys():\n _dict[name] = getattr(self, name)\n return json.dumps(_dict)", "def encode(self):\n return json.dumps(self.get_data(), indent=4)", "def as_json(self) -> str:\n return json.dumps(self, cls=_ProgrammeJSONEncoder)", "def serialize(self) -> bytes:\n return json_dumps(self._to_dict()).encode()", "def serialize(self) -> bytes:\n return json_dumps(self._to_dict()).encode()" ]
[ "0.7920127", "0.7444696", "0.72223186", "0.7181604", "0.71696025", "0.7103236", "0.7078409", "0.7004283", "0.69838184", "0.6976507", "0.69680184", "0.69473517", "0.6939899", "0.6939899", "0.6939899", "0.6935423", "0.6935423", "0.6913819", "0.6902281", "0.6876575", "0.68757135", "0.687536", "0.6874749", "0.6874215", "0.6872937", "0.68683887", "0.68683887", "0.68375653", "0.6834542", "0.68333185", "0.68333185", "0.68333185", "0.68333185", "0.68301636", "0.68265754", "0.68236154", "0.68189853", "0.6814629", "0.68140143", "0.67939585", "0.6793877", "0.6793877", "0.6793877", "0.6793877", "0.6793877", "0.6793877", "0.6793877", "0.6793877", "0.6793877", "0.67937243", "0.67859626", "0.6768311", "0.6745471", "0.6745412", "0.6744855", "0.67407274", "0.6733251", "0.6725511", "0.6719252", "0.6719172", "0.6715111", "0.67119116", "0.6699035", "0.66811484", "0.66769356", "0.66666216", "0.6655566", "0.6654225", "0.6644305", "0.6643218", "0.6643218", "0.664063", "0.6630534", "0.6624686", "0.66083", "0.66064024", "0.66056734", "0.6600494", "0.6597923", "0.65904725", "0.6589898", "0.6589898", "0.65884054", "0.65884054", "0.6579075", "0.6578504", "0.6578061", "0.6577392", "0.6573383", "0.65671873", "0.65671873" ]
0.6839134
36
Returns the string representation of the model
def to_str(self): return pformat(self.to_dict())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return super().__str__() + self.model.__str__()", "def __str__(self) -> str:\n # noinspection PyUnresolvedReferences\n opts = self._meta\n if self.name_field:\n result = str(opts.get_field(self.name_field).value_from_object(self))\n else:\n model_fields = get_model_fields(\n opts.model,\n foreign=False,\n m2m=False,\n exclude=self.exclude_from_str\n )\n # TODO: replace the above with the below to remove the get_model_fields call:\n # model_fields = [\n # f for f in opts.get_fields()\n # if f.concrete\n # and not (f.primary_key or f.is_relation or f.name in self.exclude_from_str)\n # ]\n result = \" \".join(\n [\n str(fld.value_from_object(self))\n for fld in model_fields\n if fld.value_from_object(self)\n ]\n )\n return result.strip() or super().__str__()", "def __str__(self):\n return '%s%s' % (self.name, ' - %s' % self.model if self.model else '')", "def __str__(self):\n model = self._meta.verbose_name.title()\n return f\"{model:s}: {self.name:s}\"", "def __str__(self):\n model = self._meta.verbose_name.title()\n return f\"{model:s}: {self.name:s}\"", "def __repr__(self):\n\n mod = f\"{self.__class__.__name__} Model\"\n try:\n mod += f': {self.filename}'\n except AttributeError:\n pass\n s = [mod]\n for name, v in self.metadata.items():\n s += [f\"{name:16} : {v}\"]\n return '\\n'.join(s)", "def __str__(self):\n \n res = ['>>> Model %(model_name)s <<<']\n res.append('')\n res.append('Independent parameters:')\n res.append('-----------------------')\n res.append('')", "def __str__(self):\n return \"DataModel(name={},attributes={},description={})\".format(\n self.name, {a.name: str(a) for a in self.attributes}, self.description\n )", "def model_info(self) -> str:\n return self._model_info(self.model).decode(\"utf-8\")", "def __str__(self):\n return str(self.serialize())", "def __str__ (self) :\n\n return self.as_string()", "def __str__(self):\n\n return self.toString()", "def __str__(self):\n msg = [\n f'{self.model=}',\n f'{self.field=}',\n f'{self.fxx=}',\n f'{self.date=}',\n f'{self.priority=}',\n ]\n return '\\n'.join(msg)", "def __str__(self):\n model = self._meta.verbose_name.title()\n title = self.extended_object.get_title()\n return f\"{model:s}: {title:s}\"", "def __repr__(self):\n return grid_search_to_str(self.model)", "def __str__(self):\n return self.toString()", "def __str__(self):\n return str(self.__dict__)", "def __str__(self):\n return str(self.__dict__)", "def to_representation(self) -> str:\n raise NotImplementedError()", "def __str__(self):\n return str(self.obj)", "def __str__(self):\n return self.make_flat()", "def dump_model(self):", "def __str__(self):\n return str(self.__dict__['_obj'])", "def __str__(self) -> str:\n model_str = [\"\\nModel info:\\n\", \" Unimodal encoder:\\n\"]\n\n for modality in range(self.num_modalities):\n model_str.append(f\" ({modality + 1}) {self.unimodal_encoder[modality]}\")\n\n model_str.append(\"\\n\\n Unimodal decoder:\\n\")\n for modality in range(self.num_modalities):\n model_str.append(f\" ({modality + 1}) {self.unimodal_decoder[modality]}\")\n\n if self.multimodal_decoder is not None:\n model_str.append(\"\\n\\n Multimodal decoder:\\n\")\n model_str.append(f\" {self.multimodal_decoder}\")\n\n return \"\".join(model_str)", "def __repr__(self):\n s = 'text model name: ' + self.name + '\\n'\n s += ' number of words: ' + str(len(self.words)) + '\\n'\n s += ' number of word lengths: ' + str(len(self.word_lengths)) + '\\n'\n s += ' number of stems: ' + str(len(self.stems)) + '\\n'\n s += ' number of sentence 
lengths: ' + str(len(self.sentence_lengths)) + '\\n'\n s += ' most common words: ' + str(self.common_word) + '\\n'\n\n return s", "def to_string(self):\r\n return self.__str__()", "def __repr__(self):\n return '<ModelSignature(model_name=%r)>' % self.model_name", "def __repr__(self):\n return '<ModelSignature(model_name=%r)>' % self.model_name", "def __str__(self):\n return str(self.get_data())", "def __str__(self):\n return f\"model {self._name}\"", "def __str__(self):\n\n return self.raw_field", "def __repr__(self):\n \n s = 'text model name: ' + self.name + '\\n' \n s += ' number of words: ' + str(len(self.words)) + '\\n'\n s += ' number of word lengths: ' + str(len(self.word_lengths)) + '\\n'\n s += ' number of sentence lengths: ' + str(len(self.sentence_lengths)) + '\\n'\n s += ' number of word stems: ' + str(len(self.stems)) + '\\n'\n s += ' number of commas counts: ' + str(len(self.commas_per_sentence)) + '\\n'\n return s", "def serialize(self):\n\n\t\treturn str(self)", "def __str__(self):\n return self.get_str()", "def serialize(self):\n\n return str(self)", "def __str__(self) -> str:\n if self.name_field:\n return str(getattr(self, self.name_field))\n # noinspection PyUnresolvedReferences\n data = [\n # Collect the string representations of related objects.\n # getattr(self, fk_field.attname) and\n # fk_field.value_from_object(self) would only return the primary\n # key of the related object.\n str(getattr(self, fk_field.name))\n for fk_field in get_model_fields(\n self._meta.model, base=False, foreign=True, m2m=False\n )\n if not fk_field.null\n ]\n if len(data) < 2:\n # Cannot build a more meaningful representation than the default.\n return super().__str__()\n else:\n template = \"{}\" + \" ({})\" * (len(data) - 1)\n return template.format(*data)", "def __str__(self):\n return self.s", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __str__(self):\n return self.__repr__()", "def __repr__(self):\n\n # info string\n info = self.model.__repr__()\n info += \"\\n=========================\\n\"\n info += f\"Train data length:\\t\\t{ len(self.train_dataset) }\\n\"\n info += f\"Eval sata length:\\t\\t{ len(self.eval_dataset) }\\n\"\n info += f\"Optimizer:\\t\\t\\t\\t{ str(self.optimizer).split('(')[0] }\\n\"\n info += f\"Criterion:\\t\\t\\t\\t{ str(self.criterion).split('(')[0] }\\n\"\n info += f\"Training Environment:\\t{ self.device.type }\\n\"\n info += f\"Show information:\\t\\t{ 'True' if self.info else 'False' }\\n\"\n info += \"=========================\\n\"\n\n return info", "def __repr__(self):\n s = 'text model name: ' + self.name + '\\n'\n s += ' number of words: ' + str(len(self.words)) + '\\n'\n s += ' number of word lengths: ' + str(len(self.word_lengths)) + '\\n'\n s += ' number of stems: ' + str(len(self.stems)) + '\\n'\n s += ' number of sentence lengths: ' + str(len(self.sentence_lengths))\\\n + '\\n'\n s += ' number of punctuation types: ' + str(len(self.punctuation))\n return s", "def dumps(self, indent=0):\n outstr = \" \"*indent + \"MewloDbModel object '{0}' attribute values:\\n\".format(self.__class__.__name__)\n public_props = (name for name in dir(object) if not name.startswith('_'))\n for name in public_props:\n outstr += \" \"*indent + \"{0}: {1}\\n\".format(name, str(getattr(self,name)))\n return outstr", "def __str__(self):\n model = self._meta.verbose_name.title()\n title = self.title or str(_(\"Empty 
title\"))\n\n return f\"{model:s}: {title:s}\"", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()", "def __repr__(self):\n return self.to_str()" ]
[ "0.85856134", "0.7814518", "0.77898884", "0.7751367", "0.7751367", "0.7712228", "0.76981676", "0.76700574", "0.7651133", "0.7597206", "0.75800353", "0.7568254", "0.7538184", "0.75228703", "0.7515832", "0.7498764", "0.74850684", "0.74850684", "0.7467648", "0.74488163", "0.7442643", "0.74416703", "0.7433768", "0.7411771", "0.7405439", "0.7379557", "0.7361716", "0.7361716", "0.732774", "0.7325511", "0.732528", "0.73097324", "0.73078936", "0.73001266", "0.7296789", "0.7292791", "0.7289445", "0.7287187", "0.7287187", "0.7287187", "0.7287187", "0.7287187", "0.7279803", "0.7261615", "0.7250399", "0.7244789", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068", "0.7223068" ]
0.0
-1
For `print` and `pprint`
def __repr__(self): return self.to_str()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pprint(*args, **kwargs):\n if PRINTING:\n print(*args, **kwargs)", "def print_out():\n pass", "def custom_print(*objects):\n print(*objects, sep=OFS, end=ORS)", "def _print(self, *args):\n return _ida_hexrays.vd_printer_t__print(self, *args)", "def _printable(self):\n pass", "def _print_custom(self):\n pass", "def pypprint(*args, **kwargs): # type: ignore\n from typing import Iterable\n\n if len(args) != 1:\n print(*args, **kwargs)\n return\n x = args[0]\n if isinstance(x, dict):\n for k, v in x.items():\n print(f\"{k}:\", v, **kwargs)\n elif isinstance(x, Iterable) and not isinstance(x, str):\n for i in x:\n print(i, **kwargs)\n else:\n print(x, **kwargs)", "def pprint(self):\n # just here for defining the interface; work is done in subclasses\n pass", "def print(*args, **kwargs):\n with P_LOCK:\n __builtins__.print(*args, **kwargs)", "def print(self):\n # Your implementation here", "def p(value):\n pp.pprint(value)", "def static_print(*args, __p=print, **kwargs):\n __p(*args, **kwargs)", "def print(self, *args, **kwargs):\n print(*args, **kwargs)", "def pprint(self):\n print(self.pprint_str())", "def pprint(obj):\n for argname in sorted([x for x in dir(obj) if not x.startswith('__')]):\n # Skip callables\n if hasattr(getattr(obj, argname), '__call__'):\n continue\n print(\"{} : {}\".format(argname, getattr(obj, argname)))", "def print_(self, s: str) -> None:", "def my_pprint(obj, intend = 0):\n if isinstance(obj, dict):\n for key, value in obj.items():\n print(intend*\" \"+str(key)+\" : \")\n my_pprint(value, intend = intend + 4)\n print()\n elif isinstance(obj, list):\n for value in obj:\n my_pprint(value, intend = intend + 4)\n print()\n elif isinstance(obj, bytes):\n print(\"<binary data>\")\n \n else:\n try:\n print(intend*\" \"+str(obj))\n except UnicodeDecodeError:\n print(intend*\" \"\"<?>\")", "def test_print(chikin):\n chikin.print()", "def _print(self, text):\n\t\tif self.verbose:\n\t\t\tprint text", "def out(*args):\r\n print(*args)", "def __pprint(object, stream=None, indent=1, width=80, depth=None):\n printer = PrettyPrinterExt(\n stream=stream, indent=indent, width=width, depth=depth)\n printer.pprint(object)", "def DumpPprint(data):\n #NOTE(g): Import is done here, instead of the top of the file, to not require this module if it is not used\n import pprint\n \n text = pprint.pformat(data)\n \n return text", "def repl_print_statements():\n pass", "def test_03_pass_print(self):\n print('Hello World!')", "def p(self):\n self.printstdout = True", "def print(*args, **kwargs):\n new_args = []\n for arg in args:\n if builtins.isinstance(arg, models.Point):\n new_args.append(\"({0}, {1})\".format(arg.x, arg.y))\n else:\n new_args.append(arg)\n\n builtins.print(*new_args, **kwargs)", "def real_print(*args, **kwargs):\n\n kwargs.setdefault('file', real_stdout)\n _python_print_function(*args, **kwargs)", "def to_print_out(self):\n self.error_throw('output')\n\n if self.rank_method == methods_of_ranking[3]: #'diversified_ranking'\n self.output_div('print')\n else:\n self.output('print')", "def debug_print(debug_data):\n if DEBUG_MODE == \"true\":\n pp.pprint(debug_data)", "def print(self):\r\n self.print_avec_separateur()", "def pprint(self):\r\n for i in self.items():\r\n print '%s => %r'%i", "def pprint(self, parameter_s=''):\n ptformatter = self.shell.display_formatter.formatters['text/plain']\n ptformatter.pprint = bool(1 - ptformatter.pprint)\n print('Pretty printing has been turned',\n ['OFF','ON'][ptformatter.pprint])", "def print(self):\n print(self.pretty_str())", "def 
eprint(*args, **kwargs):\n\tprint(*args, file=sys.stderr, **kwargs)", "def test_print4(self):\n writer = StringIO()\n collatz_print(writer, 1, 1, 1)\n self.assertEqual(writer.getvalue(), \"1 1 1\\n\")", "def _print(self, *args, **kwargs) -> None:\n # Only print in verbose mode\n if self._verbose:\n arglist = list(args)\n arglist[0] = f\"[buddy-{self._experiment_name}] {args[0]}\"\n print(*arglist, **kwargs)", "def use_pypprint_for_implicit_print(self) -> None:\n if self.implicit_print is not None:\n self.implicit_print.func.id = \"pypprint\" # type: ignore\n # Make sure we import it later\n self.undefined.add(\"pypprint\")", "def test_print(self):\n writer = StringIO()\n collatz_print(writer, 1, 10, 20)\n self.assertEqual(writer.getvalue(), \"1 10 20\\n\")", "def pprint(self):\n return pformat(repr(self))", "def printer(message):\n if VERBOSITY:\n pprint(message)", "def rec_print(p):\n if len(p) == 0:\n return\n t = p.pop(0)\n print t\n rec_print(p)", "def printc(*a, **kw):\n print(*a, **kw)", "def pformat(object):\r\n return PrettyPrinter().pformat(object)", "def pr(x):\n Card.print_pretty_cards(x)", "def debug_print(self, *content):\n if self.debug:\n print(*content)", "def pprint(object, stream=None):\r\n printer = PrettyPrinter(stream=stream)\r\n printer.pprint(object)", "def safe_print(*objs, errors=\"replace\"):\n\tprint(*(to_stdout(str(o), errors) for o in objs))", "def magic_Pprint(self, parameter_s=''):\n \n self.shell.outputcache.Pprint = 1 - self.shell.outputcache.Pprint\n print 'Pretty printing has been turned', \\\n ['OFF','ON'][self.shell.outputcache.Pprint]", "def print_output(tree):\n print_value(tree)\n print_tree(tree)", "def _Print(self, t):\n self.RaiseError(t, \"Print not supported\")", "def vprint(*args, **kwargs ):\n\n forceprint = False\n for key in kwargs:\n if key == \"forceprint\":\n forceprint =kwargs[key]\n \n line = ''\n if debug or forceprint : \n for arg in args:\n line += str(arg) +\" \"\n log = open(exepath + 'pyframe.log', 'a') \n log.write(line + \"\\n\")\n log.close() \n print line", "def eprint(*pargs, **kargs):\n print('\\u001b[31m', end='', file=sys.stderr)\n print(*pargs, file=sys.stderr, **kargs)\n print('\\u001b[0m', end='', file=sys.stderr)", "def printer(obj, ident=''):\n import inspect\n print ident + obj.__class__.__name__.upper()\n ident += ' '\n lists = []\n for name in dir(obj):\n elem = getattr(obj, name)\n if isinstance(elem, list) and name != u'decoded_content':\n lists.append(elem)\n elif not inspect.ismethod(elem):\n if not name.startswith('__'):\n if name == u'data' and elem:\n print ident + u'data = '\n printer(elem, ident + ' ')\n else:\n print ident + u'%s\\t= %s' % (name, getattr(obj, name))\n for l in lists:\n for i in l:\n printer(i, ident + ' ')", "def printer(obj, ident=''):\n import inspect\n print ident + obj.__class__.__name__.upper()\n ident += ' '\n lists = []\n for name in dir(obj):\n elem = getattr(obj, name)\n if isinstance(elem, list) and name != u'decoded_content':\n lists.append(elem)\n elif not inspect.ismethod(elem):\n if not name.startswith('__'):\n if name == u'data' and elem:\n print ident + u'data = '\n printer(elem, ident + ' ')\n else:\n print ident + u'%s\\t= %s' % (name, getattr(obj, name))\n for l in lists:\n for i in l:\n printer(i, ident + ' ')", "def _print(self, *args):\n return _ida_hexrays.qstring_printer_t__print(self, *args)", "def pprint(self):\n def pprintStr(node):\n s = \"(\" + str(node.value) \n for action in node.children:\n s = s + \", \" + pprintStr(node.children[action])\n s = s + \")\"\n 
return s\n\n print pprintStr(self)", "def hook_print():\n sys.stdout = PrintHook()", "def cmdPrint( self, *args):\n return self.cmd( *args, **{ 'verbose': True } )", "def print_list(self):\r\n pass", "def _get_print_fn(file=sys.stdout):\n def _print_fn(op, xin,):\n for attr in op.attrs:\n temp = getattr(xin, attr)\n if callable(temp):\n pmsg = temp()\n else:\n pmsg = temp\n print(op.message, attr, '=', pmsg, file=file)\n return _print_fn", "def debugprint(obj, depth=-1, print_type=False,\r\n file=None, ids='CHAR', stop_on_name=False):\r\n if file == 'str':\r\n _file = StringIO()\r\n elif file is None:\r\n _file = sys.stdout\r\n else:\r\n _file = file\r\n done = dict()\r\n results_to_print = []\r\n order = []\r\n if isinstance(obj, gof.Variable):\r\n results_to_print.append(obj)\r\n elif isinstance(obj, gof.Apply):\r\n results_to_print.extend(obj.outputs)\r\n elif isinstance(obj, Function):\r\n results_to_print.extend(obj.maker.fgraph.outputs)\r\n order = obj.maker.fgraph.toposort()\r\n elif isinstance(obj, (list, tuple)):\r\n results_to_print.extend(obj)\r\n elif isinstance(obj, gof.FunctionGraph):\r\n results_to_print.extend(obj.outputs)\r\n order = obj.toposort()\r\n elif isinstance(obj, (int, long, float, numpy.ndarray)):\r\n print obj\r\n else:\r\n raise TypeError(\"debugprint cannot print an object of this type\", obj)\r\n for r in results_to_print:\r\n debugmode.debugprint(r, depth=depth, done=done, print_type=print_type,\r\n file=_file, order=order, ids=ids,\r\n stop_on_name=stop_on_name)\r\n if file is _file:\r\n return file\r\n elif file == 'str':\r\n return _file.getvalue()\r\n else:\r\n _file.flush()", "def test_print1(self):\n writer = StringIO()\n collatz_print(writer, 100, 200, 125)\n self.assertEqual(writer.getvalue(), \"100 200 125\\n\")", "def printOutput(self):\n pass", "def _print(self, *args):\n return _ida_hexrays.cnumber_t__print(self, *args)", "def setPrint():\n (e,d,sr,sw) = codecs.lookup('utf-8')\n unicode_to_utf8 = sw(sys.stdout)\n sys.stdout = unicode_to_utf8", "def print(*args, sep=\" \"):\n pass", "def pr(string, verbose):\n if(verbose):\n print(string)", "def printv(self, *arg):\n if self.verbose:\n print(*arg)", "def print(self):\n\n print(self)", "def _p(self, *args, level=2, **kwargs):\n if self._verbosity >= level:\n print(*args, **kwargs)", "def test_print2(self):\n writer = StringIO()\n collatz_print(writer, 201, 210, 89)\n self.assertEqual(writer.getvalue(), \"201 210 89\\n\")", "def print_pointers(self):\n\n ### FILL IN ###", "def foo_printer(self):\n print(\"\\nHi I'm {}\".format(self.foo))", "def printed(method):\n\t\tdef wrapper(cls, *args):\n\t\t\tif cls.verbose:\n\t\t\t\treturn method(cls, *args)\n\t\treturn wrapper", "def print_me(self, tabs=0, tab=' '):\n pre = tab*tabs\n print(pre+'Producer:')\n print(pre+' produces:', self._produces)\n print(pre+' consumes:', self._consumes)\n print(pre+' transfer:', self._transfer)\n print(pre+' capacity:', self._capacity)", "def _print(cls, quad):\n\t\tprint(\"\\nLIGHT OUTPUT:\\n<<<<{}>>>>\".format(ast.literal_eval(str(cls.get_address_value(quad.result)))))\n\t\tprint(\"END\")\n\n\t\tvar = cls.get_address_value(quad.result)\n\t\tif isinstance(var, collections.Iterable):\n\t\t\tprint(\"DEEP COPY\")\n\t\t\tcls.print_queue.enqueue(copy.deepcopy(var))\n\t\telse:\n\t\t\tcls.print_queue.enqueue(var)", "def printout(*args, **kwargs):\n console_print(sys.stdout, *args, **kwargs)", "def pprint(x):\n if is_theano_object(x):\n return _gettheano().printing.pprint(x)\n else:\n return str(x)", "def PrettyPrint(self):\r\n 
print(self.data)\r\n return", "def print(self):\n self.print_avec_separateur(\" \")", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def pprint_helper(self, angle, indent):\n # just here for defining the interface; work is done in subclasses\n pass", "def _pprint(params, offset=0, printer=repr):\n # Do a multi-line justified repr:\n param_names = [p for p in params.keys() if p is not \"cost\"]\n param_names.sort()\n\n params_list = list()\n this_line_length = offset\n line_sep = ',\\n' + (1 + offset // 2) * ' '\n for i, name in enumerate(param_names):\n value = params[name]\n if isinstance(value, float):\n this_repr = '%s=%s' % (name, str(value))\n else:\n this_repr = '%s=%s' % (name, printer(value))\n if len(this_repr) > 500:\n this_repr = this_repr[:300] + '...' + this_repr[-100:]\n if i > 0:\n if (this_line_length + len(this_repr) >= 75 or '\\n' in this_repr):\n params_list.append(line_sep)\n this_line_length = len(line_sep)\n else:\n params_list.append(', ')\n this_line_length += 2\n params_list.append(this_repr)\n this_line_length += len(this_repr)\n # options = np.get_printoptions()\n # np.set_printoptions(**options)\n lines = ''.join(params_list)\n # Strip trailing space to avoid nightmare in doctests\n lines = '\\n'.join(l.rstrip(' ') for l in lines.split('\\n'))\n return lines", "def init_printing(pretty_print=True, order=None, use_unicode=None):\n if pretty_print:\n stringify_func = lambda arg: pretty(arg, order=order, use_unicode=use_unicode)\n else:\n stringify_func = sstrrepr\n\n try:\n import IPython\n\n ip = IPython.ipapi.get()\n\n if ip is not None:\n def result_display(self, arg):\n \"\"\"IPython's pretty-printer display hook.\n\n This function was adapted from:\n\n ipython/IPython/hooks.py:155\n\n \"\"\"\n if self.rc.pprint:\n out = stringify_func(arg)\n\n if '\\n' in out:\n print\n\n print out\n else:\n print repr(arg)\n\n ip.set_hook('result_display', result_display)\n return\n except ImportError:\n pass\n\n import __builtin__, sys\n\n def displayhook(arg):\n \"\"\"Python's pretty-printer display hook.\n\n This function was adapted from:\n\n http://www.python.org/dev/peps/pep-0217/\n\n \"\"\"\n if arg is not None:\n __builtin__._ = None\n print stringify_func(arg)\n __builtin__._ = arg\n\n sys.displayhook = displayhook", "def print_verbose(self) -> None:\n print(self)\n if self.meta is not None:\n print(self.meta.__repr__())", "def _print(self, *args):\n return _ida_hexrays.cinsn_t__print(self, *args)", "def my_print(self):\n if self.__size == 0:\n print(\"\")\n return\n [print(\"\") for x in range(0, self.__position[1])]\n for i in range(0, self.__size):\n [print(\" \", end=\"\") for i in range(0, self.__position[0])]\n [print(\"#\", end=\"\") for j in range(0, self.__size)]\n print(\"\")", "def sequential_print_statements():\n pass", "def print_post():\n print('| | |'),", "def print_(*args, **kwargs):\r\n fp = kwargs.pop(\"file\", sys.stdout)\r\n if fp is None:\r\n return\r\n def write(data):\r\n if not isinstance(data, basestring):\r\n data = str(data)\r\n fp.write(data)\r\n want_unicode = False\r\n sep = kwargs.pop(\"sep\", None)\r\n if sep is not None:\r\n if isinstance(sep, 
unicode):\r\n want_unicode = True\r\n elif not isinstance(sep, str):\r\n raise TypeError(\"sep must be None or a string\")\r\n end = kwargs.pop(\"end\", None)\r\n if end is not None:\r\n if isinstance(end, unicode):\r\n want_unicode = True\r\n elif not isinstance(end, str):\r\n raise TypeError(\"end must be None or a string\")\r\n if kwargs:\r\n raise TypeError(\"invalid keyword arguments to print()\")\r\n if not want_unicode:\r\n for arg in args:\r\n if isinstance(arg, unicode):\r\n want_unicode = True\r\n break\r\n if want_unicode:\r\n newline = unicode(\"\\n\")\r\n space = unicode(\" \")\r\n else:\r\n newline = \"\\n\"\r\n space = \" \"\r\n if sep is None:\r\n sep = space\r\n if end is None:\r\n end = newline\r\n for i, arg in enumerate(args):\r\n if i:\r\n write(sep)\r\n write(arg)\r\n write(end)", "def print_(*args, **kwargs):\r\n fp = kwargs.pop(\"file\", sys.stdout)\r\n if fp is None:\r\n return\r\n def write(data):\r\n if not isinstance(data, basestring):\r\n data = str(data)\r\n fp.write(data)\r\n want_unicode = False\r\n sep = kwargs.pop(\"sep\", None)\r\n if sep is not None:\r\n if isinstance(sep, unicode):\r\n want_unicode = True\r\n elif not isinstance(sep, str):\r\n raise TypeError(\"sep must be None or a string\")\r\n end = kwargs.pop(\"end\", None)\r\n if end is not None:\r\n if isinstance(end, unicode):\r\n want_unicode = True\r\n elif not isinstance(end, str):\r\n raise TypeError(\"end must be None or a string\")\r\n if kwargs:\r\n raise TypeError(\"invalid keyword arguments to print()\")\r\n if not want_unicode:\r\n for arg in args:\r\n if isinstance(arg, unicode):\r\n want_unicode = True\r\n break\r\n if want_unicode:\r\n newline = unicode(\"\\n\")\r\n space = unicode(\" \")\r\n else:\r\n newline = \"\\n\"\r\n space = \" \"\r\n if sep is None:\r\n sep = space\r\n if end is None:\r\n end = newline\r\n for i, arg in enumerate(args):\r\n if i:\r\n write(sep)\r\n write(arg)\r\n write(end)", "def debugprint(r, prefix='', depth=-1, done=None, print_type=False,\r\n file=sys.stdout, print_destroy_map=False,\r\n print_view_map=False, order=None, ids='CHAR',\r\n stop_on_name=False, prefix_child=None):\r\n if depth == 0:\r\n return\r\n\r\n if order is None:\r\n order = []\r\n\r\n if done is None:\r\n done = dict()\r\n\r\n if print_type:\r\n type_str = ' <%s>' % r.type\r\n else:\r\n type_str = ''\r\n\r\n if prefix_child is None:\r\n prefix_child = prefix\r\n\r\n def get_id_str(obj):\r\n if obj in done:\r\n id_str = done[obj]\r\n elif ids == \"id\":\r\n id_str = \"[@%s]\" % str(id(r))\r\n elif ids == \"int\":\r\n id_str = \"[@%s]\" % str(len(done))\r\n elif ids == \"CHAR\":\r\n id_str = \"[@%s]\" % char_from_number(len(done))\r\n elif ids == \"\":\r\n id_str = \"\"\r\n done[obj] = id_str\r\n return id_str\r\n\r\n if hasattr(r.owner, 'op'):\r\n # this variable is the output of computation,\r\n # so just print out the apply\r\n a = r.owner\r\n\r\n r_name = getattr(r, 'name', '')\r\n # normally if the name isn't set, it'll be None, so\r\n # r_name is None here\r\n if r_name is None:\r\n r_name = ''\r\n\r\n if print_destroy_map:\r\n destroy_map_str = str(getattr(r.owner.op, 'destroy_map', ''))\r\n else:\r\n destroy_map_str = ''\r\n\r\n if print_view_map:\r\n view_map_str = str(getattr(r.owner.op, 'view_map', ''))\r\n else:\r\n view_map_str = ''\r\n if destroy_map_str and destroy_map_str != '{}':\r\n destroy_map_str = 'd=' + destroy_map_str\r\n if view_map_str and view_map_str != '{}':\r\n view_map_str = 'v=' + view_map_str\r\n\r\n o = ''\r\n if order:\r\n o = 
str(order.index(r.owner))\r\n already_printed = a in done # get_id_str put it in the dict\r\n id_str = get_id_str(a)\r\n\r\n if len(a.outputs) == 1:\r\n print >> file, '%s%s %s%s \\'%s\\' %s %s %s' % (prefix, a.op,\r\n id_str,\r\n type_str, r_name,\r\n destroy_map_str,\r\n view_map_str,\r\n o)\r\n else:\r\n print >> file, '%s%s.%i %s%s \\'%s\\' %s %s %s' % (prefix, a.op,\r\n a.outputs.index(r),\r\n id_str, type_str,\r\n r_name,\r\n destroy_map_str,\r\n view_map_str,\r\n o)\r\n if not already_printed:\r\n if (not stop_on_name or\r\n not (hasattr(r, 'name') and r.name is not None)):\r\n new_prefix = prefix_child + ' |'\r\n new_prefix_child = prefix_child + ' |'\r\n for idx, i in enumerate(a.inputs):\r\n if idx == len(a.inputs) - 1:\r\n new_prefix_child = prefix_child + ' '\r\n\r\n debugprint(i, new_prefix, depth=depth - 1, done=done,\r\n print_type=print_type, file=file, order=order,\r\n ids=ids, stop_on_name=stop_on_name,\r\n prefix_child=new_prefix_child)\r\n else:\r\n #this is an input variable\r\n id_str = get_id_str(r)\r\n print >> file, '%s%s %s%s' % (prefix, r, id_str, type_str)\r\n\r\n return file", "def bpprint(self, out=None):\n if out is None:\n out = sys.stdout\n print(self.bpformat(), file=out)", "def vprint(expr, **settings):\n\n outstr = vsprint(expr, **settings)\n\n import builtins\n if (outstr != 'None'):\n builtins._ = outstr\n print(outstr)", "def print_(*args, **kwargs):\n fp = kwargs.pop(\"file\", sys.stdout)\n if fp is None:\n return\n\n def write(data):\n if not isinstance(data, basestring):\n data = str(data)\n fp.write(data)\n want_unicode = False\n sep = kwargs.pop(\"sep\", None)\n if sep is not None:\n if isinstance(sep, unicode):\n want_unicode = True\n elif not isinstance(sep, str):\n raise TypeError(\"sep must be None or a string\")\n end = kwargs.pop(\"end\", None)\n if end is not None:\n if isinstance(end, unicode):\n want_unicode = True\n elif not isinstance(end, str):\n raise TypeError(\"end must be None or a string\")\n if kwargs:\n raise TypeError(\"invalid keyword arguments to print()\")\n if not want_unicode:\n for arg in args:\n if isinstance(arg, unicode):\n want_unicode = True\n break\n if want_unicode:\n newline = unicode(\"\\n\")\n space = unicode(\" \")\n else:\n newline = \"\\n\"\n space = \" \"\n if sep is None:\n sep = space\n if end is None:\n end = newline\n for i, arg in enumerate(args):\n if i:\n write(sep)\n write(arg)\n write(end)", "def _pprint(params, offset=0, printer=repr):\n # Do a multi-line justified repr:\n options = numpy.get_printoptions()\n numpy.set_printoptions(precision=5, threshold=64, edgeitems=2)\n params_list = list()\n this_line_length = offset\n line_sep = ',\\n' + (1 + offset // 2) * ' '\n for i, (k, v) in enumerate(sorted(params.items())):\n if isinstance(v, float):\n # use str for representing floating point numbers\n # this way we get consistent representation across\n # architectures and versions.\n this_repr = '%s=%s' % (k, str(v))\n else:\n # use repr of the rest\n this_repr = '%s=%s' % (k, printer(v))\n if len(this_repr) > 500:\n this_repr = this_repr[:300] + '...' 
+ this_repr[-100:]\n if i > 0:\n if this_line_length + len(this_repr) >= 75 or '\\n' in this_repr:\n params_list.append(line_sep)\n this_line_length = len(line_sep)\n else:\n params_list.append(', ')\n this_line_length += 2\n params_list.append(this_repr)\n this_line_length += len(this_repr)\n\n numpy.set_printoptions(**options)\n lines = ''.join(params_list)\n # Strip trailing space to avoid nightmare in doctests\n lines = '\\n'.join(l.rstrip(' ') for l in lines.split('\\n'))\n return lines" ]
[ "0.75581616", "0.7337525", "0.6988224", "0.6984917", "0.6944316", "0.6923891", "0.6899785", "0.6898276", "0.6816268", "0.680663", "0.6751926", "0.67508817", "0.67453593", "0.66987187", "0.66916466", "0.6675672", "0.66579014", "0.6610545", "0.6606928", "0.6602885", "0.65634936", "0.6560129", "0.6555002", "0.6495863", "0.6471497", "0.64508605", "0.64126676", "0.63404953", "0.6338191", "0.633458", "0.6330645", "0.6315019", "0.6312493", "0.62984765", "0.62983996", "0.62846917", "0.627895", "0.6272744", "0.6265982", "0.62646323", "0.626083", "0.62598103", "0.6257496", "0.6249349", "0.62487143", "0.6245663", "0.624474", "0.62410074", "0.62390095", "0.62152505", "0.6208483", "0.6207773", "0.62060744", "0.62060744", "0.6194434", "0.61885166", "0.618519", "0.618363", "0.6168196", "0.6162495", "0.61623794", "0.61557466", "0.6152245", "0.61517155", "0.61476016", "0.6138875", "0.61385185", "0.61303914", "0.6127962", "0.61253786", "0.61152357", "0.6111974", "0.6101017", "0.61008954", "0.60965693", "0.6081345", "0.6078598", "0.60725325", "0.6058975", "0.6057672", "0.60567707", "0.60567707", "0.60567707", "0.60567707", "0.60567707", "0.60567707", "0.604352", "0.6036946", "0.6034781", "0.6030252", "0.6029152", "0.6024002", "0.6016572", "0.60029066", "0.6001971", "0.6001971", "0.60009706", "0.5995241", "0.59929144", "0.59792817", "0.5978836" ]
0.0
-1
Returns true if both objects are equal
def __eq__(self, other): return self.__dict__ == other.__dict__
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __eq__(self, other):\n return are_equal(self, other)", "def __eq__(self, other):\n return are_equal(self, other)", "def __eq__(self,other):\n try: return self.object==other.object and isinstance(self,type(other))\n except: return False", "def __eq__(self, other):\n if isinstance(self, other.__class__):\n return self.__dict__ == other.__dict__\n return False", "def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.__dict__ == other.__dict__\n return False", "def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.__dict__ == other.__dict__\n return False", "def __eq__(self, other):\r\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n # Ensure same class and values match\n if isinstance(other, self.__class__):\n return self.__dict__ == other.__dict__\n else:\n return False", "def is_equal(self, a, b):\n return a is b", "def is_equal(self, a, b):\n return a == b", "def __eq__(self, other):\n return self is other", "def __eq__(self, other):\n return self is other", "def __eq__(self, other):\r\n if isinstance(other, self.__class__):\r\n return self.__dict__ == other.__dict__\r\n else:\r\n return False", "def is_equal(o1: object, o2: object) -> bool:\n if o1 is None and o2 is None:\n return True\n if o1 is None:\n return False\n return o1 == o2", "def __eq__(self,other):\n return self is other", "def is_equal(self, a, b):\n return a.X[0] == b.X[0]", "def __eq__(self, other):\n return type(self) == type(other) and self.id == other.id", "def __eq__(self, other) -> bool:\n if json.dumps(self.data,sort_keys=True) == json.dumps(other.data,sort_keys=True):\n return True\n else:\n return False", "def __eq__(self, other):\n if not isinstance(other, Single2HaObject):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n return self.__dict__ == other", "def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.__hash__() == other.__hash__()\n return False", "def __eq__(self, other):\n if self.__class__ != other.__class__:\n return False\n if self.primary != other.primary:\n return False\n return True", "def __eq__(self, other) -> bool:\n if other is None:\n return False\n return self.__hash__() == other.__hash__()", "def __eq__(self, other):\n if not isinstance(other, ObjectInfo):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self: _TT, other: object) -> bool:\n return self.eq(other) # type: ignore", "def __eq__(self, other):\n return id(self) == id(other)", "def __eq__(self, other) -> bool:\n return type(self) == type(other) and \\\n self._id == other.id and \\\n self.code == other.code and \\\n self.name == other.name and \\\n self.gender == other.gender and \\\n self.date_of_birth == other.date_of_birth", "def equals(self, other): # -> bool:\n ...", "def equals(self, obj: object) -> bool:\n ...", "def __eq__(self, other):\n for attr in self._attrs_to_save:\n try:\n if getattr(self, attr) != getattr(other, attr):\n return False\n except AttributeError:\n return False\n return True", "def __eq__(self, other):\n if type(other) is type(self):\n return (self.x == other.x and self.y == other.y and self.z == other.z)\n return False", "def __eq__(self, other):\n if isinstance(other, self.__class__):\n return self.x == other.x and self.y == other.y\n return False", "def __eq__(self, other: object) -> bool:\n if not isinstance(other, self.__class__):\n return NotImplemented\n\n return (\n self.name,\n self.submit_at,\n self.subreddit,\n self.title,\n 
self.body_template,\n ) == (\n other.name,\n other.submit_at,\n other.subreddit,\n other.title,\n other.body_template,\n )", "def __eq__(self, other):\n # Check that we share the same class as this object\n if not isinstance(other, type(self)):\n return False\n\n return hash(self) == hash(other)", "def __eq__(self, other):\n if not isinstance(other, PreviewObjectAutofill):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n return equal(self, other)", "def __eq__(self, other: Any) -> bool:\n return self.__class__ is other.__class__ and self.identifier == other.identifier", "def __eq__(self, other):\n return self.__id == other.get_id()", "def __eq__ (self, other):\n if type(self) == type(other):\n return self._m == other._m\n else:\n return False", "def __eq__(self, other):\n if not isinstance(other, Referent):\n return False\n\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n return self.properties == other.properties", "def __eq__(self, other):\n return self.items() == other.items()", "def __eq__(self, other):\n return hash(self) == hash(other)", "def __eq__(self, other):\n return hash(self) == hash(other)", "def __eq__(self, other):\n return hash(self) == hash(other)", "def __eq__(self, other):\n return self.x == other.x and self.y == other.y", "def __eq__(self, other):\n\n if self is other:\n return True\n return hash(self) == hash(other)", "def __eq__(self, other):\n if other._field1 == self._field1:\n return True\n return False", "def same_as(self, other):\n return super().__eq__(other)", "def __eq__(self, other):\n try:\n return other and \\\n self.id == other.id\n\n except AttributeError:\n return False", "def __eq__(self, other):\r\n\t\treturn self._to_pylist() == other._to_pylist()", "def __eq__(self, other):\n if not isinstance(other, Fiddle):\n return False\n\n return self.__dict__ == other.__dict__" ]
[ "0.8088132", "0.8088132", "0.8054589", "0.7982687", "0.7961088", "0.7961088", "0.79433626", "0.79303336", "0.7926563", "0.7897525", "0.78826123", "0.78826123", "0.78806067", "0.7872423", "0.7868354", "0.78668815", "0.7825702", "0.7819993", "0.78162885", "0.78078854", "0.78068274", "0.7796298", "0.7794721", "0.7784825", "0.77790844", "0.7769397", "0.77534705", "0.7746211", "0.7741107", "0.77282816", "0.7725766", "0.7719537", "0.770273", "0.7685999", "0.7677552", "0.76739407", "0.7664857", "0.76557016", "0.7655046", "0.76282835", "0.7625795", "0.76242626", "0.76237214", "0.76237214", "0.76237214", "0.7617347", "0.7600536", "0.7599156", "0.7595863", "0.75945824", "0.7594092", "0.75899327" ]
0.79670393
41
Returns true if both objects are not equal
def __ne__(self, other): return not self == other
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __ne__(self, other: object) -> bool:\n if self.__eq__(other):\n return False\n return True", "def __ne__(self, other: object) -> bool:\n return not self.__eq__(other)", "def __ne__(self, other) -> bool:\n return not self.__eq__(other)", "def __eq__(self, other):\n return not self.__ne__(other)", "def __ne__(self, other):\n if self.__eq__(other):\n return False\n return True", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n # type: (object) -> bool\n return not self == other", "def __ne__(self, other):\n # type: (object) -> bool\n return not self == other", "def __ne__(self, other):\n # type: (object) -> bool\n return not self == other", "def __ne__(self, other):\r\n return not self == other", "def __ne__(self, other):\r\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__ (self, other):\n return not self == other" ]
[ "0.845611", "0.8391477", "0.8144138", "0.81410587", "0.8132492", "0.8093973", "0.80920255", "0.80920255", "0.80920255", "0.8085325", "0.8085325", "0.8076365", "0.8076365", "0.8065748" ]
0.0
-1
seaborn time series, with errorbands
def sns_time_series(x_tuple,y_tuple,outputname,errors=0,two=False, *args,**kwargs): if (type(outputname)==str)|(type(x_tuple)==tuple)|(type(y_tuple)==tuple): pass else: raise TypeError() import matplotlib matplotlib.use("pdf") import matplotlib.pyplot as plt import numpy as np import seaborn as sns; sns.set_style('darkgrid') import seaborn.timeseries x, x_label = x_tuple y, y_label = y_tuple if two==True: x2,x_label2 = x_tuple2 y2,y_label2 = y_tuple2 def _plot_std_bars(std=None, central_data=None, ci=None, data=None,*args, **kwargs): std = errors ci = np.asarray((central_data - std, central_data + std)) kwargs.update({"central_data": central_data, "ci": ci, "data": data}) seaborn.timeseries._plot_ci_band(*args, **kwargs) seaborn.timeseries._plot_std_bars = _plot_std_bars plt.figure() sns.tsplot(xip,r,err_style='std_bars') sns.tsplot(xim,r,err_style='std_bars',color='r') plt.xlabel(r'$\theta$ (arcmin)') plt.ylabel(r'$\xi$') plt.xscale('log') plt.yscale('log') plt.legend([r'$\xi_+$',r'$\xi_-$'],bbox_to_anchor=(1, 1), loc='upper right', borderaxespad=0.) plt.savefig(outputname+'.pdf') plt.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visualize_time_series(fig_ax, data, inp_color, missing_data, lag_color, first_date,\n x_label=\"Number of Days\", y_label=\"Log of Aluminium Price\", title=\"Prices over time\"):\n fig, ax = fig_ax\n ((x_train_raw, y_train_raw), y_pred_list) = data\n\n missing_x, missing_y = missing_data\n is_missing = len(missing_x) != 0\n\n first_date = datetime.strptime(first_date, '%Y-%m-%d')\n\n convert_date = lambda x: [\n np.datetime64((first_date + timedelta(days=d)).strftime('%Y-%m-%d'))\n for d in x\n ]\n convert_price = lambda x: x[\"Output\"].to_list()\n\n x_train = convert_date(x_train_raw[\"Date\"].to_list())\n y_train = convert_price(y_train_raw)\n \n cut_point = x_train[-1]\n ax.plot(x_train, y_train, color=color[inp_color])\n\n for i, y_pred in enumerate(y_pred_list):\n data, plot_name, color_code, is_bridge = y_pred\n mean_pred, x_test_raw = data[\"mean\"], data[\"x\"]\n x_test = convert_date(x_test_raw)\n\n if i == 0 and is_missing:\n missing_x = convert_date(missing_x)\n ax.axvline(x_test[0], color=color[lag_color], linestyle='--', linewidth=0.5, dashes=(5, 0), alpha=0.2)\n ax.plot([missing_x[-1], x_test[0]], [missing_y[-1], mean_pred[0]], color[lag_color], linestyle=\"dashed\")\n ax.axvspan(cut_point, x_test[0], color=color[lag_color], alpha=0.1)\n\n plot_bound(ax, data, x_test, color[color_code], plot_name)\n\n if is_bridge and (not is_missing): \n ax.plot([x_train[-1], x_test[0]], [y_train[-1], mean_pred[0]], color[color_code], linewidth=1.5)\n\n if is_missing:\n ax.plot(missing_x, missing_y, color=color[lag_color], linestyle=\"dashed\")\n ax.plot([x_train[-1], missing_x[0]], [y_train[-1], missing_y[0]], color[lag_color], linestyle=\"dashed\")\n ax.axvline(cut_point, color=color[lag_color], linestyle='--', linewidth=0.5, dashes=(5, 0), alpha=0.2)\n else:\n ax.axvline(cut_point, color=color[\"k\"], linestyle='--')\n\n ax.xaxis.set_minor_locator(AutoMinorLocator())\n ax.legend()\n\n # ax.set_xlabel(x_label)\n ax.set_ylabel(y_label)\n ax.set_title(title)\n\n # ax.set_xlim(left=cut_point-np.timedelta64(1, 'm'))\n plot_axis_date(ax, x_train + missing_x + x_test)\n ax.grid()\n return fig, ax", "def plot_landings_quantiles(df):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n\n ax.set_position(default_timeseries_position) \n\n Fn = df['CatchMT'].groupby([df.Year, df.Reg, df.Sreg]).mean()\n grp = df['CatchMT'].groupby([df.Year, df.Reg, df.Sreg])\n\n qmean = grp.mean().loc[:, 'All', 'All'] \n q90 = grp.quantile(0.90).loc[:, 'All', 'All'] \n q75 = grp.quantile(0.75).loc[:, 'All', 'All'] \n q50 = grp.quantile(0.50).loc[:, 'All', 'All'] \n q25 = grp.quantile(0.25).loc[:, 'All', 'All'] \n q10 = grp.quantile(0.10).loc[:, 'All', 'All'] \n\n # Don't plot the first year. Also, the data is shifted by one year.\n # For some reason, restricting the year range above results in a series\n # that still have a multi-index. 
This seems like the cleanest way to do\n # that.\n qmean = qmean.iloc[2:]\n q90 = q90.iloc[2:]\n q75 = q75.iloc[2:]\n q50 = q50.iloc[2:]\n q25 = q25.iloc[2:]\n q10 = q10.iloc[2:]\n qmean.index = qmean.index - 1\n q90.index = q90.index - 1\n q75.index = q75.index - 1\n q50.index = q50.index - 1\n q25.index = q25.index - 1\n q10.index = q10.index - 1\n \n colors = seaborn.color_palette(n_colors=3);\n\n q90.plot(ax=ax, color=colors[0], linestyle='--', label='90%') \n q75.plot(ax=ax, color=colors[1], linestyle='--', label='75%') \n qmean.plot(ax=ax, color='black', label='Mean') \n q50.plot(ax=ax, color=colors[2], linestyle='--', label='50%') \n q25.plot(ax=ax, color=colors[1], linestyle='--', label='25%') \n q10.plot(ax=ax, color=colors[0], linestyle='--', label='10%') \n\n ax.legend(loc='best')\n\n content = io.BytesIO()\n plt.savefig(content, format='png')\n content.seek(0)\n image_cache['landings']['quantiles'] = content\n\n plt.close()", "def clt_plot(error: pd.Series, n_max=800):\n plt.figure()\n n_values = [n for n in range(20, n_max, 20)]\n error_mean, error_var = do_many_bootstraps(error, n_values, bootstrap_number=50)\n plt.plot(n_values, np.sqrt(error_var), label='Bootstrap estimate')\n plt.plot(n_values, np.divide(np.std(error, ddof=1), np.sqrt(n_values)), label='CLT estimate')\n plt.xlabel('number of samples')\n plt.ylabel('variance of mean square error')\n plt.legend()\n plt.show()", "def _plot_experiment(df, axes, metric_name, isTrain):\n # colors: https://stackoverflow.com/questions/42086276/get-default-line-colour-cycle\n ldf = metric_short_to_long(df)\n plotted = \"Train\" if isTrain else \"Val\"\n m = ldf.query(\"stat == 'mse' and metric == @metric_name\")[[\"trial\",\"state\",\"value\"]].rename({\"value\":\"mse\"},axis=1)\n # aggregated\n ax = sns.barplot(x=\"trial\", y=\"mse\", data=m, palette=[u'#1f77b4'], ci=\"sd\", ax=axes[0])\n ax.set_ylabel(\"MSE (log)\")\n ax.set_yscale(\"log\")\n ax.set_title(f\"Aggregated State Errors ({plotted})\")\n ax.set_xlabel(\"Trial Number\")\n\n # individual state plots\n ax = sns.barplot(x=\"trial\", y=\"mse\", hue=\"state\",data=m, ci=\"sd\", ax=axes[1])\n ax.set_ylabel(\"MSE (log)\")\n ax.set_yscale(\"log\")\n ax.set_title(f\"State Error by Trial ({plotted})\")\n ax.set_xlabel(\"Trial Number\")", "def meanRegion(thk_s,thk_p,thk_diff):\n meanp = np.nanmean(np.nanmean(thk_p,axis=1),axis=1)\n means = np.nanmean(np.nanmean(thk_s,axis=1),axis=1)\n \n print '\\n --- [[%s to %s N, %s to %s E]] ---' % (latmin,latmax,lonmin,lonmax)\n print 'Average Thickness (Satellite) == %s meters' % np.nanmean(means)\n print 'Average Thickness (PIOMAS) == %s meters' % np.nanmean(meanp)\n print 'Average Difference == %s meters' % (np.nanmean(means)-np.nanmean(meanp))\n \n yearmin = 2004\n yearmax = 2015\n years = np.arange(yearmin,yearmax+1,1)\n years = np.setdiff1d(years,[2010]) ### no satellite data in 2010\n \n fig = plt.figure()\n ax = plt.subplot(111)\n \n ### Call parameters\n plt.rcParams['text.usetex']=True\n plt.rcParams['font.family'] = 'sans-serif'\n plt.rcParams['font.sans-serif'] = 'Avant Garde'\n \n plt.plot(meanp,color='darkred',linewidth=2,linestyle='-',\n label=r'PIOMAS')\n plt.plot(means,color='forestgreen',linewidth=2,linestyle='-',\n label=r'Satellite')\n plt.axvline(6,color='k',linewidth=3,linestyle='-')\n \n labelsy = map(str,np.arange(0,6,1))\n labelsx = map(str,years)\n plt.xticks(np.arange(len(years)),labelsx)\n plt.yticks(np.arange(0,6,1),labelsy)\n plt.ylabel(r'\\textbf{Thickness (meters)}',fontsize=13)\n \n ### Adjust axes in time 
series plots \n def adjust_spines(ax, spines):\n for loc, spine in ax.spines.items():\n if loc in spines:\n spine.set_position(('outward', 10))\n else:\n spine.set_color('none') \n if 'left' in spines:\n ax.yaxis.set_ticks_position('left')\n else:\n ax.yaxis.set_ticks([])\n \n if 'bottom' in spines:\n ax.xaxis.set_ticks_position('bottom')\n else:\n ax.xaxis.set_ticks([]) \n \n ### Adjust axes spines\n adjust_spines(ax, ['left', 'bottom'])\n ax.spines['top'].set_color('none')\n ax.spines['right'].set_color('none')\n plt.grid(color='b',zorder=1,alpha=0.3)\n \n plt.legend(shadow=False,fontsize=11,loc='upper right',\n fancybox=True)\n \n plt.text(2,-0.8,r'\\textbf{ICESat}',fontsize=13)\n plt.text(7.3,-0.8,r'\\textbf{PIOMAS}',fontsize=13)\n \n fig.suptitle(r'\\textbf{SIT Difference [Satellite - PIOMAS]}',fontsize=16)\n plt.savefig(directoryfigure + 'test5_difftseries.png',dpi=300)", "def point_plot(MSDs, time_interval, histogram = True):\n\n from seaborn import pointplot as pointplot\n import seaborn as sns\n from matplotlib.colors import LinearSegmentedColormap\n import matplotlib.patches as mpatches\n import matplotlib as mpl\n mpl.rcParams.update({'font.size': 22})\n \n\n #plotting parameters\n fig, ax = plt.subplots(figsize=(17,10), dpi = 300)\n mpl.rcParams['lines.markersize'] = 10\n ax.set(xscale=\"log\", yscale = \"log\")\n ax.grid(color='grey', linestyle='-', linewidth=0.25, alpha=0.5)\n\n\n\n for columnName,columnData in MSDs.iteritems():\n \n msds = columnData\n msds.index = np.arange(msds.shape[0])*time_interval\n ax.plot(msds.index[1:], msds.values[1:], linewidth = 0.75,markersize = 10, marker = 'o', mec='k',zorder = 0, alpha = 0.4)\n\n #more plot parameters\n ax.set_xticks([3,6,9,12,15,18,21,24,27,30,45,60])\n ax.get_xaxis().set_major_formatter(mpl.ticker.ScalarFormatter())\n ax.get_yaxis().set_major_formatter(mpl.ticker.ScalarFormatter())\n\n ax.set_title('MSD')\n ax.set_ylabel('MSD (\\u03BC'+'m)\\u00b2')\n ax.set_xlabel('Lag time (min)')\n \n ax.set_axisbelow(True)\n ax.grid(color='grey', linestyle='-', linewidth=0.25, alpha=0.5)\n\n\n #average MSD\n avg_msd = MSDs.mean(axis = 1)\n avg_msd.index = np.arange(avg_msd.shape[0])*time_interval\n ax.plot(avg_msd.index[1:], avg_msd[1:], linewidth = 0.75, c = 'k', markersize = 15, marker = 'D', mec='k',zorder = 1)\n \n plt.show()\n\n return avg_msd", "def scatter_error_plot(y_true, y_predict, datelist,\n xlab='Dates', ylab='Units sold', title='Error analysis',\n ticks_separation='weeks'):\n plt.style.use('seaborn')\n\n #create plot\n fig=plt.figure(figsize=(15,10))\n \n #plot things\n plt.plot(datelist,y_true, label=r'True Values' ,\n linestyle='--', linewidth=2)\n plt.plot(datelist,y_predict, label=r'Predicted Values',\n linestyle='--', linewidth=2)\n plt.scatter(datelist,y_true)\n plt.scatter(datelist,y_predict)\n \n #labels\n plt.xlabel(xlab)\n plt.ylabel(ylab)\n plt.title(title)\n \n #set ticks every week\n if ticks_separation == 'days':\n plt.gca().xaxis.set_major_locator(matplotlib.dates.DayLocator())\n \n elif ticks_separation == 'weeks':\n plt.gca().xaxis.set_major_locator(matplotlib.dates.WeekdayLocator())\n \n elif ticks_separation == 'months':\n plt.gca().xaxis.set_major_locator(matplotlib.dates.MonthLocator())\n \n elif ticks_separation == 'days':\n plt.gca().xaxis.set_major_locator(matplotlib.dates.YearLocator())\n\n \n #set week format\n plt.gca().xaxis.set_major_formatter(matplotlib.dates.DateFormatter('%d %b'))\n \n \n plt.legend(loc='best')\n \n #increase all text\n ax=plt.gca()\n for item in ([ax.title, ax.xaxis.label, 
ax.yaxis.label] +\n ax.get_xticklabels() + ax.get_yticklabels() + ax.legend().get_texts()):\n item.set_fontsize(18)\n \n \n return fig", "def plot_errors(self):\n\n plt.title(\"Prediction Error\")\n plt.plot(self.errors)\n plt.ylabel(\"MSE (Mean Squared Error)\")\n plt.xlabel(\"Iteration\")\n plt.show()", "def errBarPlot(\n dataFrame,\n meanKey=\"mean\",\n sDevKey=\"sDev\",\n xKey=\"nBinSize\",\n rowKey=\"observable\",\n colKey=\"nX\",\n colorKey=\"nSamples\",\n errBarKwargs=None,\n shareY=False,\n):\n # Check whether frame contains all columns\n for key in [rowKey, colKey, xKey, meanKey, sDevKey, colorKey]:\n if not key in dataFrame.columns:\n raise KeyError(\"Key %s not found in input frame\" % key)\n\n # Set up the error bat plot\n errBarStyle = {\n \"linestyle\":\"None\",\n \"marker\":\".\",\n \"ms\":3,\n \"lw\":1,\n \"elinewidth\":0.5,\n \"capthick\":0.5,\n \"capsize\":0.5,\n }\n # Adjust by input keys\n if errBarKwargs:\n for key, val in errBarKwargs.items():\n errBarStyle[key] = val\n\n # Compute how much one has to shift plots for visualization\n ## Number of shifts\n colorEntries = dataFrame[colorKey].unique()\n nColors = len(colorEntries)\n\n ## Compute minimal independent variable distance\n xRange = dataFrame[xKey].unique()\n\n ## Loop through distances to get the minimal one\n dXmin = max(abs(xRange[-1] - xRange[0]), 0.1)\n for nx1, x1 in enumerate(xRange[:-1]):\n for x2 in xRange[nx1+1:]:\n if abs(x2-x1) < dXmin:\n dXmin = abs(x2-x1)\n dXmin /= 3\n\n ## Allocate shift of distances\n dX = {}\n for nEntry, entry in enumerate(colorEntries):\n dX[entry] = dXmin*(2*nEntry-nColors+1)*1./nColors\n\n ## Modify x cols\n df = dataFrame.copy()\n df[xKey] += df.apply(lambda col: dX[col[colorKey]], axis=1)\n\n # Create the facet grid for the mapping\n graph = sns.FacetGrid(\n data=df,\n row=rowKey,\n col=colKey,\n hue=colorKey,\n palette=\"Blues\",\n sharex=True,\n sharey=\"row\" if shareY else False,\n )\n ## and map the error bar plot\n graph.map(plt.errorbar, xKey, meanKey, sDevKey, **errBarStyle)\n\n # Change figure size\n graph.fig.set(\n dpi=500,\n figheight=2,\n figwidth=len(dataFrame[colKey].unique())*1./2\n )\n\n # Style individual plots\n for nax, ax in enumerate(graph.axes.flat):\n if not shareY:\n ax.set_yticks([])\n ## At most three ticks\n ax.set_xticks(np.linspace(\n dataFrame[xKey].min(), dataFrame[xKey].max(), 3, dtype=int\n ))\n ## Set the range\n ax.set_xlim(dataFrame[xKey].min()-1, dataFrame[xKey].max()+1)\n ## Set the ticks\n ax.tick_params(\n axis=\"both\",\n direction='inout',\n width=0.5,\n length=2.5,\n )\n\n # Remove axis and ticks\n for pos in [\"left\", \"top\", \"right\"]:\n ax.spines[pos].set_linewidth(0)\n if shareY and nax % len(graph.axes[0]) == 0:\n ax.spines[\"left\"].set_linewidth(0.5)\n else:\n ax.tick_params(\n axis=\"y\",\n direction='inout',\n width=0.0,\n length=0.0,\n )\n ax.spines[\"bottom\"].set_linewidth(0.5)\n\n # Adjust the margin titles and plot the mean of the means\n graph.set_titles(\"\")\n means = dataFrame.groupby([rowKey, colKey])[meanKey].mean()\n for nCorr, (corrName, axRow) in enumerate(\n zip(dataFrame[rowKey].unique(), graph.axes)\n ):\n for nt, ax in zip(dataFrame[colKey].unique(), axRow):\n if nCorr == 0:\n ax.set_title(\"{colKey}$ = {nt}$\".format(nt=nt, colKey=colKey))\n ax.axhline(means[corrName, nt], color=\"black\", ls=\"--\", lw=0.5)\n\n # Set the labels\n graph.set_ylabels(meanKey)\n\n # Adjust the remaining margin titles\n for corrName, ax in zip(dataFrame[rowKey].unique(), graph.axes[:, -1]):\n 
ax.yaxis.set_label_position(\"right\")\n ax.set_ylabel(corrName)\n\n graph.set_xlabels(xKey)\n graph.add_legend()\n\n # Adjust the intermediate plot spacing\n plt.subplots_adjust(wspace=0.1, hspace=0.05)\n\n return graph", "def get_avg_trend(y, filter='ewm', a=0.015, verbose =1, resample_interval='60s', fill_missing=False, title= '' , note= ''):\n\n # Two-way EWMA averaging\n ts_mean1, ts_std1 = smoothing(y, filter=filter, a=a)\n\n reversed_y = y.iloc[::-1]\n ts_mean2, ts_std2 = smoothing(reversed_y, filter=filter,a=a)\n ts_mean2 = ts_mean2.iloc[::-1]\n ts_std2 = ts_std2.iloc[::-1]\n\n ts_mean = (ts_mean1 + ts_mean2)/2\n ts_std = (ts_std1 + ts_std2)/2\n\n\n # Resample the daily trend by calculating the median of a resampling slice. mean can also be used.\n trend = ts_mean.resample(resample_interval).mean()\n ts_std = ts_std.resample(resample_interval).mean()\n\n # Fill up the missing trend samples if exist, by propagating the last valid\n if fill_missing: #rolling filter introduce Nan at the head or tail..\n trend.fillna(method='ffill', inplace=True, limit=2) #fill the end\n trend.fillna(method='bfill', inplace=True, limit=2) #fill the start\n\n\n\n if verbose>=1:\n t = title if title is not None else 'Average Trend'\n\n fig = plt.gcf()\n\n plt.plot(y[::1+y.shape[0]// 2000], alpha=.5)\n ax = trend.plot()\n ax.fill_between(trend.index, trend - 2 * ts_std, trend + 2 * ts_std,\n alpha=.25)\n ax.legend(['Orignal', 'Trend', 'std'])\n plt.text(ax.get_xlim()[0], ax.get_ylim()[0] + 50, note)\n plt.title(t)\n plt.show()\n\n import matplotlib.dates as mdates\n ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))\n plt.tight_layout()\n\n fig.savefig('./output/trends/'+t + '.pdf')\n plt.close(fig)\n\n return trend", "def plot_error(class_incorreto):\n epochs = np.arange(1, num_iter + 1)\n plt.plot(epochs, class_incorreto)\n plt.xlabel('Iterações')\n plt.ylabel('Classificados incorretamente')\n plt.show()", "def plot_errors(dat, title='Data', avg='mean', err='sem'):\n\n n_groups = len(dat)\n\n fig = plt.figure(figsize=[4, 5])\n ax = plt.gca()\n\n if avg == 'mean': avg_func = np.nanmean\n if avg == 'median': avg_func = np.nanmedian\n\n if err == 'sem': err_func = sem\n\n plt.errorbar(np.arange(1, n_groups+1), avg_func(dat, 1), yerr=err_func(dat, 1), xerr=None, fmt='.',\n markersize=22, capsize=10, elinewidth=2, capthick=2)\n\n ax.set_xlim([0.5, n_groups+0.5])\n\n # Titles & Labels\n ax.set_title(title, fontsize=16)\n ax.set_xlabel('Noise Levels')\n ax.set_ylabel('Error')\n\n # Set the top and right side frame & ticks off\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.xaxis.set_ticks_position('bottom')\n ax.yaxis.set_ticks_position('left')\n\n # Set linewidth of remaining spines\n ax.spines['left'].set_linewidth(2)\n ax.spines['bottom'].set_linewidth(2)", "def residual_plots(test_data, mods, station=None, squared=False):\r\n import config\r\n learning_var = config.learning_var\r\n data = test_data.get_miniOD()\r\n mods.models[0].mod.load()\r\n pred = mods.models[0].mod.predict(test_data)\r\n test_data.miniOD = None\r\n if squared:\r\n data[test_data.get_stations_col(2015)] = (data[test_data.get_stations_col(2015)] - pred) ** 2 # /pred\r\n else:\r\n data[test_data.get_stations_col(2015)] = data[test_data.get_stations_col(2015)] - pred\r\n ind = data[data['Annee'] == 0].index\r\n data.drop(ind, inplace=True)\r\n print(data.columns.values)\r\n i = 0\r\n if station is None:\r\n ch_an = test_data.get_stations_col(2015)\r\n else:\r\n ch_an = 'End date ' + 
str(station)\r\n for ch in learning_var:\r\n if not (ch[0] == 'h') and not (ch in ['LV', 'MMJ', 'SD', 'poudrerie', 'verglas']):\r\n # data.boxplot(ch_an, by=ch)\r\n # if ch != 'Heure':\r\n plt.figure(i // 9)\r\n # plt.title('squared error / expectation')\r\n fig = plt.subplot(3, 3, (i % 9) + 1)\r\n i += 1\r\n # fig = plt.figure().add_subplot(111)\r\n fig.set_xlabel(ch)\r\n if squared:\r\n fig.set_ylabel('error²')\r\n else:\r\n fig.set_ylabel('error')\r\n l = []\r\n xaxis = np.unique(data[ch])\r\n print(ch, xaxis.shape)\r\n if xaxis.shape[0] < 20 or ch == 'Heure':\r\n for u in xaxis:\r\n l.append(data[ch_an][data[ch] == u])\r\n else:\r\n m = np.min(data[ch])\r\n M = np.max(data[ch])\r\n step = (M - m) / 20\r\n xaxis = np.arange(m, M, step)\r\n for u in xaxis:\r\n l.append(data[ch_an][(data[ch] >= u) * (data[ch] < u + step)])\r\n xaxis = xaxis.astype(int)\r\n # fig = plt.boxplot(ch_an, by=ch)\r\n # g = data.groupby(ch).mean()[ch_an]\r\n # v = data.groupby(ch).std()[ch_an]\r\n plt.boxplot(l, labels=xaxis)\r\n if squared:\r\n plt.ylim((0, 12))\r\n else:\r\n plt.ylim((-5, 5))\r\n # plt.plot(g, '-r')\r\n # plt.plot(g + v, ':r')\r\n # plt.plot(g - v, ':r')\r\n plt.show()", "def error():\n\n # Make data set using errors\n dataset_a = DataSet(oscillating,error_y=oscillating_error,plot='error_bar',label='Data and error')\n dataset_a.set_error(interval=5,width=1,cap=2)\n dataset_b = DataSet(oscillating,plot='error_shade',error_y=oscillating_error,order=0,colour='lightgrey',label='Error')\n dataset_c = DataSet(oscillating,plot='line',order=1,colour='firebrick',label='Data')\n\n # Make line graph with error bars\n plot_bar = Plot()\n plot_bar.set_legend(legend=True)\n plot_bar.add_dataset(dataset_a)\n plot_bar.plot()\n plot_bar.save(name='./figures/2d_error_bar',fmt='png')\n plot_bar.display()\n\n # Make line graph with shaded errors\n plot_shade = Plot()\n plot_shade.set_legend(legend=True,location='upper left')\n plot_shade.add_dataset(dataset_b)\n plot_shade.add_dataset(dataset_c)\n plot_shade.plot()\n plot_shade.save(name='./figures/2d_error_shade',fmt='png')\n plot_shade.display()", "def plot_ts(da, key):\n p = sns.lineplot(data=da.to_pandas(), linewidth=2)\n p.set_xlabel('time')\n p.set_ylabel(key)", "def hogg_errorbar(x, y, yerr, **kwargs):\n for xx, yy, yyerr in zip(x, y, yerr):\n plt.plot([xx, xx], [yy - yyerr, yy + yyerr], 'k-', **kwargs)\n return None", "def plot_Hubble():\n pickle_in = open(\"MICE_SN_data.pickle\", \"rb\")\n SN_data = pickle.load(pickle_in)\n z = SN_data[\"SNZ\"]\n mu = SN_data['SNMU']\n mu_err = SN_data['SNMU_ERR']\n z_array = np.linspace(0.0, 1.5 + 0.01, 1001)\n mu_cosm = 5 * np.log10((1 + z_array) * Convergence.comoving(z_array, OM=0.25, OL=0.75, h=0.7) * 1000) + 25\n mu_diff = SN_data['mu_diff']\n ax = plt.subplot2grid((2, 1), (0, 0))\n ax2 = plt.subplot2grid((2, 1), (1, 0))\n ax.set_ylabel(\"$\\mu$\")\n ax2.set_xlabel(\"$z$\")\n ax2.set_ylabel(\"$\\Delta\\mu$\")\n plt.subplots_adjust(wspace=0, hspace=0)\n ax.set_xticklabels([])\n ax.tick_params(labelsize=12)\n ax.errorbar(z[::2], mu[::2], mu_err[::2], linestyle='', linewidth=0.8, marker='o',\n markersize=2, capsize=2, color='C3', zorder=0, alpha=0.6, elinewidth=0.7)\n ax.plot(z[::2], mu[::2], linestyle='', marker='o', markersize=2, color='C3', alpha=0.4, markerfacecolor='C3')\n\n ax.set_ylim([38.5, 46])\n ax.set_xlim([0, 1.5])\n ax.plot(z_array, mu_cosm, linestyle='--', linewidth=0.8, color='C0', zorder=10)\n ax2.errorbar(z[::2], mu_diff[::2], mu_err[::2], linestyle='', linewidth=1, marker='o',\n markersize=2, 
capsize=2, color='C3', zorder=0, alpha=0.6, elinewidth=0.7)\n ax2.plot(z[::2], mu_diff[::2], linestyle='', marker='o', markersize=2, color='C3', alpha=0.4, markerfacecolor='C3')\n ax2.plot(z_array, np.zeros(len(z_array)), zorder=10, color='C0', linewidth=0.8, linestyle='--')\n ax2.set_ylim(-1.0, 1.0)\n ax2.set_xlim([0, 1.5])\n ax2.tick_params(labelsize=12)\n\n plt.show()", "def summaryPlot(df):\n import datetime as dt\n import matplotlib.pyplot as plt\n import matplotlib as mpl\n import numpy as np\n import pandas as pd\n from numpy import array\n import matplotlib.patches as mpatches\n import seaborn as sns\n from matplotlib.pyplot import figure\n\n class color:\n # Allows for bolded and underlined text\n BOLD = \"\\033[1m\"\n UNDERLINE = \"\\033[4m\"\n END = \"\\033[0m\"\n\n # Reads df and fills empty values\n df.index = pd.to_datetime(df.date)\n df = df.drop(\"date\", axis=1)\n df_all = df.resample(\"1D\")\n df_all = df_all.fillna(method=\"ffill\")\n\n dataPoints = [\"pm25\", \"co\", \"so2\", \"pm10\", \"o3\", \"no2\", \"nox\", \"wd\", \"ws\"]\n\n i = 0\n sub = 1\n while i < 9:\n # Plots line and histogram plots for ecery polutant\n # in the correct location based on subplot\n plt.figure(1, figsize=(50, 50))\n plt.subplot(9, 2, sub)\n sub = sub + 1\n a = df_all[dataPoints[i]].plot.line(color=\"gold\")\n a.axes.get_xaxis().set_visible(False)\n a.yaxis.set_label_position(\"left\")\n plt.ylabel(dataPoints[i], fontsize=75, bbox=dict(facecolor=\"whitesmoke\"))\n # print(df['pm25'].max())\n\n plt.subplot(9, 2, sub)\n sub = sub + 1\n plt.hist(df_all[dataPoints[i]], bins=50, color=\"green\")\n i = i + 1\n i = 0\n while i < 9:\n # Calculates statistics\n nDf = df[dataPoints[i]]\n missing = nDf.isna().sum() + sum(n < 0 for n in nDf)\n minVal = nDf.min()\n maxVal = nDf.max()\n meanVal = nDf.mean()\n medianVal = nDf.median()\n percentile = nDf.quantile(0.95)\n print(\"---------------\")\n print(color.BOLD + color.UNDERLINE + dataPoints[i] + color.END)\n print(\"min = \" + str(0))\n print(\"max = \" + str(maxVal))\n print(\"missing = \" + str(missing))\n print(\"mean = \" + str(meanVal))\n print(\"median = \" + str(medianVal))\n print(\"95th percentile = \" + str(percentile))\n i = i + 1", "def plot_ridgeline(windfield_errors):\n\n models = windfield_errors.columns # Extract model names\n n_points = 30 # Number of bins for the ridge plot\n\n # Get mean and confidence intervals for each model,\n # Also create histogram for each models\n model_data = pd.DataFrame()\n for model in models:\n data = {}\n error = windfield_errors[model].dropna().values\n data[\"name\"] = model\n data[\"mse\"] = np.mean(error)\n data[\"mse_ci\"] = 2 * np.std(error) / len(error) ** 0.5\n data[\"hist\"] = Histogram(error, n_points)\n model_data = model_data.append(data, ignore_index=True)\n\n # Sort values by mean square error to make the plot more visually appealing\n model_data = model_data.sort_values(by=[\"mse\"], axis=0).reset_index(drop=True)\n\n y = 0 # First histogram vertical position\n ys = [] # List of vertical histogram positions\n error_grid = np.linspace(0, 2, n_points)\n\n # Create colors for the histograms\n cmap = cm.get_cmap(\"bwr\", len(models))\n colors = [cmap(i) for i in range(len(models))]\n\n plt.figure(figsize=(6, 4))\n ax_L = plt.gca()\n for i, data in model_data.iterrows():\n ys.append(y)\n e_mean = data[\"mse\"]\n e_error = data[\"mse_ci\"]\n hist = data[\"hist\"]\n\n freqs = hist.freqs\n bins = hist.edges\n\n freqs = np.array([0] + [a for a in freqs for _ in range(2)] + [0])\n width = bins[1] - 
bins[0]\n bins = np.array([bins[0]] + [a + da for a in bins for da in [0, width]] + [bins[-1]+width])\n\n ax_L.fill_between(bins, y * np.ones(2*n_points+2), y + freqs, alpha=0.5, color=colors[i])\n ax_L.plot(bins, freqs + y, color='k', linewidth=1)\n\n def get_y(x):\n return freqs[2 * int((x - min(bins)) / (max(bins) - min(bins)) * n_points) + 2]\n\n freq_mean = get_y(e_mean)\n freq_lower = get_y(e_mean - e_error)\n freq_upper = get_y(e_mean + e_error)\n\n ax_L.plot([e_mean, e_mean], [y, freq_mean + y], color='k')\n ax_L.plot([e_mean-e_error, e_mean-e_error], [y, freq_lower + y], \"k--\", linewidth=.5)\n ax_L.plot([e_mean+e_error, e_mean+e_error], [y, freq_upper + y], \"k--\", linewidth=.5)\n\n y += 1.1*max(freqs)\n\n ax_L.plot(error_grid, np.zeros(n_points), color='k', linewidth=2)\n ax_L.set_yticks(ys)\n ax_L.set_ylim([0, y])\n ax_L.set_yticklabels([])\n\n ax_L.set_yticklabels(model_data[\"name\"])\n\n ax_L.set_xlabel(\"Fraction of unexplained variance $\\widetilde{\\mathcal{E}}$\")\n ax_L.set_xlim([0, 1.2])\n ax_L.grid(axis=\"y\")\n plt.tight_layout()\n return None", "def plotErr(self):\n if self.xp and self.wp:\n # plot the spectra\n w=self.ws.value(np.array(self.xp))\n self.errcurve,=self.erraxes.plot(self.xp,self.wp-w,linewidth=0.5,linestyle='',marker='o',color='b')\n if self.dxp and self.dwp:\n # plot the spectra\n dw=self.ws.value(np.array(self.dxp))\n self.delerrcurve,=self.erraxes.plot(self.dxp,self.dwp-dw,linewidth=0.5,linestyle='',marker='x',color='b')", "def plot_mean_std(data,ax,label=None,show_error=True):\n x = np.arange(1,100)\n mean = np.array([np.mean(data_n) for data_n in data])\n if show_error: std = np.array([np.std(data_n) for data_n in data])\n ax.plot(x,mean,label=label)\n if show_error: ax.fill_between(x,mean-std,mean+std,alpha=0.3)", "def plot_hists_wratio_errorband( hists, histErros, name, **kw):\n\n #\n # Calc bands\n #\n varUp = []\n varDown = []\n\n for sysHist in histErros:\n thisUpVar, thisDownVar = calcBinByBinDiffs(hists[1],sysHist)\n\n if varUp:\n varUp = addInQuad(thisUpVar, varUp)\n else:\n varUp = thisUpVar\n\n\n if varDown:\n varDown = addInQuad(thisDownVar, varDown)\n else:\n varDown = thisDownVar\n\n #\n # Build Band\n #\n xAxis = hists[0].GetXaxis()\n nBins = xAxis.GetNbins()\n var_band = ROOT.TGraphAsymmErrors(nBins)\n var_band.SetFillColor(ROOT.kRed)\n for i in range(nBins):\n var_band.SetPoint(i,xAxis.GetBinCenter(i+1),1.0)\n \n up = varUp [i]\n down = varDown[i]\n nom = hists[1].GetBinContent(i+1)\n \n if nom:\n errUp = float(up)/nom\n errDown = float(down)/nom\n else:\n errUp = 0\n errDown = 0\n\n var_band.SetPointError(i,\n xAxis.GetBinCenter(i+1)-xAxis.GetBinLowEdge(i+1),xAxis.GetBinUpEdge(i+1)-xAxis.GetBinCenter(i+1),\n errUp,errDown)\n\n\n #\n # Make ratio\n #\n kw[\"sys_band\"] = var_band\n res = plot_hists_wratio(hists, name, **kw)\n \n return res", "def plot_sum(self):\n fig, ax = plt.subplots()\n ax.set_title(\"Unpolarized intensity: I_up + I_down\")\n ax.set_xlabel(\"Time (microseconds)\")\n ax.set_ylabel('Intensity')\n\n if (self.is_attribute(\"time\") & self.is_attribute(\"intensity_up\") & \n self.is_attribute(\"intensity_up_sigma\") &\n self.is_attribute(\"intensity_down\") & \n self.is_attribute(\"intensity_down_sigma\") &\n self.is_attribute(\"intensity_up_total\") &\n self.is_attribute(\"intensity_down_total\")):\n np_excl = numpy.array(self.excluded, dtype=bool)\n np_notexcl = numpy.logical_not(np_excl)\n np_time = numpy.array(self.time, dtype=float)\n np_up = numpy.array(self.intensity_up, dtype=float)\n np_sup = 
numpy.array(self.intensity_up_sigma, dtype=float)\n np_up_mod = numpy.array(self.intensity_up_total, dtype=float)\n np_down = numpy.array(self.intensity_down, dtype=float)\n np_sdown = numpy.array(self.intensity_down_sigma, dtype=float)\n np_down_mod = numpy.array(self.intensity_down_total, dtype=float)\n np_sum = np_up + np_down\n np_sum_mod = np_up_mod + np_down_mod\n np_ssum = numpy.sqrt(numpy.square(np_sup)+numpy.square(np_sdown))\n ax.plot(np_time, np_sum_mod, \"k-\", label=\"model\")\n ax.errorbar(np_time[np_notexcl], np_sum[np_notexcl], yerr=np_ssum[np_notexcl], fmt=\"ko\", alpha=0.2, label=\"experiment\")\n ax.errorbar(np_time[np_excl], np_sum[np_excl], yerr=np_ssum[np_excl], fmt=\"rs\", alpha=0.2, label=\"excluded\")\n\n y_min_d, y_max_d = ax.get_ylim()\n param = y_min_d-(np_sum - np_sum_mod).max()\n coeff = np_notexcl.astype(int)\n\n ax.plot([np_time.min(), np_time.max()], [param, param], \"k:\")\n ax.plot(np_time, coeff*(np_sum - np_sum_mod)+param, \"r-\", alpha=0.7,\n label=\"difference\")\n elif (self.is_attribute(\"time\") & self.is_attribute(\"intensity\") & \n self.is_attribute(\"intensity_total\") &\n self.is_attribute(\"intensity_sigma\")):\n np_excl = numpy.array(self.excluded, dtype=bool)\n np_notexcl = numpy.logical_not(np_excl)\n np_time = numpy.array(self.time, dtype=float)\n np_sum = numpy.array(self.intensity, dtype=float)\n np_sum_mod = numpy.array(self.intensity_total, dtype=float)\n np_ssum = numpy.array(self.intensity_sigma, dtype=float)\n ax.plot(np_time, np_sum_mod, \"k-\", label=\"model\")\n ax.errorbar(np_time[np_notexcl], np_sum[np_notexcl], yerr=np_ssum[np_notexcl], fmt=\"ko\", alpha=0.2, label=\"experiment\")\n ax.errorbar(np_time[np_excl], np_sum[np_excl], yerr=np_ssum[np_excl], fmt=\"rs\", alpha=0.2, label=\"excluded\")\n\n y_min_d, y_max_d = ax.get_ylim()\n param = y_min_d-(np_sum - np_sum_mod).max()\n coeff = np_notexcl.astype(int)\n\n ax.plot([np_time.min(), np_time.max()], [param, param], \"k:\")\n ax.plot(np_time, coeff*(np_sum - np_sum_mod)+param, \"r-\", alpha=0.7,\n label=\"difference\")\n ax.legend(loc='upper right')\n fig.tight_layout()\n return (fig, ax)", "def _plot_ts(self, data, labels, ax,\n show_ylabels=True,\n offset=0.0,\n special_idx=[],\n errors_list=[],\n fontsize=FiguresConfig.LARGE_FONT_SIZE):\n if data.ndim == 1:\n data = data[np.newaxis, :]\n offset = int(offset)\n # apply offset setting onto the data\n data = data[:, offset:]\n\n # get shape of data to be plotted\n nsamples, ntimes = data.shape\n\n nTS = 1\n def_alpha = 1.0\n # generate ylabels for the plot\n labels = generate_region_labels(nsamples, labels)\n\n # set plotting parameters: alpha_ratio, colors, alphas\n alpha_ratio = 1.0 / nsamples\n colors = np.array(['k'] * nTS)\n alphas = np.maximum(np.array(\n [def_alpha] *\n nTS) *\n alpha_ratio,\n 1.0)\n colors[special_idx] = 'r'\n alphas[special_idx] = np.maximum(alpha_ratio, 0.1)\n\n # apply normalization for each trace\n for i in range(nsamples):\n data[i, :] = data[i, :] / np.nanmax(data[i, :])\n\n # plot each trace\n x = np.arange(ntimes)\n for itrace in range(nTS):\n for i in range(nsamples):\n y = data[i, :] + np.r_[i]\n ax.plot(x, y,\n color=colors[itrace],\n label=labels[itrace],\n alpha=alphas[itrace])\n\n # plot error bars\n if errors_list:\n error = errors_list[i]\n ax.fill_between(x, y - error, y + error,\n color=colors[itrace],\n alpha=alphas[itrace])\n\n if show_ylabels:\n # print(\"Labels are : \", labels)\n y_ticks = np.arange(len(labels))\n ax.set_yticks(y_ticks)\n ax.set_yticklabels(labels, 
fontsize=fontsize / 1.5)\n\n for tick in ax.yaxis.get_major_ticks():\n tick.label.set_fontsize(fontsize / 1.5)\n for tick in ax.xaxis.get_major_ticks():\n tick.label.set_fontsize(fontsize / 1.5)\n\n return ax", "def plot_XDR_PDR_Gauss():\n\n fig,axes = plt.subplots(nrows=2, ncols=2, squeeze=True, sharex='col', sharey='row', figsize=(6,6))\n fig.subplots_adjust(hspace=0, wspace=0) #, top=0.80, bottom=0.04, left=0.04, right=0.93)\n\n # get data\n sscs = [SSC['no'] for SSC in SSCs]\n colors = [plt.cm.inferno(i/(len(SSCs)+1)) for i in SSCs['no']]\n HCO_HCN, HNC_HCN, HNC_HCO = [],[],[]\n HCO_HCN_err, HNC_HCN_err, HNC_HCO_err = [],[],[]\n for SSC in SSCs:\n try:\n hco_hcn = ratios_Gauss['HCO+/HCN'][SSC['num']]['bestfit']\n hco_hcn_err = ratios_Gauss['HCO+/HCN'][SSC['num']]['error']\n HCO_HCN.append( np.log10(hco_hcn) )\n HCO_HCN_err.append( 0.434*hco_hcn_err/hco_hcn )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( np.nan )\n try:\n hnc_hcn = ratios_Gauss['HNC/HCN'][SSC['num']]['bestfit']\n hnc_hcn_err = ratios_Gauss['HNC/HCN'][SSC['num']]['error']\n HNC_HCN.append( np.log10(hnc_hcn) )\n HNC_HCN_err.append( 0.434*hnc_hcn_err/hnc_hcn )\n except:\n HNC_HCN.append( np.nan )\n HNC_HCN_err.append( np.nan )\n try:\n hnc_hco = ratios_Gauss['H15NC/HCO+'][SSC['num']]['bestfit']*ratios_Gauss['14N/15N'][SSC['num']]['bestfit']\n hnc_hco_err = np.sqrt( (ratios_Gauss['H15NC/HCO+'][SSC['num']]['error']/ratios_Gauss['H15NC/HCO+'][SSC['num']]['bestfit'])**2 +(ratios_Gauss['14N/15N'][SSC['num']]['error']/ratios_Gauss['14N/15N'][SSC['num']]['bestfit'])**2 )\n HNC_HCO.append( np.log10(hnc_hco) )\n HNC_HCO_err.append( 0.434*hnc_hco_err/hnc_hco )\n except:\n HNC_HCO.append( np.nan )\n HNC_HCO_err.append( np.nan )\n\n # comparison from Baan+08\n B_hcn = [318.2, 14]\n B_hnc = [234.0, 7]\n B_hco = [276.1, 14]\n B_hco_hcn = [B_hco[0]/B_hcn[0], B_hco[0]/B_hcn[0]*np.sqrt((B_hco[1]/B_hco[0])**2+(B_hcn[1]/B_hcn[0])**2)]\n B_hnc_hcn = [B_hnc[0]/B_hcn[0], B_hnc[0]/B_hcn[0]*np.sqrt((B_hnc[1]/B_hnc[0])**2+(B_hcn[1]/B_hcn[0])**2)]\n B_hnc_hco = [B_hnc[0]/B_hco[0], B_hnc[0]/B_hco[0]*np.sqrt((B_hnc[1]/B_hnc[0])**2+(B_hco[1]/B_hco[0])**2)]\n B_HCO_HCN = [np.log10(B_hco_hcn[0]), 0.434*B_hco_hcn[1]/B_hco_hcn[0]]\n B_HNC_HCN = [np.log10(B_hnc_hcn[0]), 0.434*B_hnc_hcn[1]/B_hnc_hcn[0]]\n B_HNC_HCO = [np.log10(B_hnc_hco[0]), 0.434*B_hnc_hco[1]/B_hnc_hco[0]]\n\n def format_panel(ax):\n ax.xaxis.set_major_locator(MultipleLocator(0.5))\n ax.xaxis.set_minor_locator(MultipleLocator(0.25))\n ax.yaxis.set_major_locator(MultipleLocator(0.5))\n ax.yaxis.set_minor_locator(MultipleLocator(0.25))\n ax.set_axisbelow(True)\n ax.grid(axis='both', which='both')\n\n def label_regions(ax):\n ax.text(0.95, 0.9, 'XDR', color='k', transform=ax.transAxes, ha='right', va='top', weight='bold', fontsize=16)\n ax.text(0.05, 0.1, 'PDR', color='k', transform=ax.transAxes, ha='left', va='bottom', weight='bold', fontsize=16)\n\n # panel 1: HCO+/HCN over HNC/HCO+\n ax = axes[0][0]\n ax.plot([-10,10],[10,-10], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([-10,10],[10,-10],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c,s in zip(HNC_HCO, HCO_HCN, HNC_HCO_err, HCO_HCN_err, colors, SSCs):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=a_err, yerr=b_err, marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, label='SSC '+str(s['no']), zorder=3)\n ax.errorbar(B_HCO_HCN[0],B_HNC_HCO[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', 
label=r'NGC 253 (Baan +08)', zorder=4)\n ax.set_xlim(-1.15,0.45)\n ax.set_ylim(-0.80,0.80)\n format_panel(ax)\n ax.set_ylabel(r'log I(HCO$^+$) / I(HCN)', fontsize=12)\n\n # panel 2: HNC/HCN over HCO/HCN\n ax = axes[0][1]\n ax.plot([0,0],[-10,10], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([0,10],[-10,-10],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c in zip(HNC_HCN, HCO_HCN, HNC_HCN_err, HCO_HCN_err, colors):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=a_err, yerr=b_err, marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, zorder=3)\n ax.errorbar(B_HNC_HCN[0],B_HCO_HCN[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', zorder=4)\n ax.set_xlim(-1.15,0.45)\n ax.set_ylim(-0.80,0.80)\n ax.xaxis.set_tick_params(which='both', labelbottom=True)\n format_panel(ax)\n ax.set_xlabel(r'log I(HNC) / I(HCN)', fontsize=12)\n\n # panel 3: HNC/HCO over HNC/HCN\n ax = axes[1][0]\n ax.plot([-10,10],[0,0], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([-10,10],[0,0],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c in zip(HNC_HCO, HNC_HCN, HNC_HCO_err, HNC_HCN_err, colors):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=a_err, yerr=b_err, marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, zorder=3)\n ax.errorbar(B_HNC_HCO[0],B_HNC_HCN[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', zorder=4)\n ax.set_xlim(-1.15,0.45)\n ax.set_ylim(-1.00,0.60)\n format_panel(ax)\n ax.set_xlabel(r'log I(HNC$^{**}$) / I(HCO$^+$)', fontsize=12)\n ax.set_ylabel(r'log I(HNC) / I(HCN)', fontsize=12)\n\n # panel 4: legend\n ax = axes[1][1]\n ax.set_axis_off()\n fig.legend(loc=3, bbox_to_anchor=(0.55,0.05,0.14,0.3), ncol=1, mode=\"expand\", borderaxespad=0., fontsize=12, frameon=False)\n\n savepath = escape_fname(os.path.join(plotdir, '10.results', 'XDR-PDR_line_ratio.pdf'))\n os.system('mkdir -p '+os.path.dirname(savepath))\n fig.savefig(savepath, dpi=300, bbox_inches='tight')", "def plot_XDR_PDR_XCLASS():\n\n fig,axes = plt.subplots(nrows=2, ncols=2, squeeze=True, sharex='col', sharey='row', figsize=(6,6))\n fig.subplots_adjust(hspace=0, wspace=0) #, top=0.80, bottom=0.04, left=0.04, right=0.93)\n\n # get data\n sscs = [SSC['no'] for SSC in SSCs]\n colors = [plt.cm.inferno(i/(len(SSCs)+1)) for i in SSCs['no']]\n HCO_HCN, HNC_HCN, HNC_HCO = [],[],[]\n HCO_HCN_err, HNC_HCN_err, HNC_HCO_err = [],[],[]\n for SSC in SSCs:\n try:\n hco_hcn_med = ratios_XCLASS['HCO+/HCN'][SSC['num']]['median']\n hco_hcn_p16 = ratios_XCLASS['HCO+/HCN'][SSC['num']]['16th']\n hco_hcn_p84 = ratios_XCLASS['HCO+/HCN'][SSC['num']]['84th']\n hco_hcn_low = hco_hcn_med-hco_hcn_p16\n hco_hcn_hig = hco_hcn_p84-hco_hcn_med\n HCO_HCN.append( np.log10(hco_hcn_med) )\n HCO_HCN_err.append( [0.434*hco_hcn_low/hco_hcn_med,0.434*hco_hcn_hig/hco_hcn_med] )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( [np.nan,np.nan] )\n try:\n hnc_hcn_med = ratios_XCLASS['HNC/HCN'][SSC['num']]['median']\n hnc_hcn_p16 = ratios_XCLASS['HNC/HCN'][SSC['num']]['16th']\n hnc_hcn_p84 = ratios_XCLASS['HNC/HCN'][SSC['num']]['84th']\n hnc_hcn_low = hnc_hcn_med-hnc_hcn_p16\n hnc_hcn_hig = hnc_hcn_p84-hnc_hcn_med\n HNC_HCN.append( np.log10(hnc_hcn_med) )\n HNC_HCN_err.append( [0.434*hnc_hcn_low/hco_hcn_med,0.434*hnc_hcn_hig/hco_hcn_med] )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( [np.nan,np.nan] )\n try:\n 
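# estimate HNC/HCO+ by scaling the observed H15NC/HCO+ ratio with the 14N/15N isotope ratio\n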
hnc_hco_med = ratios_XCLASS['H15NC/HCO+'][SSC['num']]['median']*ratios_XCLASS['14N/15N'][SSC['num']]['median']\n hnc_hco_p16 = ratios_XCLASS['H15NC/HCO+'][SSC['num']]['16th']*ratios_XCLASS['14N/15N'][SSC['num']]['median']\n hnc_hco_p84 = ratios_XCLASS['H15NC/HCO+'][SSC['num']]['84th']*ratios_XCLASS['14N/15N'][SSC['num']]['median']\n hnc_hco_low = hnc_hco_med-hnc_hco_p16\n hnc_hco_hig = hnc_hco_p84=hnc_hco_med\n HNC_HCO.append( np.log10(hnc_hco_med) )\n HNC_HCO_err.append( [0.434*hnc_hco_low/hnc_hco_med,0.434*hnc_hco_hig/hnc_hco_med] )\n except:\n HCO_HCN.append( np.nan )\n HCO_HCN_err.append( [np.nan,np.nan] )\n\n # comparison from Baan+08\n B_hcn = [318.2, 14]\n B_hnc = [234.0, 7]\n B_hco = [276.1, 14]\n B_hco_hcn = [B_hco[0]/B_hcn[0], B_hco[0]/B_hcn[0]*np.sqrt((B_hco[1]/B_hco[0])**2+(B_hcn[1]/B_hcn[0])**2)]\n B_hnc_hcn = [B_hnc[0]/B_hcn[0], B_hnc[0]/B_hcn[0]*np.sqrt((B_hnc[1]/B_hnc[0])**2+(B_hcn[1]/B_hcn[0])**2)]\n B_hnc_hco = [B_hnc[0]/B_hco[0], B_hnc[0]/B_hco[0]*np.sqrt((B_hnc[1]/B_hnc[0])**2+(B_hco[1]/B_hco[0])**2)]\n B_HCO_HCN = [np.log10(B_hco_hcn[0]), 0.434*B_hco_hcn[1]/B_hco_hcn[0]]\n B_HNC_HCN = [np.log10(B_hnc_hcn[0]), 0.434*B_hnc_hcn[1]/B_hnc_hcn[0]]\n B_HNC_HCO = [np.log10(B_hnc_hco[0]), 0.434*B_hnc_hco[1]/B_hnc_hco[0]]\n\n def format_panel(ax):\n ax.xaxis.set_major_locator(MultipleLocator(0.5))\n ax.xaxis.set_minor_locator(MultipleLocator(0.25))\n ax.yaxis.set_major_locator(MultipleLocator(0.5))\n ax.yaxis.set_minor_locator(MultipleLocator(0.25))\n ax.set_axisbelow(True)\n ax.grid(axis='both', which='both')\n\n def label_regions(ax):\n ax.text(0.95, 0.9, 'XDR', color='k', transform=ax.transAxes, ha='right', va='top', weight='bold', fontsize=16)\n ax.text(0.05, 0.1, 'PDR', color='k', transform=ax.transAxes, ha='left', va='bottom', weight='bold', fontsize=16)\n\n # panel 1: HCO+/HCN over HNC/HCO+\n ax = axes[0][0]\n ax.plot([-10,10],[10,-10], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([-10,10],[10,-10],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c,s in zip(HNC_HCO, HCO_HCN, HNC_HCO_err, HCO_HCN_err, colors, SSCs):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=[[a_err[0]],[a_err[1]]], yerr=[[b_err[0]],[b_err[1]]], marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, label='SSC '+str(s['no']), zorder=3)\n ax.errorbar(B_HCO_HCN[0],B_HNC_HCO[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', label=r'NGC 253 (Baan +08)', zorder=4)\n ax.set_xlim(-0.75,0.75)\n ax.set_ylim(-0.85,0.65)\n format_panel(ax)\n ax.set_ylabel(r'log N(HCO$^+$) / N(HCN)', fontsize=12)\n\n # panel 2: HNC/HCN over HCO/HCN\n ax = axes[0][1]\n ax.plot([0,0],[-10,10], ls='-', lw=1, c='grey', zorder=2)\n ax.fill_between([0,10],[-10,-10],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c in zip(HNC_HCN, HCO_HCN, HNC_HCN_err, HCO_HCN_err, colors):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=[[a_err[0]],[a_err[1]]], yerr=[[b_err[0]],[b_err[1]]], marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, zorder=3)\n ax.errorbar(B_HNC_HCN[0],B_HCO_HCN[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', zorder=4)\n ax.set_xlim(-0.95,0.55)\n ax.set_ylim(-0.85,0.65)\n format_panel(ax)\n ax.tick_params(labelbottom=True)\n ax.set_xlabel(r'log N(HNC) / N(HCN)', fontsize=12)\n\n # panel 3: HNC/HCO over HNC/HCN\n ax = axes[1][0]\n ax.plot([-10,10],[0,0], ls='-', lw=1, c='grey', 
zorder=2)\n ax.fill_between([-10,10],[0,0],[10,10], color='lightgrey', alpha=0.5, zorder=1)\n label_regions(ax)\n for a,b,a_err,b_err,c in zip(HNC_HCO, HNC_HCN, HNC_HCO_err, HNC_HCN_err, colors):\n if np.isfinite(a) and np.isfinite(b):\n ax.errorbar(a,b, xerr=[[a_err[0]],[a_err[1]]], yerr=[[b_err[0]],[b_err[1]]], marker='o', ms=5, lw=0, color=c, elinewidth=1, ecolor=c, zorder=3)\n ax.errorbar(B_HNC_HCO[0],B_HNC_HCN[0], xerr=B_HCO_HCN[1], yerr=B_HNC_HCO[1], marker='o', ms=5, lw=0, color='lime', elinewidth=1, ecolor='lime', zorder=4)\n ax.set_xlim(-0.75,0.75)\n ax.set_ylim(-1.05,0.45)\n format_panel(ax)\n ax.set_xlabel(r'log N(HNC$^{**}$) / N(HCO$^+$)', fontsize=12)\n ax.set_ylabel(r'log N(HNC) / N(HCN)', fontsize=12)\n\n # panel 4: legend\n ax = axes[1][1]\n ax.set_axis_off()\n fig.legend(loc=3, bbox_to_anchor=(0.55,0.05,0.14,0.3), ncol=1, mode=\"expand\", borderaxespad=0., fontsize=12, frameon=False)\n\n savepath = escape_fname(os.path.join(plotdir, '10.results', 'XDR-PDR_column_density.pdf'))\n os.system('mkdir -p '+os.path.dirname(savepath))\n fig.savefig(savepath, dpi=300, bbox_inches='tight')", "def plot_NODE_err(err, tseries, soln_names, var_string,**kwargs):\n\n ky1 = soln_names[0]; ky2 = soln_names[1]; ky3 = soln_names[2]\n if 'unit' in kwargs:\n t_unit = kwargs['unit']\n else:\n t_unit = 'seconds'\n\n freq = tseries.size//20\n\n fig = plt.figure(figsize=(16,4))\n ax1 = fig.add_subplot(1, 2, 1)\n ax1.plot(tseries[:], err[ky1][:], 'r-s', markersize=8,\n label='$\\mathbf{%s}$'%(var_string[ky1]),lw=2, markevery=freq)\n ymax_ax1 = err[ky1][:].max()\n ax1.set_xlabel('Time (%s)'%t_unit);lg=plt.legend(ncol=2, fancybox=True,)\n\n ax2 = fig.add_subplot(1, 2, 2)\n ax2.plot(tseries[:], err[ky2][:], 'b-o', markersize=8,\n label='$\\mathbf{%s}$'%(var_string[ky2]), lw=2, markevery=freq)\n ax2.plot(tseries[:], err[ky3][:], 'g-^', markersize=8,\n label='$\\mathbf{%s}$'%(var_string[ky3]), lw=2, markevery=freq-10)\n ymax_ax2 = np.maximum(err[ky2][:].max(), err[ky3][:].max())\n ax2.set_xlabel('Time (%s)'%t_unit);lg=plt.legend(ncol=2, fancybox=True,)\n\n if 'mark' in kwargs:\n tr_mark = kwargs['mark']\n ax1.vlines(tseries[tr_mark], 0, ymax_ax1, colors ='k', linestyles='dashdot')\n ax2.vlines(tseries[tr_mark],0,ymax_ax2, colors = 'k', linestyles ='dashdot')\n\n if 'metric' in kwargs:\n if kwargs['metric'] == 'rel':\n fig.suptitle('Relative errors of PODNODE NIROM solutions', fontsize=18)\n elif kwargs['metric'] == 'rms':\n fig.suptitle('Spatial RMS errors of PODNODE NIROM solutions', fontsize=18)\n else:\n fig.suptitle('Spatial RMS errors of PODNODE NIROM solutions', fontsize=18)", "def plot_lastreviews_means_and_errors_scaled(H_in_HH_mean, H_in_HH_error, M_in_MM_mean, M_in_MM_error, L_in_LL_mean, L_in_LL_error,\n H_in_HL_mean, H_in_HL_error, L_in_HL_mean, L_in_HL_error, H_in_HM_mean, H_in_HM_error,\n M_in_HM_mean, M_in_HM_error, M_in_ML_mean, M_in_ML_error, L_in_ML_mean, L_in_ML_error):\n plt.figure(figsize=(12, 9)) \n\n # create the fig. 
and axes.\n ax = plt.subplot(111)\n ax.spines[\"top\"].set_visible(False) \n ax.spines[\"right\"].set_visible(False)\n\n # define the color to use\n light_green = (152, 223, 138)\n strong_green = (44, 160, 44)\n light_red = (255, 152, 150)\n orange = (255, 187, 120)\n strong_red = (214, 39, 40)\n\n strong_green = rgb_to_matplot_lib(strong_green)\n light_green = rgb_to_matplot_lib(light_green)\n strong_red = rgb_to_matplot_lib(strong_red)\n light_red = rgb_to_matplot_lib(light_red)\n orange = rgb_to_matplot_lib(orange)\n\n # axis \n ax.set_ylabel('Rating', fontsize = 14)\n ax.tick_params(axis='both', labelsize=14)\n\n # plot small dash lines to follow the grading \n for y in np.arange(3.5, 4.6, 0.1): \n ax.plot(range(0, 45), [y] * len(range(0, 45)), \"--\", lw=0.5, color=\"black\", alpha=0.3)\n\n\n # set titles\n ax.set_title('10+ reviews average rating for each case in each group', fontsize = 14)\n\n plt.errorbar(1, H_in_HH_mean, H_in_HH_error, lineStyle= None, capsize=5, marker=\"^\", color=strong_green)\n plt.errorbar(2, M_in_MM_mean, M_in_MM_error, lineStyle= None, capsize=5, marker=\"^\", color=orange)\n plt.errorbar(3, L_in_LL_mean, L_in_LL_error, lineStyle= None, capsize=5, marker=\"^\", color=strong_red)\n plt.errorbar(4, H_in_HL_mean, H_in_HL_error, lineStyle= None, capsize=5, marker=\"^\", color=light_green)\n plt.errorbar(5, L_in_HL_mean, L_in_HL_error, lineStyle= None, capsize=5, marker=\"^\", color=light_red)\n plt.errorbar(6, H_in_HM_mean, H_in_HM_error, lineStyle= None, capsize=5, marker=\"^\", color=light_green)\n plt.errorbar(7, M_in_HM_mean, M_in_HM_error, lineStyle= None, capsize=5, marker=\"^\", color=orange)\n plt.errorbar(8, M_in_ML_mean, M_in_ML_error, lineStyle= None, capsize=5, marker=\"^\", color=orange)\n plt.errorbar(9, L_in_ML_mean, L_in_ML_error, lineStyle= None, capsize=5, marker=\"^\", color=light_red) \n\n plt.text(0.7, 3.41, \"({:04.3f})\".format(H_in_HH_mean), fontsize=14, color=strong_green)\n plt.text(1.7, 3.41, \"({:04.3f})\".format(M_in_MM_mean), fontsize=14, color=orange)\n plt.text(2.7, 3.41, \"({:04.3f})\".format(L_in_LL_mean), fontsize=14, color=strong_red)\n plt.text(3.7, 3.41, \"({:04.3f})\".format(H_in_HL_mean), fontsize=14, color=light_green)\n plt.text(4.7, 3.41, \"({:04.3f})\".format(L_in_HL_mean), fontsize=14, color=light_red)\n plt.text(5.7, 3.41, \"({:04.3f})\".format(H_in_HM_mean), fontsize=14, color=light_green)\n plt.text(6.7, 3.41, \"({:04.3f})\".format(M_in_HM_mean), fontsize=14, color=orange)\n plt.text(7.7, 3.41, \"({:04.3f})\".format(M_in_ML_mean), fontsize=14, color=orange)\n plt.text(8.7, 3.41, \"({:04.3f})\".format(L_in_ML_mean), fontsize=14, color=light_red)\n\n # set ticks label\n ax.set_xticks(range(1,10))\n ax.set_xticklabels(('H in HH', 'M in MM', 'L in LL', 'H in HL', 'L in HL', 'H in HM', 'M in HM', 'M in ML', 'L in ML'))\n\n #set ticks color\n colors = [strong_green, orange, strong_red, light_green, light_red, light_green, orange, orange, light_red]\n for xtick, color in zip(ax.get_xticklabels(), colors):\n xtick.set_color(color)\n\n plt.ylim([3.4,4.6])\n plt.xlim([0.5,9.5])\n plt.show()", "def plot_error(k_vals, error):\n\n plt.plot(k_vals,error)\n plt.xlabel('k-value')\n plt.ylabel('Cost')\n plt.show()", "def plot_series(self, dates, mean_series, year_series, std_series, savename):\n year = dates[0].year\n year_label = 'Discharge for '+str(year)\n dates2 = np.concatenate([dates,dates[::-1]])\n std2 = np.concatenate([std_series+mean_series,\\\n (mean_series-std_series)[::-1]])\n fig = plt.figure(figsize=(10,5))\n ax = 
fig.add_subplot(111)\n p1 = ax.plot(dates,mean_series,'-k', label = 'Mean Discharge')\n p3 = ax.fill(dates2,std2,facecolor = 'gray',label='Mean Variance')\n p2 = ax.plot(dates,year_series,'-r', label = year_label)\n ax.set_ylabel('$m^3/s$')\n ax.set_title('Brazos River Discharge Near Rosharon, TX')\n plt.ylim([0,max(year_series)+500])\n plt.legend(fontsize='x-small')\n idx = [i for i in range(dates.shape[0]) if (dates[i].day == 1)]\n dt_form = '%b'\n plt.xticks(dates[idx],[datetime.datetime.strftime(dates[i],dt_form) for i in idx])\n plt.savefig(savename)", "def ExponentialTransformErrVarShapingFactor(data, comparedata, G=10):\n __standartChecksBeforeStart(data, comparedata)\n\n errArr = np.array(__calcSimpleDeviation(data, comparedata))\n\n return -G * np.nanvar(errArr)", "def plot_ts(data, enc_mean, dec_mean):\n # enc_mean, enc_cov = enc\n # dec_mean, dec_cov = dec\n\n batch_size = data.size()[0]\n D = 2\n N = int(data.size()[1]/D)\n\n f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=False, sharex=True)\n # plot data\n plt.axes(ax1)\n ax1.set_ylim(-0.1,0.1)\n\n sns.tsplot(data.view(batch_size,N,-1).data.numpy())\n\n # plot reconstruction\n plt.axes(ax2)\n ax2.set_ylim(-0.1,0.1)\n sns.tsplot(dec_mean.view(batch_size,N,-1).data.numpy())\n\n plt.axes(ax3)\n sample_Sigma = bivech2(enc_mean.view(batch_size,N,-1))\n sample_vechSigma = bvech(sample_Sigma).data.numpy()\n \n sns.tsplot(sample_vechSigma)\n\n # plot latent variables\n # sample_Sigma = ivech2x(enc_cov.data.numpy())\n # sample_vechSigma = vechx(sample_Sigma.reshape((-1,N,N)))\n # sns.tsplot(sample_vechSigma)", "def make_tuning_plot_rmse(df, error_col_name=\"rmse\",\n error_title = \"Top 10% RMSE\",\n cutoff = 0.10):\n\n df = df.copy()\n\n # Get the regularizer and reset coeff\n coeff = [float(i.split(\"evidence_new_reg_\")[1]) if \"evidence\" in i else i for i in df['method_name']]\n df[\"method_name\"] = coeff\n df[\"Data\"] = convert_dataset_names(df[\"dataset\"])\n df[\"Method\"] = df[\"method_name\"]\n\n # Get appropriate datasets\n trials = 'trial_number'\n methods = 'Method'\n\n # Make area plot\n uniq_methods = set(df[\"Method\"].values)\n method_order = sorted(uniq_methods,\n key=lambda x : x if isinstance(x, float) else -1)\n method_df = []\n datasets = set()\n for data, sub_df in df.groupby(\"Data\"):\n # Add datasets\n datasets.add(data)\n rmse_sub = sub_df[error_col_name]\n methods_sub = sub_df[\"Method\"]\n trials_sub= sub_df['trial_number']\n for method_idx, method in enumerate(method_order):\n # Now summarize these lines\n bool_select = (methods_sub == method)\n\n rmse_method = rmse_sub[bool_select]\n trials_temp = trials_sub[bool_select]\n areas = []\n # create area!\n for trial, rmse_trial in zip(trials_sub, rmse_method):\n num_tested = len(rmse_trial)\n cutoff_index = int(cutoff * num_tested) - 1\n rmse_val = rmse_trial[-cutoff_index]\n to_append = {error_title: rmse_val,\n \"Regularizer Coeff, $\\lambda$\": method,\n \"method_name\": method,\n \"Data\": data,\n \"Trial\" : trial}\n method_df.append(to_append)\n method_df = pd.DataFrame(method_df)\n\n # Filter out dropout\n method_df = method_df[[i != \"dropout\" for i in\n method_df['method_name']]].reset_index()\n\n # Normalize by dataset\n for dataset in datasets:\n # Make a divison vector of ones and change it to a different value only\n # for the correct dataset of interest to set max rmse to 1\n division_factor = np.ones(len(method_df))\n indices = (method_df[\"Data\"] == dataset)\n\n # Normalize with respect to the ensemble so that this is 1\n max_val = 
method_df[indices].query(\"method_name == 'ensemble'\").mean()[error_title]\n\n # Take the maximum of the AVERAGE so it's normalized to 1\n division_factor[indices] = max_val\n method_df[error_title] = method_df[error_title] / division_factor\n\n method_df_evidence = method_df[[isinstance(i, float) for i in\n method_df['method_name']]].reset_index()\n method_df_ensemble = method_df[[\"ensemble\" in str(i) for i in\n method_df['method_name']]].reset_index()\n\n data_colors = {\n dataset : sns.color_palette()[index]\n for index, dataset in enumerate(datasets)\n }\n\n min_x = np.min(method_df_evidence[\"Regularizer Coeff, $\\lambda$\"])\n max_x= np.max(method_df_evidence[\"Regularizer Coeff, $\\lambda$\"])\n\n sns.lineplot(x=\"Regularizer Coeff, $\\lambda$\", y=error_title,\n hue=\"Data\", alpha=0.8, data=method_df_evidence,\n palette = data_colors)\n\n for data, subdf in method_df_ensemble.groupby(\"Data\"):\n\n color = data_colors[data]\n area = subdf[error_title].mean()\n std = subdf[error_title].std()\n plt.hlines(area, min_x, max_x, linestyle=\"--\", color=color, alpha=0.8)\n\n # Add ensemble baseline\n ensemble_line = plt.plot([], [], color='black', linestyle=\"--\",\n label=\"Ensemble\")\n # Now make ensemble plots\n plt.legend(bbox_to_anchor=(1.1, 1.05))", "def SARIMAX_error(series, p=10, d=2, q=2):\n\n X = series\n\n # set trainset to include all but last 48 months (4 years)\n # only training on data between 9-4 years ago\n train_size = int(len(X) - 48)\n train, test = X[-108:train_size], X[train_size:]\n\n model = SARIMAX(train, order=(p, d, q), freq='MS',\n initialization='approximate_diffuse')\n\n results = model.fit()\n\n # Predict 48 months from end of train set\n forecast = results.get_forecast(steps=48)\n pred_ci = forecast.conf_int(alpha=.05)\n\n predictions = forecast.predicted_mean\n\n rmse = RMSE(test, predictions)\n # pct = error_as_pct(rmse, train[-1], test[-1])\n\n return pred_ci, rmse, (train[-1], test[-1]) # , pct", "def plot_acc_vs_nsn(df, settings):\n plt.clf()\n fig = plt.figure()\n fig, ax1 = plt.subplots()\n ax1.grid(True)\n ax1.set_axisbelow(True)\n\n models_list = [\"randomforest\", \"vanilla\"]\n redshift_list = df[\"redshift\"].unique()\n\n label_dic = {\"randomforest\": \"Random Forest\", \"vanilla\": \"Baseline RNN\"}\n\n group_cols = [\"model_name_noseed\", \"model_type\", \"redshift\", \"data_fraction\"]\n keep_cols = group_cols + [\"all_accuracy\"]\n\n # Cast to float for groupby operation (all_accuracy is type `O`)\n df.all_accuracy = df.all_accuracy.astype(float)\n\n df_errorbars = (\n df[keep_cols]\n .groupby(group_cols)\n .mean()\n .rename(columns={\"all_accuracy\": \"all_accuracy_mean\"})\n .reset_index()\n )\n df_errorbars[\"all_accuracy_std\"] = (\n df[keep_cols]\n .groupby(group_cols)\n .std()\n .rename(columns={\"all_accuracy\": \"all_accuracy_std\"})\n .reset_index()[\"all_accuracy_std\"]\n )\n\n for i, basemodel in enumerate(models_list):\n for z in redshift_list:\n df_sel = df_errorbars[\n (df_errorbars[\"model_type\"] == basemodel)\n & (df_errorbars[\"redshift\"] == z)\n ]\n # Plot these independently to avoid polluting legend\n ax1.errorbar(\n df_sel[\"data_fraction\"],\n df_sel[\"all_accuracy_mean\"],\n yerr=df_sel[\"all_accuracy_std\"],\n c=CONTRAST_COLORS[i],\n fmt=\"none\",\n zorder=3 if basemodel == \"vanilla\" else 1,\n )\n ax1.plot(\n df_sel[\"data_fraction\"],\n df_sel[\"all_accuracy_mean\"],\n label=label_dic[basemodel],\n marker=MARKER_DIC[basemodel],\n c=CONTRAST_COLORS[i],\n fillstyle=FILL_DIC[z],\n lw=0,\n markersize=10,\n 
markeredgewidth=1.5,\n )\n legend_elements = [\n Line2D(\n [0],\n [0],\n marker=\"s\",\n lw=0,\n color=\"indigo\",\n label=\"Baseline RNN\",\n markerfacecolor=\"w\",\n markersize=12,\n ),\n Line2D(\n [0],\n [0],\n marker=\"o\",\n lw=0,\n color=\"darkorange\",\n label=\"Random Forest\",\n markerfacecolor=\"w\",\n markersize=12,\n ),\n ]\n\n ax1.legend(handles=legend_elements, loc=4)\n ax1.set_ylabel(\"accuracy\", fontsize=18)\n ax1.set_ylim(91, 100)\n ax1.set_xlim(0.025)\n ax1.set_xlabel(\"# SNe for training\", fontsize=18)\n\n # exchange axis and reformat\n ax2 = ax1.twiny()\n ax1Xs = [round(i, 1) for i in ax1.get_xticks()]\n ax2Xs = []\n for X in ax1Xs:\n # BEWARE: only valid with SALTfitted sample\n ax2Xs.append(\"{:0.1e}\".format(int(X * 881_969 * 0.8)))\n\n ax1.set_xticklabels(ax2Xs)\n ax2.set_xticks(ax1Xs)\n ax2.set_xbound(ax1.get_xbound())\n ax2.set_xticklabels(ax1Xs)\n\n title = ax1.set_title(\"data fraction\", fontsize=18)\n title.set_y(1.1)\n plt.tight_layout()\n\n fig.subplots_adjust(top=0.85)\n fig.savefig(f\"{settings.figures_dir}/accuracy_vs_nSN.png\")\n plt.close()\n plt.clf()", "def plot_diff(self):\n if not(self.is_attribute(\"time\") & self.is_attribute(\"intensity_up\") & \n self.is_attribute(\"intensity_up_sigma\") &\n self.is_attribute(\"intensity_down\") & \n self.is_attribute(\"intensity_down_sigma\") &\n self.is_attribute(\"intensity_up_total\") &\n self.is_attribute(\"intensity_down_total\")):\n return\n fig, ax = plt.subplots()\n ax.set_title(\"Polarized intensity: I_up - I_down\")\n ax.set_xlabel(\"Time (microseconds)\")\n ax.set_ylabel('Intensity')\n \n np_time = numpy.array(self.time, dtype=float)\n np_up = numpy.array(self.intensity_up, dtype=float)\n np_sup = numpy.array(self.intensity_up_sigma, dtype=float)\n np_up_mod = numpy.array(self.intensity_up_total, dtype=float)\n np_down = numpy.array(self.intensity_down, dtype=float)\n np_sdown = numpy.array(self.intensity_down_sigma, dtype=float)\n np_down_mod = numpy.array(self.intensity_down_total, dtype=float)\n np_diff = np_up - np_down\n np_diff_mod = np_up_mod - np_down_mod\n np_sdiff = numpy.sqrt(numpy.square(np_sup)+numpy.square(np_sdown))\n\n ax.plot([np_time.min(), np_time.max()], [0., 0.], \"b:\")\n ax.plot(np_time, np_diff_mod, \"k-\",\n label=\"model\")\n ax.errorbar(np_time, np_diff, yerr=np_sdiff, fmt=\"ko\", alpha=0.2,\n label=\"experiment\")\n\n y_min_d, y_max_d = ax.get_ylim()\n param = y_min_d-(np_diff-np_diff_mod).max()\n\n ax.plot([np_time.min(), np_time.max()], [param, param], \"k:\")\n ax.plot(np_time, np_diff-np_diff_mod+param, \"r-\", alpha=0.7,\n label=\"difference\")\n ax.legend(loc='upper right')\n fig.tight_layout()\n return (fig, ax)", "def plot_insta_err(self, ax=None):\n if ax is None:\n ax = plt.gca()\n ax.set_yscale('log')\n ax.plot(list(range(1, self.max_dets)), self.errors[0], label='independent')\n ax.plot(list(range(1, self.max_dets)), self.errors[1], label='correlated')\n ax.set_ylabel(r'Stochastic error in $E_\\mathrm{corr}$ / ha')\n ax.set_xlabel('Number of determinants in estimator')\n ax.axhline(np.sqrt(self.proje_var[0]), linestyle='--', color='black', label='reference')\n ax.legend()\n return ax", "def plot_error_curve(ax,\n y_true,\n y_pred,\n uncert_meas,\n label,\n every_nth,\n window_size=10):\n error = (y_true[::every_nth] - y_pred[::every_nth])**2\n error, uncert_meas = np.array(error), np.array(uncert_meas[::every_nth])\n sorted_inds = generate_order(uncert_meas)\n sorted_error = error[sorted_inds]\n smooth_error = moving_average(sorted_error, window_size)\n 
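# plot the moving-average of the squared error against the uncertainty-ordered sample index\n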
ax.plot(np.arange(len(smooth_error)), smooth_error, label=label)", "def plot_error(self, maxstep=20):\n plt.ion()\n plt.xlabel(\"step\")\n plt.ylabel(\"Ave Logloss (bits)\")\n train_errors = []\n if self.dataset.test:\n test_errors = []\n for i in range(maxstep):\n self.learn(1)\n train_errors.append( sum(self.logloss(tple) for tple in self.dataset.train)\n /len(self.dataset.train))\n if self.dataset.test:\n test_errors.append( sum(self.logloss(tple) for tple in self.dataset.test)\n /len(self.dataset.test))\n plt.plot(range(1,maxstep+1),train_errors,\n label=str(self.num_classes)+\" classes. Training set\")\n if self.dataset.test:\n plt.plot(range(1,maxstep+1),test_errors,\n label=str(self.num_classes)+\" classes. Test set\")\n plt.legend()\n plt.draw()", "def plot_mte_soep(rslt, init_file, nbootstraps=250):\n # mte per year of university education\n mte = rslt[\"mte\"] / 5\n quantiles = rslt[\"quantiles\"]\n\n # bootstrap 90 percent confidence bands\n np.random.seed(6295)\n mte_boot = bootstrap(init_file, nbootstraps)\n\n # mte per year of university education\n mte_boot = mte_boot / 5\n\n # Get standard error of MTE at each gridpoint u_D\n mte_boot_std = np.std(mte_boot, axis=1)\n\n # Compute 90 percent confidence intervals\n con_u = mte + norm.ppf(0.95) * mte_boot_std\n con_d = mte - norm.ppf(0.95) * mte_boot_std\n\n # Plot\n ax = plt.figure(figsize=(17.5, 10)).add_subplot(111)\n\n ax.set_ylabel(r\"$MTE$\", fontsize=20)\n ax.set_xlabel(\"$u_D$\", fontsize=20)\n ax.tick_params(\n axis=\"both\", direction=\"in\", length=5, width=1, grid_alpha=0.25, labelsize=14\n )\n ax.xaxis.set_ticks_position(\"both\")\n ax.yaxis.set_ticks_position(\"both\")\n ax.xaxis.set_ticks(np.arange(0, 1.1, step=0.1))\n ax.yaxis.set_ticks(np.arange(-1.2, 1.2, step=0.2))\n\n ax.margins(x=0.005)\n ax.margins(y=0.05)\n\n # ax.set_ylim([-1.2, 1.2])\n ax.set_xlim([0, 1])\n\n ax.plot(quantiles, mte, label=\"$no migrants$\", color=\"orange\", linewidth=4)\n ax.plot(quantiles, con_u, color=\"orange\", linestyle=\":\")\n ax.plot(quantiles, con_d, color=\"orange\", linestyle=\":\")\n\n plt.show()\n\n return mte, quantiles", "def plot_errors(loss_train, loss_val, jet):\n plt.plot(list(range(len(loss_train))), loss_train, 'g', label='Training loss')\n plt.plot(list(range(len(loss_val))), loss_val, 'b', label='Validation loss')\n plt.title('Training and Validation loss for jet: {jet}'.format(jet=jet))\n plt.xlabel('Epochs')\n plt.ylabel('Loss')\n plt.legend()\n plt.show()", "def plot_pretty():\n\n ts, ys, lin_model, K, us, dt_control, biass, end_time = simulate()\n plt.style.use('seaborn-deep')\n\n black = '#2B2B2D'\n red = '#E90039'\n orange = '#FF1800'\n white = '#FFFFFF'\n yellow = '#FF9900'\n\n plt.figure(figsize=(12.8, 9.6))\n plt.rcParams.update({'font.size': 16, 'text.color': white, 'axes.labelcolor': white,\n 'axes.edgecolor': white, 'xtick.color': white, 'ytick.color': white})\n\n plt.gcf().set_facecolor(black)\n\n plt.subplot(2, 3, 1)\n plt.plot(ts, ys[:, 2], color=orange)\n plt.axhline(lin_model.yd2n(K.ysp)[1], color=white)\n plt.title(r'$C_{FA}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 2)\n plt.plot(ts, ys[:, 0], color=orange)\n plt.axhline(lin_model.yd2n(K.ysp)[0], color=white)\n plt.title(r'$C_{G}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 3)\n plt.plot(ts, ys[:, 3], color=orange)\n plt.title(r'$C_{E}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 4)\n plt.plot(ts, us[:, lin_model.inputs[1]], 
color=red)\n plt.title(r'$F_{m, in}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 5)\n plt.plot(ts, us[:, lin_model.inputs[0]], color=red)\n plt.title(r'$F_{G, in}$')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n plt.subplot(2, 3, 6)\n plt.plot(\n numpy.arange(dt_control, end_time, dt_control),\n biass[:, 1],\n color=red\n )\n plt.plot(\n numpy.arange(dt_control, end_time, dt_control),\n biass[:, 0],\n color=yellow\n )\n plt.legend([r'$C_{FA}$', r'$C_G$'], facecolor=black)\n plt.title('bias')\n plt.xlim([0, ts[-1]])\n plt.gca().set_facecolor(black)\n\n # plt.suptitle('Closedloop bioreactor without noise')\n plt.tight_layout(rect=[0, 0.03, 1, 0.95])\n plt.savefig('no_noise_pretty.png', transparent=True)\n plt.show()", "def plot_eeg(Data,start_sec = 0, window_size = 10, amp = 200, figure_size = (15,8),\n dpi=600, detrend = True, envelope=False, plot_bad = False, exclude = [], grid=True, \n xtickspace = 1,saveplot = None, subplot = None ,spines = ['left', 'bottom'],time_out = False, common_ref=False, **kwargs):\n #geting data from Data_dict\n data = Data.data\n time_vec = Data.time_vec\n sample_rate = Data.sample_rate\n ch_labels = Data.ch_labels\n if plot_bad:\n badch = np.array([],dtype=int) # a empty array \n else:\n badch = Data.bad_channels\n \n if type(exclude) == list:\n for item in exclude:\n if type(item) == str:\n idx = [i for i,x in enumerate(ch_labels) if x == item]\n badch = sorted(set(np.append(badch,idx)))\n elif type(item) == int:\n idx = item\n badch = sorted(set(np.append(badch,idx)))\n\n elif type(exclude) == str:\n idx = [i for i,x in enumerate(ch_labels) if x == exclude]\n badch = sorted(set(np.append(badch,idx)))\n elif type(exclude) == int:\n idx = exclude\n badch = sorted(set(np.append(badch,idx)))\n \n # Transforming the start_sec in points\n start_sec *= sample_rate\n start_sec = int(start_sec)\n # Transforming the window_size in points\n window_size *= sample_rate\n window_size = int(window_size)\n if subplot == None: \n # Creating the figure \n f = plt.figure(figsize=figure_size,dpi=dpi)\n # creating the axes\n sp = f.add_subplot(111)\n else:\n sp = subplot\n # creating a vector with the desired index\n time_window = np.arange(start_sec, start_sec + window_size)\n # declaring tick variables\n yticklocs = []\n yticklabel = [] \n ch_l = 1\n if len(data.shape) == 1:\n # in the axes, plot the raw signal for each channel with a amp diference \n if detrend:\n sp.plot(time_vec[time_window],(ch_l)*amp + sig.detrend(data[time_window]),**kwargs)\n else:\n sp.plot(time_vec[time_window],(ch_l)*amp + data[time_window],**kwargs)\n if envelope:\n sp.plot(time_vec[time_window],(ch_l)*amp + np.abs(sig.hilbert(data[time_window])),**kwargs)\n # appeng the channel label and the tick location\n if ch_labels is None:\n yticklabel.append(ch_l) \n else:\n yticklabel.append(ch_labels[0])\n yticklocs.append((ch_l)*amp)\n else:\n # Loop to plot each channel\n for ch in [x for x in range(data.shape[1]) if x not in badch]:\n # in the axes, plot the raw signal for each channel with a amp diference \n if detrend:\n sp.plot(time_vec[time_window],(ch_l)*amp + sig.detrend(data[time_window,ch]),**kwargs)\n else:\n sp.plot(time_vec[time_window],(ch_l)*amp + data[time_window,ch],**kwargs)\n if envelope:\n sp.plot(time_vec[time_window],(ch_l)*amp + np.abs(sig.hilbert(data[time_window,ch])),**kwargs)\n # appeng the channel label and the tick location\n if ch_labels is None:\n yticklabel.append(ch_l) \n else:\n yticklabel.append(ch_labels[ch])\n \n 
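# remember this channel's vertical offset before moving on to the next trace\n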
yticklocs.append((ch_l)*amp)\n ch_l += 1\n if common_ref: \n sp.plot(time_vec[time_window],(ch_l)*amp + Data.common_ref[time_window],**kwargs)\n yticklocs.append((ch_l)*amp)\n yticklabel.append('common_ref')\n\n\n adjust_spines(sp, spines)\n if len(spines) > 0:\n # changing the x-axis (label, limit and ticks)\n plt .xlabel('time (s)', size = 16)\n #xtickslocs = np.linspace(int(time_vec[time_window[0]]),int(time_vec[time_window[-1]]),int(window_size/(sample_rate*xtickspace)),endpoint=True)\n \n xtickslocs = np.arange(math.ceil(time_vec[time_window[0]]),math.ceil(time_vec[time_window[-1]]+xtickspace),xtickspace) \n xtickslabels = ['']*len(xtickslocs)\n for x in np.arange(0,len(xtickslocs),10):\n xtickslabels[x] = xtickslocs[x]\n plt.xticks(xtickslocs,xtickslabels,size = 16)\n # changing the y-axis\n plt.yticks(yticklocs, yticklabel, size=16)\n \n if grid: \n ax = plt.gca()\n ax.xaxis.grid(True)\n \n sp.set_xlim(time_vec[time_window[0]],time_vec[time_window[-1]]+np.diff(time_vec[time_window[0:2]]))\n #sp.set_ylim(0,(ch_l)*amp)\n if time_out:\n return time_vec[time_window[0]], time_vec[time_window[-1]]+np.diff(time_vec[time_window[0:2]]), sp\n if saveplot != None:\n if type(saveplot) == str: \n plt.savefig(saveplot, bbox_inches='tight')\n else:\n raise Exception('saveplot should be a string')", "def plot_msd(msd, h_exp):\n fig, ax = plt.subplots(1, 2, figsize = (10, 10))\n av_msd = np.mean(msd, axis = 0)\n\n for p in np.arange(0, msd.shape[0], step = 1):\n for t in np.arange(0, msd.shape[1], step = 1): \n ax[0].plot(t, msd[p, t], 'bx')\n ax[1].plot(t, av_msd[t], 'ro')\n ax[0].set_xlabel('Time lag (number of steps)')\n ax[0].set_ylabel('MSD (pix^2)')\n ax[0].set_title('Individual TAMSDs: H = ' + str(h_exp))\n ax[1].set_xlabel('Time lag (number of steps)')\n ax[1].set_ylabel('MSD (pix^2)')\n ax[1].set_title('Averaged TAMSDs: H = ' + str(h_exp)) \n ax[0].set_xlim([0, np.max(t)])\n ax[1].set_xlim([0, np.max(t)])\n ax[0].set_ylim([0, np.max(msd)]) \n ax[1].set_ylim([0, np.max(av_msd)])", "def plot_mean_econ_loss(fc, sc, economic_loss_array):\n\n fig = plt.figure(figsize=(9, 5), facecolor='white')\n sns.set(style='ticks', palette='Set3')\n # ax = sns.boxplot(economic_loss_array*100, showmeans=True,\n # widths=0.3, linewidth=0.7, color='lightgrey',\n # meanprops=dict(marker='s',\n # markeredgecolor='salmon',\n # markerfacecolor='salmon')\n # )\n ax = sns.boxplot(economic_loss_array * 100, showmeans=True,\n linewidth=0.7, color='lightgrey',\n meanprops=dict(marker='s',\n markeredgecolor='salmon',\n markerfacecolor='salmon')\n )\n sns.despine(top=True, left=True, right=True)\n ax.tick_params(axis='y', left='off', right='off')\n ax.yaxis.grid(True)\n\n intensity_label = sc.intensity_measure_param+' ('\\\n +sc.intensity_measure_unit+')'\n ax.set_xlabel(intensity_label)\n ax.set_ylabel('Loss Fraction (%)')\n ax.set_xticklabels(sc.hazard_intensity_vals);\n ax.set_title('Loss Ratio', loc='center', y=1.04);\n ax.title.set_fontsize(12)\n\n figfile = os.path.join(sc.output_path, 'fig_lossratio_boxplot.png')\n plt.savefig(figfile, format='png', bbox_inches='tight', dpi=300)\n plt.close(fig)", "def plotPredictedError():\n\tglobal normalized\n\n\twarmthPred = []\n\twarmthObserved = []\n\tcompPred = []\n\tcompObserved = []\n\tSStotalWarmth = 0\n\tSSresWarmth = 0\n\tSStotalComp = 0\n\tSSresComp = 0\n\tkeys = parser.getMappings(normalized)[0].keys()\n\tfor key in keys:\n\n\t\tif \"_\" in key:\n\t\t\twarmthAxis, compAxis = 
getPlotData(key)\n\t\t\twarmthPred.append(warmthAxis[3])\n\t\t\twarmthObserved.append(warmthAxis[2])\n\t\t\tcompPred.append(compAxis[3])\n\t\t\tcompObserved.append(compAxis[2])\n\n\tmeanObservedWarmth = np.mean(warmthObserved)\n\tmeanObservedComp = np.mean(compObserved)\n\tfor i in range(0, len(warmthObserved)):\n\t\tSStotalWarmth += (warmthObserved[i] - meanObservedWarmth)**2\n\t\tSSresWarmth += (warmthObserved[i] - warmthPred[i])**2\n\t\tSStotalComp += (compObserved[i] - meanObservedComp)**2\n\t\tSSresComp += (compObserved[i] - compPred[i])**2\n\n\n\tplt.axis([0, 100, 0, 100])\n\tfig = plt.figure(1)\n\tax = fig.add_subplot(111)\n\tslope, intercept, r_value, p_value, std_err = stats.linregress(warmthObserved, warmthPred)\n\tprint(r_value**2)\n\ttext = ax.text(60, 20, \"R^2 value: \" + str(r_value**2) , \\\n fontsize = 12, color = 'black')\n\tplt.title(\"Observed vs Predicted Warmth\")\n\tplt.ylabel(\"Predicted Value\")\n\tplt.xlabel(\"Observed Value\")\n\tplt.scatter(warmthObserved, warmthPred)\n\tplt.plot([0,100], [0,100])\n\tplt.show()\n\n\tfig = plt.figure(1)\n\tax = fig.add_subplot(111)\n\tslope, intercept, r_value, p_value, std_err = stats.linregress(compObserved, compPred)\n\tprint(r_value**2)\n\ttext = ax.text(60, 20, \"R^2 value: \" + str(r_value**2) , \\\n fontsize = 12, color = 'black')\n\tplt.axis([0, 100, 0, 100])\n\tplt.title(\"Observed vs Predicted Competence\")\n\tplt.ylabel(\"Predicted Value\")\n\tplt.xlabel(\"Observed Value\")\n\tplt.scatter(compObserved, compPred)\n\tplt.plot([0,100], [0,100])\n\tplt.show()", "def plot_unit_rate_change(cat_table, title='Unit FR Change', norm_dmso = False, dmso_table = pd.DataFrame([]), **plot_kwargs):\n unit_freq_mean_base = cat_table.query('time < 0 and time > -0.125').groupby(('exp','unit_name'))['spike_freq'].mean() \n unit_freq_mean_end = cat_table.query('time < 1.25 and time > 1.125').groupby(('exp','unit_name'))['spike_freq'].mean()\n \n \n\n for e in cat_table['exp'].unique():\n for unit in cat_table.query('exp == @e')['unit_name'].unique(): \n # if unit_freq_mean_base.loc[e,unit] > 0 and unit_freq_mean_end.loc[e,unit] > 0:\n plt.plot(unit_freq_mean_base.loc[e,unit], unit_freq_mean_end.loc[e,unit], '.', **plot_kwargs)\n \n max_base = max(unit_freq_mean_base)\n max_end = max(unit_freq_mean_end)\n \n plt.plot([0.001,np.ceil(max(max_base,max_end))],[0.001,np.ceil(max(max_base,max_end))],'k')\n\n plt.yscale('log')\n plt.xscale('log')\n plt.xlim([0.001,np.ceil(max(max_base,max_end))])\n plt.ylim([0.001,np.ceil(max(max_base,max_end))])\n plt.axis('equal')\n \n \n plt.ylabel('End mean firing rate (Hz)')\n plt.xlabel('Baseline mean firing rate (Hz)')\n plt.title(title)\n return", "def visualize_tma_time_series(data_path):\n\n X, y = load_tma_data(data_path)\n\n fig = plt.figure()\n ax = fig.add_subplot('111')\n\n for i in range(X.shape[0]):\n C = X[i, ...].reshape(X.shape[1], X.shape[2])\n l = y[i]\n ax.imshow(C, vmin=0, vmax=1)\n ax.set_title('Label : %i' % l)\n plt.pause(0.1)\n\n # labels = np.unique(y)\n # fig, axes = plt.subplots(figsize=(13, 4), ncols=4)\n # for i, l in enumerate(labels, start=0):\n # idx = np.where(y == l)[0]\n # temp = np.mean(X[idx, ...], axis=0)\n # temp[:8, :] = temp[:8, :]*6\n # pos = axes[i].imshow(temp, vmin=0, vmax=1)\n # axes[i].set_title(\"Label : %i\" % l)\n # fig.colorbar(pos, ax=axes[i])\n # plt.show()", "def trysavgol(window, order, data, xaxis):\r\n for datum in data:\r\n filt = scipy.signal.savgol_filter(datum,window,order)\r\n f = makeplot(xaxis, [datum, filt],[\"data\",\"filter\"], \"Temperature / 
$[J/k_B]$\",\\\r\n \"Heat Capacity / $[k_B]$\", plainlines = True)\r\n f.show()\r\n f.savefig(\"savgol.svg\")\r\n Tc = np.mean(xaxis[filt == max(filt)])\r\n print(\"The predicted value of Tc is: \"+str(Tc))", "def traces(mndata,Params,srate,imagepath):\n\t#plot high gamma traces\n\t#data should be bandpassed (todo)\n\t#resample to srate\n\tst = resample(Params[\"st\"],srate)\n\ten = resample(Params[\"en\"],srate)\n\tbl_en = resample(Params[\"bl_en\"],srate)\n\tbl_st = resample(Params[\"bl_st\"],srate)\n\tplot_tp = resample(Params[\"plot\"],srate)\n\tcue = resample(500,srate)\n\t\n\tcolors = ['red','orange','green','blue']\n\tx = np.array(range(st,en+1))\n\tf, (ax,ax2) = plt.subplots(1,2, sharex = False)\n\tax.axhline(y = 0,color = 'k',linewidth=2)\n\tax.axvline(x = 0,color='k',linewidth=2)\n\tax.axvline(x = cue,color = 'gray',linewidth = 2)\n\tax.axvline(x = cue+cue,color = 'gray',linewidth = 2)\n\tax.axvspan(cue, cue+cue, facecolor='0.5', alpha=0.25,label = 'cue')\n\n\tfor j in range(len(Params[\"conditions\"])):\n\t\tcondition = Params['conditions'][j]\n\t\ty = mndata[condition]['data']\n\t\tax.plot(x,y, label = condition,linewidth = 2,color = colors[j])\n\t\n\tax.set_ylim((-30,85))\n\tax.set_xlim(st,en)\n\tax.legend()\n\tax.xaxis.set_ticklabels(['', '0', '','500', '', '1000', '', '1500', '', '2000','','2500','', '3000'],minor=False)\n\tax.xaxis.set_ticks(range(st,en,plot_tp))\n\n\tax.set_xlabel(\"time (ms)\")\n\tax.set_ylabel(\"% change baseline\")\n\tax.set_title('Analytic Amplitude - High Gamma (70-150Hz)', fontsize = 18)\n\n\t#plot brain with elec location\n\t#brain = plt.imread(imagepath)\n\t#aa = pylab.mean(brain,2)\n\t#ax2.imshow(aa)\n\t#a2.gray()\n\n\t#brain = Image.open(imagepath)\n\t#ax2.set_axis_off()\n\t#im = plt.imshow(brain, origin = 'lower')\n\n\t#brain = _png.read_png(imagepath)\n\t#imagebox = OffsetImage(brain,zoom =5)\n\t#ab = AnnotationBbox(imagebox,)\n\n\tim = Image.open(imagepath)\n\tax2.imshow(im,aspect = 'auto',origin = 'lower')\n\tax2.set_xlim((0,750))\n\tax2.set_title('Electrode Location',fontsize = 18)\n\n\n\n\treturn f, (ax, ax2)", "def plot_series(groups, series):\n fig, ax = plt.subplots()\n ax.set_xlabel(\"Iterations\")\n ax.set_ylabel(series)\n\n for gkey, gval in groups.items():\n args = dict(gkey)\n\n series_values = get_series(gval, series)\n interval_size = args['test_interval']\n interval_count = series_values.shape[1] - 1\n\n x = np.arange(0, interval_size * interval_count + 1, step=interval_size)\n mean = np.mean(series_values, axis=0)\n std = np.std(series_values, axis=0)\n\n ax.plot(x, mean, label=format_group_key(gkey))\n ax.fill_between(x, mean + std, mean - std, alpha=0.2)\n\n ax.legend()\n return fig, ax", "def plot_tseries(*args, **kwargs) :\n data = kwargs.pop('data')\n return data.dropna().plot(x=args[0], y=args[1], **kwargs)", "def EDA(data):\n\t# mean value curve\n\n\tfig,axs = plt.subplots(5,1, sharey='all')\n\tfig.set_size_inches(10, 15)\n\tdata.groupby('weather').mean().plot(y='count', marker='o', ax=axs[0])\n\tdata.groupby('humidity').mean().plot(y='count', marker='o', ax=axs[1])\n\tdata.groupby('temp').mean().plot(y='count', marker='o', ax=axs[2])\n\tdata.groupby('windspeed').mean().plot(y='count', marker='o', ax=axs[3])\n\tprint('\\n')\n\tdata.groupby('hour').mean().plot(y='count', marker='o', ax=axs[4])\n\tplt.title('mean count per hour')\n\tplt.tight_layout()\n\tplt.show()\n\n\t# grouping scatter\n\tfig,axs = plt.subplots(2,3, sharey='all')\n\tfig.set_size_inches(12, 8)\n\tdata.plot(x='temp', y='count', kind='scatter', ax=axs[0,0], 
color='magenta')\n\tdata.plot(x='humidity', y='count', kind='scatter', ax=axs[0,1], color='bisque')\n\tdata.plot(x='windspeed', y='count', kind='scatter', ax=axs[0,2], color='coral')\n\tdata.plot(x='month', y='count', kind='scatter', ax=axs[1,0], color='darkblue')\n\tdata.plot(x='day', y='count', kind='scatter', ax=axs[1,1], color='cyan')\n\tdata.plot(x='hour', y='count', kind='scatter', ax=axs[1,2], color='deeppink')\n\tplt.tight_layout()\n\tplt.show()\n\n\t# correlation analysis\n\tcorrMatt = data[[\"temp\",\"atemp\",\"casual\",\"registered\",\"humidity\",\"windspeed\",\"count\"]].corr()\n\tmask = np.array(corrMatt)\n\tmask[np.tril_indices_from(mask)] = False\n\n\tfig,ax= plt.subplots()\n\tfig.set_size_inches(20,10)\n\tsn.heatmap(corrMatt, mask=mask, vmax=.8, square=True, annot=True, cmap=\"Greens\")\n\tplt.show()", "def regression_plot():\n\n df = pd.read_csv('tempYearly.csv')\n\n sns_plot = sns.regplot(x='Rainfall', y='Temperature', data=df) # construct regression line\n\n image = io.BytesIO()\n\n sns_plot.figure.savefig(image, format = 'png')\n\n image.seek(0) # reset position of image to 0\n\n return image", "def plot_bp_exptimes(self, plot_spectrum = True, title = None, ylims = (1.0, 1e7),\n cc = [\"C0\", \"C2\", \"C3\"], iremove = []):\n\n # Reshape exposure times\n tmp = self.tpbpcs_rect.T\n\n # Calculate clean spectrum\n output = self.complete_spectrum_time()\n spectrum = output[2]\n\n fig, ax2 = plt.subplots(figsize = (16,5))\n\n if title is not None:\n ax2.set_title(title)\n\n icount = 0\n for ichan in range(len(CHANNELS)):\n\n data = []\n positions = []\n widths = []\n\n for j in range(len(self.bp_names[self.bp_chan == ichan])):\n\n nanmask = np.isfinite(tmp[icount,:])\n\n data.append(tmp[icount,nanmask])\n positions.append(np.mean(spectrum[0][icount]))\n widths.append(spectrum[0][icount][-1] - spectrum[0][icount][0] + np.mean(spectrum[1][icount][:]))\n color1 = cc[ichan]\n\n comp_str = \"$%i \\%%$\" %(100.*self.frac_bias_bp[icount])\n comp_str2 = \"$\\mathbf{%i {\\%%}}$\" %(100.*self.frac_bias_bp[icount])\n comp_str3 = \"$\\mathbf{%i}$\" %(100.*self.frac_bias_bp[icount])\n #ax2.text(positions[j], np.median(tmp[icount,:]) + 5.*np.std(tmp[icount,:]), comp_str2,\n # ha = \"center\", va = \"top\", fontsize = 12, color = \"w\")\n q_l, q_50, q_h, q_m, q_p = nsig_intervals(tmp[icount,nanmask], intvls=[0.25, 0.5, 0.75])\n #ax2.text(positions[j], ylims[1], comp_str2,\n # ha = \"center\", va = \"top\", color = color1, fontsize = 12)\n ax2.text(positions[j], q_50 + q_p, comp_str3,\n ha = \"center\", va = \"bottom\", color = color1)\n\n #ax2.plot(self.bandpasses[icount], [q_50, q_50], color = color1, zorder = 120, ls = \"dashed\")\n\n icount += 1\n\n positions = np.array(positions)\n widths = np.array(widths)\n bp1 = ax2.boxplot(data, sym = '', widths = widths, showfliers = False,\n boxprops = {\"color\" : color1, \"alpha\" : 0.5},\n whiskerprops = {\"color\" : color1, \"linewidth\" : 2.0},\n capprops = {\"color\" : color1, \"linewidth\" : 0.0},\n medianprops = {\"color\" : \"w\", \"linewidth\" : 2.0},\n patch_artist=True, positions = positions, whis = [5, 95]);\n\n for patch in bp1['boxes']:\n patch.set_facecolor(color1)\n\n if plot_spectrum:\n\n ax = ax2.twinx()\n ax2.set_zorder(100)\n ax2.patch.set_visible(False)\n\n ax.set_xlabel(\"Wavelength [$\\mu$m]\")\n ax.set_ylabel(r\"Planet-Star Flux Ratio ($\\times 10^{-10}$)\", rotation = 270, labelpad = 25)\n for i in range(len(self.bp_names)):\n if i not in iremove:\n pass\n #ax.plot(spectrum[0][i], 1e10*spectrum[3][i], \"o\", ms = 4.0, 
alpha = 0.65, color = \"w\", zorder = 80)\n #ax.errorbar(spectrum[0][i], 1e10*spectrum[3][i], yerr=1e10*spectrum[4][i], fmt = \"o\", ms = 2.0, alpha = 0.65, color = \"k\", zorder = 80)\n #ax.axvspan(drmA.bandpasses[i][0], drmA.bandpasses[i][1], alpha = 0.2, color = cc[drmA.bp_chan[i]])\n\n self.cn.telescope.lammin = 0.2\n self.cn.telescope.lammax = 2.0\n self.cn.telescope.resolution = 140.\n # Re-do count rate calcs for true Earth spectrum\n self.cn.run_count_rates(self.AHR, self.LAMHR, self.FSTAR)\n l1, = ax.plot(self.cn.lam, 1e10*self.cn.Cratio, color = \"purple\", zorder = 0, lw = 4.0, alpha = 1.)\n l2, = ax.plot(self.cn.lam, 1e10*self.cn.Cratio, color = \"w\", zorder = 0, lw = 2.0, alpha = 0.65)\n ax.set_ylim(bottom=0.0)\n ax.legend([(l1, l2)], [(\"Modern Earth\")], framealpha = 0.0)\n\n # Label Molecules\n ax.text(0.27, 1.55, \"O$_3$\", ha = \"center\", va = \"center\")\n ax.text(0.6, 1.25, \"O$_3$\", ha = \"center\", va = \"center\")\n ax.text(0.68, 1.35, \"O$_2$\", ha = \"center\", va = \"center\")\n ax.text(0.76, 1.45, \"O$_2$\", ha = \"center\", va = \"center\")\n ax.text(0.96, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.15, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.4, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.9, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.6, 1.25, \"CO$_2$\", ha = \"center\", va = \"center\")\n\n ax2.set_ylabel(\"Science Time [hrs]\")\n #ax2.set_title(r\"All %i targets (S/N$\\approx$%i)\" %(Ndraw, wantSNR))\n ax2.set_yscale(\"log\")\n\n ax2.set_xlabel(\"Wavelength [$\\mu$m]\")\n ax2.set_ylim(bottom = ylims[0], top = ylims[1])\n\n ax2.set_xticks([0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0])\n ax2.set_xticklabels([\"$0.2$\", \"$0.4$\", \"$0.6$\", \"$0.8$\", \"$1.0$\", \"$1.2$\", \"$1.4$\", \"$1.6$\", \"$1.8$\", \"$2.0$\"])\n ax2.set_xlim(0.1, 2.1)\n #ax2.set_xlim(0.4, 1.0)\n\n #fig.savefig(\"/Users/Jake/Dropbox/Astronomy/UW/Astrobio/Research Rotation/LUVOIR/figures/drm_bp10_science_time_%s.pdf\" %drm.architecture, bbox_inches = \"tight\")\n\n return fig", "def LSPmetrics(phen, xnew, nGS, num, phentype):\n inds = np.isnan(phen) # check if array has NaN values\n if inds.any(): # check is all values are NaN\n return np.repeat(np.nan, num)\n else:\n try:\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n\n # basic variables\n vpos = np.max(phen)\n ipos = np.where(phen == vpos)[0]\n pos = xnew[ipos]\n trough = np.min(phen)\n ampl = vpos - trough\n\n # get position of seasonal peak and trough\n ipos = np.where(phen == vpos)[0]\n\n # scale annual time series to 0-1\n ratio = (phen - trough) / ampl\n\n # separate greening from senesence values\n dev = np.gradient(ratio) # first derivative\n greenup = np.zeros([ratio.shape[0]], dtype=bool)\n greenup[dev > 0] = True\n\n # select time where SOS and EOS are located (arround trs value)\n # KneeLocator looks for the inflection index in the curve\n try:\n with warnings.catch_warnings():\n # estimate SOS and EOS as median of the season\n i = np.median(xnew[:ipos[0]][greenup[:ipos[0]]])\n ii = np.median(xnew[ipos[0]:][~greenup[ipos[0]:]])\n sos = xnew[(np.abs(xnew - i)).argmin()]\n eos = xnew[(np.abs(xnew - ii)).argmin()]\n isos = np.where(xnew == int(sos))[0]\n ieos = np.where(xnew == eos)[0]\n if sos is None:\n isos = 0\n sos = xnew[isos]\n if eos is None:\n ieos = len(xnew) - 1\n eos = xnew[ieos]\n except ValueError:\n sos = np.nan\n isos = np.nan\n eos = np.nan\n ieos = np.nan\n except TypeError:\n sos = np.nan\n isos 
= np.nan\n eos = np.nan\n ieos = np.nan\n\n # los: length of season\n try:\n los = eos - sos\n if los < 0:\n los[los < 0] = len(phen) + \\\n (eos[los < 0] - sos[los < 0])\n except ValueError:\n los = np.nan\n except TypeError:\n los = np.nan\n\n # get MSP, MAU (independent from SOS and EOS)\n # mean spring\n try:\n idx = np.mean(xnew[(xnew > sos) & (xnew < pos[0])])\n idx = (np.abs(xnew - idx)).argmin() # indexing value\n msp = xnew[idx] # DOY of MGS\n vmsp = phen[idx] # mgs value\n\n except ValueError:\n msp = np.nan\n vmsp = np.nan\n except TypeError:\n msp = np.nan\n vmsp = np.nan\n # mean autum\n try:\n idx = np.mean(xnew[(xnew < eos) & (xnew > pos[0])])\n idx = (np.abs(xnew - idx)).argmin() # indexing value\n mau = xnew[idx] # DOY of MGS\n vmau = phen[idx] # mgs value\n\n except ValueError:\n mau = np.nan\n vmau = np.nan\n except TypeError:\n mau = np.nan\n vmau = np.nan\n\n # doy of growing season\n try:\n green = xnew[(xnew > sos) & (xnew < eos)]\n id = []\n for i in range(len(green)):\n id.append((xnew == green[i]).nonzero()[0])\n # index of growing season\n id = np.array([item for sublist in id for item in sublist])\n except ValueError:\n id = np.nan\n except TypeError:\n id = np.nan\n\n # get intergral of green season\n try:\n ios = trapz(phen[id], xnew[id])\n except ValueError:\n ios = np.nan\n except TypeError:\n ios = np.nan\n\n # rate of greening [slope SOS-POS]\n try:\n rog = (vpos - phen[isos]) / (pos - sos)\n except ValueError:\n rog = np.nan\n except TypeError:\n rog = np.nan\n\n # rate of senescence [slope POS-EOS]\n try:\n ros = (phen[ieos] - vpos) / (eos - pos)\n except ValueError:\n ros = np.nan\n except TypeError:\n ros = np.nan\n\n # skewness of growing season\n try:\n sw = skew(phen[id])\n except ValueError:\n sw = np.nan\n except TypeError:\n sw = np.nan\n\n metrics = np.array((sos, pos[0], eos, phen[isos][0], vpos,\n phen[ieos][0], los, msp, mau, vmsp, vmau, ampl, ios, rog[0],\n ros[0], sw))\n\n return metrics\n\n except IndexError:\n return np.repeat(np.nan, num)\n except ValueError:\n return np.repeat(np.nan, num)\n except TypeError:\n return np.repeat(np.nan, num)", "def LSPmetrics(phen, xnew, nGS, num, phentype):\n inds = np.isnan(phen) # check if array has NaN values\n if inds.any(): # check is all values are NaN\n return np.repeat(np.nan, num)\n else:\n try:\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n\n # basic variables\n vpos = np.max(phen)\n ipos = np.where(phen == vpos)[0]\n pos = xnew[ipos]\n trough = np.min(phen)\n ampl = vpos - trough\n\n # get position of seasonal peak and trough\n ipos = np.where(phen == vpos)[0]\n\n # scale annual time series to 0-1\n ratio = (phen - trough) / ampl\n\n # separate greening from senesence values\n dev = np.gradient(ratio) # first derivative\n greenup = np.zeros([ratio.shape[0]], dtype=bool)\n greenup[dev > 0] = True\n\n # select time where SOS and EOS are located (arround trs value)\n # KneeLocator looks for the inflection index in the curve\n try:\n with warnings.catch_warnings():\n # estimate SOS and EOS as median of the season\n i = np.median(xnew[:ipos[0]][greenup[:ipos[0]]])\n ii = np.median(xnew[ipos[0]:][~greenup[ipos[0]:]])\n sos = xnew[(np.abs(xnew - i)).argmin()]\n eos = xnew[(np.abs(xnew - ii)).argmin()]\n isos = np.where(xnew == int(sos))[0]\n ieos = np.where(xnew == eos)[0]\n if sos is None:\n isos = 0\n sos = xnew[isos]\n if eos is None:\n ieos = len(xnew) - 1\n eos = xnew[ieos]\n except ValueError:\n sos = np.nan\n isos = np.nan\n eos = np.nan\n ieos = np.nan\n except 
TypeError:\n sos = np.nan\n isos = np.nan\n eos = np.nan\n ieos = np.nan\n\n # los: length of season\n try:\n los = eos - sos\n if los < 0:\n los[los < 0] = len(phen) + \\\n (eos[los < 0] - sos[los < 0])\n except ValueError:\n los = np.nan\n except TypeError:\n los = np.nan\n\n # get MSP, MAU (independent from SOS and EOS)\n # mean spring\n try:\n idx = np.mean(xnew[(xnew > sos) & (xnew < pos[0])])\n idx = (np.abs(xnew - idx)).argmin() # indexing value\n msp = xnew[idx] # DOY of MGS\n vmsp = phen[idx] # mgs value\n\n except ValueError:\n msp = np.nan\n vmsp = np.nan\n except TypeError:\n msp = np.nan\n vmsp = np.nan\n # mean autum\n try:\n idx = np.mean(xnew[(xnew < eos) & (xnew > pos[0])])\n idx = (np.abs(xnew - idx)).argmin() # indexing value\n mau = xnew[idx] # DOY of MGS\n vmau = phen[idx] # mgs value\n\n except ValueError:\n mau = np.nan\n vmau = np.nan\n except TypeError:\n mau = np.nan\n vmau = np.nan\n\n # doy of growing season\n try:\n green = xnew[(xnew > sos) & (xnew < eos)]\n id = []\n for i in range(len(green)):\n id.append((xnew == green[i]).nonzero()[0])\n # index of growing season\n id = np.array([item for sublist in id for item in sublist])\n except ValueError:\n id = np.nan\n except TypeError:\n id = np.nan\n\n # get intergral of green season\n try:\n ios = trapz(phen[id], xnew[id])\n except ValueError:\n ios = np.nan\n except TypeError:\n ios = np.nan\n\n # rate of greening [slope SOS-POS]\n try:\n rog = (vpos - phen[isos]) / (pos - sos)\n except ValueError:\n rog = np.nan\n except TypeError:\n rog = np.nan\n\n # rate of senescence [slope POS-EOS]\n try:\n ros = (phen[ieos] - vpos) / (eos - pos)\n except ValueError:\n ros = np.nan\n except TypeError:\n ros = np.nan\n\n # skewness of growing season\n try:\n sw = skew(phen[id])\n except ValueError:\n sw = np.nan\n except TypeError:\n sw = np.nan\n\n metrics = np.array((sos, pos[0], eos, phen[isos][0], vpos,\n phen[ieos][0], los, msp, mau, vmsp, vmau, ampl, ios, rog[0],\n ros[0], sw))\n\n return metrics\n\n except IndexError:\n return np.repeat(np.nan, num)\n except ValueError:\n return np.repeat(np.nan, num)\n except TypeError:\n return np.repeat(np.nan, num)", "def graph_MAE(history,title): \n _, ax = plt.subplots()\n ax.set_title(title)\n try:\n ax.plot(history.history['loss'], label='Train')\n ax.plot(history.history['val_loss'], label='Test')\n except:\n ax.plot(history['loss'], label='Train')\n ax.plot(history['val_loss'], label='Test')\n ax.set_xlabel(\"Epochs\")\n ax.set_ylabel(\"Mean Absolute Error\")\n ax.legend()", "def add_bollinger_bands(self, rstd):\n self.data['upper_band'] = self.data['rolling_mean'] + 2 * rstd\n self.data['lower_band'] = self.data['rolling_mean'] - 2 * rstd", "def app_SN_animated_series_plot(self):\n print('this option is yet to be implemented')", "def plot_error_over_time(errors: MetricsContainer, **kwargs: Any) -> mpl.figure.Figure:\n\n if 'width' not in kwargs:\n kwargs['width'] = 2 * DEFAULT_WIDTH\n if 'height' not in kwargs:\n kwargs['height'] = 2 * DEFAULT_HEIGHT\n\n fig, (ax1, ax2) = _new_subplots(2, 1, **kwargs)\n\n data = [[e.translation.kitti, np.rad2deg(e.rotation.kitti)] for e in errors]\n df = pd.DataFrame(data, columns=['t_err', 'r_err'])\n\n ax1.plot(df['t_err'])\n ax1.set_title('Translation Error')\n ax1.set_ylabel('e_t')\n\n ax2.plot(np.rad2deg(df['r_err']))\n ax2.set_title('Rotation Error')\n ax2.set_ylabel('e_r [deg]')\n\n return fig", "def plot_seaice_timeseries(anomlous = False, temporal_resolution = 'monthly', spatial_resolution = 1, detrend = False, imagefolder = 
'images/timeseries/SIC/',seaice_source='nsidc'):\n output_folder = 'processed_data/SIC/'\n if seaice_source == 'ecmwf':\n output_folder = 'processed_data/ERA5/SIC/'\n\n if anomlous:\n temp_decomp = 'anomalous'\n else:\n temp_decomp = 'raw'\n\n\n title = temp_decomp.capitalize() + ' '\n\n if detrend:\n dt = 'detrended'\n title += dt + ' '\n else:\n dt = 'raw'\n\n title += temporal_resolution\n title += ' mean SIC in Antarctica'\n\n\n seaicename = f'{temp_decomp}_{temporal_resolution}_{spatial_resolution}_{dt}'\n seaice = xr.open_dataset(output_folder + seaicename +'.nc')\n\n if seaice_source == 'nsidc':\n seaice = seaice\n mean_seaice = seaice_area_mean(seaice[seaicename],1)\n seaice_m, seaice_b, seaice_r_value, seaice_p_value, seaice_std_err = scipy.stats.linregress(mean_seaice.time.values.astype(float), mean_seaice)\n if seaice_source =='ecmwf':\n seaice_m, seaice_b, seaice_r_value, seaice_p_value, seaice_std_err = scipy.stats.linregress(seaice[seaicename].time.values.astype(float), seaice[seaicename].sum(dim = ('longitude', 'latitude')))\n ax = plt.gca()\n if anomlous or detrend: ax.axhline(0, alpha = 0.5)\n if seaice_source == 'nsidc':\n mean_seaice = seaice_area_mean(seaice[seaicename],1)\n plt.plot(seaice.time, mean_seaice)\n\n if seaice_source == 'ecmwf':\n plt.plot(seaice.time, seaice[seaicename].mean(dim = ('longitude', 'latitude')))\n plt.plot(seaice.time, (seaice_m * seaice.time.values.astype(float) + seaice_b), color = '#177E89')\n plt.title(title)\n plt.savefig(imagefolder + seaicename+f'_{seaice_source}.pdf')\n plt.show()", "def plot_sirs(SIRs,Ts, figsize=(10,10), facecolor='LightGrey', lw=2, ylim=0.4):\n\n def set_ax(ax):\n ax.set_facecolor(facecolor)\n ax.set_xlabel('Time /days')\n ax.set_ylabel('Fraction of population')\n ax.yaxis.set_tick_params(length=0)\n ax.xaxis.set_tick_params(length=0)\n ax.grid(b=True, which='major', c='w', lw=2, ls='-')\n legend = ax.legend()\n legend.get_frame().set_alpha(0.5)\n for spine in ('top', 'right', 'bottom', 'left'):\n ax.spines[spine].set_visible(False)\n\n\n fig = plt.figure(figsize=figsize)\n\n ax = plt.subplot(2,1,1, axisbelow=True)\n for i, sir in enumerate(SIRs):\n ax.plot(sir.t, sir.I/sir.N, alpha=0.5, lw=lw, label=Ts[i])\n set_ax(ax)\n ax.set_ylim(0,ylim)\n\n ax = plt.subplot(2,1,2, axisbelow=True)\n for i, sir in enumerate(SIRs):\n ax.plot(sir.t, sir.R/sir.N, alpha=0.5, lw=lw, label=Ts[i])\n set_ax(ax)\n ax.set_ylim(0,1.1)\n\n plt.tight_layout()\n plt.show()", "def error_dropoff(data):\n # 2009-05-05 08:58 IJC: Adapted to Python from Matlab.\n # 2006/06/06 IJC: Made it work with arrays of column vectors.\n # Added the '--nomean' option.\n \n \n# PARSE INPUTS\n data = array(data).copy()\n \n#interval = max([1 round(extract_from_options('--interval=', 1, options))]);\n \n \n if len(data)==len(data.ravel()):\n data = data.ravel()\n data = data.reshape(len(data),1)\n \n nsets = data.shape[1]\n npts_vec = arange(data.shape[0]/2)+1.0\n errors = zeros((data.shape[0]/2, nsets))\n \n# LOOP AND CALCULATE STUFF\n for ii in range(len(npts_vec)):\n npts = npts_vec[ii] # number of points we average over\n nsamp = floor(data.shape[0]/npts) # number of subsamples\n dre = reshape(data[0:nsamp*npts,:], (npts, nsamp, nsets))\n error_values = std(dre.mean(1))\n errors[ii,:] = error_values\n \n return (errors, npts_vec)", "def add_line_sSFR_obs(line,L_line,ax,**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n # --- Observations compiled in Observations.ipynb ---\n\n L_obs = np.array([])\n sSFR_obs = 
np.array([])\n\n if p.plot: print('\\nObserved galaxies with %s:' % line)\n\n # Cormier et al. 2015 and Madden et al. 2013\n df = pd.read_pickle('data/observations/DGS_Cormier_2015') \n try:\n df = pd.read_pickle('data/observations/DGS_Cormier_2015')\n if p.plot: \n ax.errorbar(10.**df.sSFR,df['L_'+line],yerr=df['e_'+line], elinewidth=1,marker='s',ms=5,mew=0,\\\n color='grey',alpha=0.8,lw=0)\n # ax.plot(10.**df.sSFR,df['L_'+line],'s',ms=5,mew=0,color='grey',alpha=0.8,label='Cormier+15 (dwarfs)')\n L_ul = df['L_'+line][df['L_'+line] < 0]\n if len(L_ul) > 0:\n ax.plot(10.**df.sSFR[df['L_'+line] < 0],L_ul,'s',ms=5,mew=0,color='grey',alpha=0.8)\n ax.errorbar(10.**df.sSFR[df['L_'+line] < 0],L_ul, elinewidth=1,\\\n uplims=np.ones(len(L_ul)),yerr=0.3,color='grey',alpha=0.8,lw=0)\n #-1.*L_ul - 10.**(np.log10(-1.*L_ul)-0.3)\n L_obs = np.append(L_obs,df['L_'+line].values)\n # print(df['L_'+line].values)\n sSFR_obs = np.append(sSFR_obs,df.sSFR.values)\n if p.plot: print('%i galaxies from Cormier+15 with positiv flux' % (len(df['L_'+line].values[df['L_'+line].values > 0])))\n # print('min SFR: ',np.min(df.SFR.values[df.sizes < 47]))\n except:\n pass", "def temperature_stderr(ax, metrics, temp_group, sizes: Dict[str, int], ewma_alpha:float=0.0,\n mask:Union[slice, np.ndarray]=slice(None), label=None, legend=True,\n line_kwargs={}, confidence_kwargs={}):\n temperatures = metrics[temp_group]\n if label is None:\n label = temp_group\n\n mean = temperatures['all'][mask]\n keys = list(temperatures.keys())\n keys.remove('all')\n\n temps = np.stack([temperatures[k][mask] for k in keys], axis=1)\n # Weights: number of elements of each parameter\n weights = np.array([sizes[k] for k in keys], dtype=float)\n\n _mean, var_se = weighted_var_se(weights, temps)\n\n dist = MultivariateNormal(\n torch.from_numpy(mean), DiagLazyTensor(torch.from_numpy(var_se)))\n\n steps = metrics[\"steps\"][mask]\n line, *_ = ax.plot(steps, metrics[\"temperature\"][mask], linestyle='--', **line_kwargs)\n gp_posterior(ax, torch.from_numpy(steps), dist, ewma_alpha=ewma_alpha, color=line.get_color(), label=label, **confidence_kwargs)\n\n if legend:\n ax.legend()", "def plot_series_and_differences(series, ax, num_diff, title=''):\n plt.xticks(rotation=40)\n ax[0].plot(series.index, series)\n ax[0].set_title('Raw series: {}'.format(title))\n ax[0].set_xticklabels(labels=series.index.date, rotation=45)\n for i in range(1, num_diff+1):\n diff = series.diff(i)\n ax[i].plot(series.index, diff)\n ax[i].set_title('Difference # {}'.format(str(i)))\n ax[i].set_xticklabels(labels=series.index.date, rotation=45)", "def _plot_std_bars(self, ax, x, s, color='grey'):\n\n if x.shape != s.shape:\n print x.shape, s.shape\n raise ValueError('Invalid shapes!')\n\n if x.ndim != 1:\n raise ValueError('Currently only 1D data supported')\n\n segments = []\n for i in xrange(x.nt):\n yref = x.data[i]\n dnum = pl.date2num(x.date[i]) # the conversion using pylab is required as otherwise there is a 1-day shift! 
Reason seems to be that matplotlib converts the numerical value automatically using num2date()\n xx = [dnum, dnum]\n yy = [yref - s.data[i], yref + s.data[i]]\n segments.append(list(zip(xx, yy)))\n collection = LineCollection(segments, colors=color)\n ax.add_collection(collection)", "def _timeseries_scatter_plot_reg(large_scale_signal_ts, regional_signal_ts,\n rvalue, slope):\n res = stats.linregress(large_scale_signal_ts, regional_signal_ts)\n y_values = res.intercept + res.slope * \\\n np.array(large_scale_signal_ts)\n rvalue = res.rvalue\n slope = res.slope\n return rvalue, slope, y_values", "def plot_fishing_mortality(df):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n\n ax.set_position(default_timeseries_position) \n\n Fn = df['Fn'].groupby([df.Year, df.Reg, df.Sreg]).mean()\n\n all_fishing_mortality = Fn.loc[:, 'All', 'All']\n ma_fishing_mortality = Fn.loc[:, '1', 'All']\n gb_fishing_mortality = Fn.loc[:, '2', 'All']\n\n # Don't plot the first year. Also, the data is shifted by one year.\n # For some reason, restricting the year range above results in a series\n # that still have a multi-index. This seems like the cleanest way to do\n # that.\n all_fishing_mortality = all_fishing_mortality[2:]\n ma_fishing_mortality = ma_fishing_mortality[2:]\n gb_fishing_mortality = gb_fishing_mortality[2:]\n\n all_fishing_mortality.index = all_fishing_mortality.index - 1\n ma_fishing_mortality.index = ma_fishing_mortality.index - 1\n gb_fishing_mortality.index = gb_fishing_mortality.index - 1\n\n all_fishing_mortality.plot(ax=ax, label='All') \n ma_fishing_mortality.plot(ax=ax, label='Mid Atlantic')\n gb_fishing_mortality.plot(ax=ax, label='Georges Bank')\n\n ax.legend(loc='best')\n\n content = io.BytesIO()\n plt.savefig(content, format='png')\n content.seek(0)\n image_cache['fishing_mortality']['fishing_mortality'] = content\n\n plt.close()", "def dataTimeSeries(timesteps,df,predictors,target,dropnan,out=2,dropVars=True): \r\n \r\n series = series_to_supervised(df[predictors+[target]].copy(),timesteps,out,dropnan=dropnan)\r\n \r\n if dropnan==False:\r\n series.replace(pd.np.nan,0,inplace=True)\r\n \r\n # Dropping other variables:\r\n if dropVars:\r\n index = list(np.arange(series.shape[1]-2,\r\n series.shape[1]-len(predictors)-2,\r\n -1))\r\n \r\n labels = [item for idx,item in enumerate(series.columns) \r\n if idx in index]\r\n \r\n #print(\"Eliminando variáveis: {}\".format(labels))\r\n series.drop(labels,axis=1,inplace=True) \r\n \r\n return series", "def avg_sse_plot(self):\n df_sse = self.df[\"sse_avg\"].sort_values(ascending=False)\n plt.figure(figsize=(self.plot_width, self.plot_height))\n df_sse.plot(\"bar\")\n plt.title(\"Media SSE por cluster\")\n output_path_sse = os.path.join(self.output_folder, 'sse_avg_plot.png')\n plt.savefig(output_path_sse)", "def plot_quanti_missing(self, **kwargs):\n sns.set_style('white')\n ax = self.missing_quanti.plot(\n kind='barh', stacked=True, color=sns.color_palette(), **kwargs)\n ax.legend(bbox_to_anchor=(1.5, 1))\n ax.axvline(len(self.quanti)/2, linestyle='dashed', color='indigo')\n sns.despine(left=True)\n return ax", "def plot_ave(results_list):\n x_range = range(len(results_list[0]))\n err_x, err_y, std_list = [], [], []\n\n for i in x_range:\n if i % 10 == 0:\n #get average for each generation\n column = [] \n for result in results_list:\n column.append(result[i])\n average = np.average(column)\n \n std_dev = np.std(column)\n err_x.append(i)\n err_y.append(average)\n std_list.append(std_dev)\n\n pylab.errorbar(err_x, err_y, yerr=std_list)\n 
pylab.show()", "def plot_losses(train, test, mode):\n\tplt.figure()\n\tplt.plot(range(len(train)), train, 'r', label='Training')\n\tplt.plot(range(len(test)), test, 'b', label='Testing')\n\tplt.title('MSE Loss (batch type: ' + mode + ')')\n\tplt.legend()\n\tplt.show()", "def plot_lastreviews_means_and_errors(H_in_HL_mean, H_in_HL_error, L_in_HL_mean, L_in_HL_error,\n H_in_HH_mean, H_in_HH_error, H_in_HM_mean, H_in_HM_error,\n M_in_HM_mean, M_in_HM_error):\n # plot the result in a nice plot\n plt.figure(figsize=(12, 9)) \n\n # create the fig. and axes.\n ax = plt.subplot(111)\n ax.spines[\"top\"].set_visible(False) \n ax.spines[\"right\"].set_visible(False)\n\n # define the color to use\n color_1 = rgb_to_matplot_lib(strong_green)\n color_2 = rgb_to_matplot_lib(light_green)\n color_3 = rgb_to_matplot_lib(strong_red)\n color_4 = rgb_to_matplot_lib(light_green)\n color_5 = rgb_to_matplot_lib(orange)\n\n\n\n # axis \n ax.set_ylabel('Rating', fontsize = 14)\n ax.tick_params(axis='both', labelsize=14)\n\n # plot small dash lines to follow the grading \n for y in np.arange(4.0, 4.6, 0.1): \n ax.plot(range(0, 45), [y] * len(range(0, 45)), \"--\", lw=0.5, color=\"black\", alpha=0.3)\n\n\n # set titles\n ax.set_title('10+ reviews average rating for each case in each group', fontsize = 14)\n\n plt.ylim([1,5.1])\n plt.xlim([0,5.1])\n\n plt.errorbar(1, H_in_HH_mean, H_in_HH_error, lineStyle= None, capsize=5, marker=\"^\", color=color_1)\n plt.errorbar(2, H_in_HL_mean, H_in_HL_error, lineStyle= None, capsize=5, marker=\"^\", color=color_2)\n plt.errorbar(3, L_in_HL_mean, L_in_HL_error, lineStyle= None, capsize=5, marker=\"^\", color=color_3)\n plt.errorbar(4, H_in_HM_mean, H_in_HM_error, lineStyle= None, capsize=5, marker=\"^\", color=color_4)\n plt.errorbar(5, M_in_HM_mean, M_in_HM_error, lineStyle= None, capsize=5, marker=\"^\", color=color_5)\n\n plt.text(0.8, 4.01, \"({:04.3f})\".format(H_in_HH_mean), fontsize=14, color=color_1)\n plt.text(1.8, 4.01, \"({:04.3f})\".format(H_in_HL_mean), fontsize=14, color=color_2) \n plt.text(2.8, 4.01, \"({:04.3f})\".format(L_in_HL_mean), fontsize=14, color=color_3) \n plt.text(3.8, 4.01, \"({:04.3f})\".format(H_in_HM_mean), fontsize=14, color=color_4) \n plt.text(4.8, 4.01, \"({:04.3f})\".format(M_in_HM_mean), fontsize=14, color=color_5) \n\n\n # set ticks label\n ax.set_xticks(range(1,6))\n ax.set_xticklabels(('H in HH', 'H in HL', 'L in HL', 'H in HM', 'M in HM'))\n\n #set ticks color\n colors = [color_1, color_2, color_3, color_4, color_5]\n for xtick, color in zip(ax.get_xticklabels(), colors):\n xtick.set_color(color)\n\n plt.ylim([4,4.6])\n plt.xlim([0.5,5.5])\n plt.show()", "def plot_subplot_trend(anomlous = False, temporal_resolution = 'monthly', spatial_resolution = 1, detrend = False, imagefolder = 'images/subplots/',seaice_source='nsidc'):\n output_folder = 'processed_data/SIC/'\n if seaice_source == 'ecmwf':\n output_folder = 'processed_data/ERA5/SIC/'\n\n if anomlous:\n temp_decomp = 'anomalous'\n else:\n temp_decomp = 'raw'\n\n\n title = temp_decomp.capitalize() + ' '\n\n if detrend:\n dt = 'detrended'\n title += dt + ' '\n else:\n dt = 'raw'\n\n title += temporal_resolution\n title += ' SIE trends'\n\n\n# Loading Seaice Trends\n seaicename = f'{temp_decomp}_{temporal_resolution}_{spatial_resolution}_{dt}'\n seaice = xr.open_dataset(output_folder + seaicename +'.nc')\n if seaice_source == 'nsidc':\n seaice = seaice/250\n seaice_m, seaice_b, seaice_r_value, seaice_p_value, seaice_std_err = xr.apply_ufunc(scipy.stats.linregress, 
seaice[seaicename].time.values.astype(float), seaice[seaicename], input_core_dims=[['time'],['time']], vectorize=True, dask='parallelized', output_dtypes=[float]*5, output_core_dims=[[]]*5)\n if seaice_source =='ecmwf':\n seaice_m, seaice_b, seaice_r_value, seaice_p_value, seaice_std_err = scipy.stats.linregress(seaice[seaicename].time.values.astype(float), seaice[seaicename])\n seaice_m = seaice_m * 1e9 * 60 * 60 * 24 * 365\n area = xr.open_dataset('data/area_files/processed_nsidc.nc').area\n seaice_m = seaice_m*area\n seaice_m = seaice_m.where(seaice_m != 0)\n seaice_m = seaice_m.where(seaice_p_value <= 0.05)\n\n\n# Index xontributions\n filename = f'processed_data/regressions/spatial_multiple/regr_{temp_decomp}_{temporal_resolution}_{dt}_{spatial_resolution}'\n dataset = xr.open_dataset(filename + '.nc')\n indicies = np.array([i for i in dataset])\n values = np.array([dataset[i].values for i in dataset])\n index_data = {}\n for indexname in indicies[:-1]:\n filename = f'{indexname}_{temp_decomp}_{temporal_resolution}_{dt}'\n index_data[indexname] = xr.open_dataset('processed_data/INDICIES/' + filename +'.nc')[indexname]\n index_data[indexname] = (index_data[indexname] - index_data[indexname].mean()) \n index_data[indexname] = index_data[indexname] / index_data[indexname].std()\n newdata = {} \n for indexname in indicies[:-1]:\n a = scipy.stats.linregress(index_data[indexname].time.values.astype(float), index_data[indexname])\n newdata[indexname] = a[0] * dataset[indexname] * 24*60*60*365e9\n title = temp_decomp.capitalize() + ' '\n if detrend == 'detrended':\n title += detrend + ' '\n title += temporal_resolution\n title += f' SIC trend contributions'\n area = xr.open_dataset('data/area_files/processed_nsidc.nc').area\n # Plotting\n for i in range(len(indicies)-1):\n indexname = indicies[i]\n newdata[indexname] = newdata[indexname] * area / 250\n newdata[indexname] = newdata[indexname].where(newdata[indexname] !=0)\n\n\n\n fig = plt.figure(figsize = (15,5))\n\n # seaice_m = log_data(seaice_m)\n max_ = min(seaice_m.max(),-seaice_m.min())\n # max_ = 1\n divnorm = TwoSlopeNorm(vmin=-max_, vcenter=0, vmax=max_)\n ax = fig.add_subplot(131, projection = ccrs.SouthPolarStereo())\n # Plotting\n contor = ax.contourf(seaice_m.x, seaice_m.y, seaice_m, cmap = 'RdBu', levels = 100, norm = divnorm, transform=ccrs.SouthPolarStereo())\n ax.coastlines()\n ax.set_axis_off()\n # cbar = plt.colorbar(contor)\n # cbar.set_label('Trend in SIE (km$^2$ yr$^{-1}$)')\n # plt.title(title)\n ax = [fig.add_subplot(2,6,3, projection = ccrs.SouthPolarStereo()),fig.add_subplot(2,6,4, projection = ccrs.SouthPolarStereo()),fig.add_subplot(2,6,9, projection = ccrs.SouthPolarStereo()),fig.add_subplot(2,6,10, projection = ccrs.SouthPolarStereo())]\n for i in range(len(indicies)-1):\n indexname = indicies[i]\n # newdata[indexname] = log_data(newdata[indexname])\n newdata[indexname] = newdata[indexname].where(newdata[indexname] !=0)\n contor = ax[i].contourf(dataset.x, dataset.y, newdata[indexname], cmap = 'RdBu', norm = divnorm, transform=ccrs.SouthPolarStereo(), levels = 100)\n ax[i].coastlines()\n ax[i].set_axis_off()\n ax[i].set_title(indicies[i])\n\n ax = fig.add_subplot(1,3,3, projection = ccrs.SouthPolarStereo())\n data = seaice_m\n for i in range(len(indicies)-1):\n indexname = indicies[i]\n data = data - newdata[indexname]\n ax.contourf(dataset.x, dataset.y, data, cmap = 'RdBu', norm = divnorm, levels = 100, transform=ccrs.SouthPolarStereo())\n ax.coastlines()\n fig.subplots_adjust(right=0.9)\n cbar_ax = 
fig.add_axes([0.95, 0.15, 0.05, 0.7])\n cbar = fig.colorbar(cm.ScalarMappable(norm=divnorm, cmap='RdBu'), cax=cbar_ax, shrink=0.88)\n\n plt.savefig(imagefolder + seaicename + '.pdf')\n plt.show()", "def plot_spk(df_flt, df_speak, seps=None, show_trace=False, show_std=False, beg=0, dur=50, rotation=0, interval=5, markersize=1.5, figsize=(30,15), title='', alpha=1, spkplot_gap=1):\n idxes = df_speak.index.unique()\n idxes = get_meet_sec(df_flt).index\n sbeg = str(idxes[beg*20]) \n send = str(idxes[beg*20+dur*20]) \n df_speak = df_speak.loc[sbeg: send]\n n_sub = len(df_flt.columns)\n n_row = n_sub + 1\n \n ## Cannot set [sbeg: send] due to pandas\n df_flt_part = df_flt.loc[:send]\n fig, axes = plt.subplots(n_row, 1, figsize=figsize, sharex=True)\n axs = df_flt_part.plot(figsize=figsize, subplots=True, linewidth=1, marker='o', \n markersize=markersize, alpha=alpha, title=title, ax=axes[:n_sub])\n\n ### add std\n if show_std:\n df_sec = get_meet_sec(df_flt_part)\n df_std = df_sec.groupby(df_sec.index).std()\n dt_std = {}\n \n colors = []\n dt_uc = {}\n dt_ps = {}\n for comb in zip(axs, df_flt.columns):\n ax, u = comb\n l = ax.lines[0]\n dt_uc[u] = l.get_color()\n dt_ps[u] = []\n if show_std:\n dt_std[u] = []\n subjects = sorted(dt_uc.keys(), reverse=True)\n \n if show_std:\n for k in df_sec.index.unique():\n k1 = k + datetime.timedelta(seconds=1)\n for u in df_sec.columns:\n # add std\n stdu = df_std.ix[k, u]\n dt_std[u].append([k, k1])\n dt_std[u].append([stdu, stdu])\n \n \n for k in df_speak.index:\n k1 = k + datetime.timedelta(seconds=1)\n us = df_speak.loc[df_speak.index == k].speaker.tolist()\n for u in us:\n y = -1 * spkplot_gap * ( 1 + subjects.index(u) )\n dt_ps[u].append([k, k1])\n dt_ps[u].append([y, y])\n \n nax = axes[n_sub]\n\n for i,u in enumerate(df_flt.columns):\n c = dt_uc[u]\n params = dt_ps[u]\n axs[i].plot(*params, linewidth=5, color=c)\n if seps is not None:\n axs[i].axhline(seps[i], linestyle= '--', color='black', alpha=0.8)\n axs[i].set_ylim([-10,60])\n axs[i].set_ylabel('Volume')\n axs[i].grid(axis='x', which='major', alpha=0.5, linestyle=':') \n \n # add std\n if show_std:\n params_std = dt_std[u]\n axs[i].plot(*params_std, linewidth=3, color='black', linestyle='--')\n \n if show_trace and len(params) != 0:\n nax.axhline(params[1][0], linestyle=':' , color=c )\n nax.plot(*params, linewidth=spkplot_gap*20, color=c);\n nax.set_ylim([0, -1*spkplot_gap*(n_sub+1) ])\n nax.set_yticklabels('') \n nax.xaxis.set_major_locator(mdates.SecondLocator(interval=interval))\n dateformatter = ':%S' if dur <= 60 else '%M:%S'\n nax.xaxis.set_major_formatter(mdates.DateFormatter(dateformatter))\n\n nax.grid(axis='x', which='major', alpha=0.5, linestyle='--')\n nax.set_xlabel('Time')\n nax.set_ylabel('Speaker')\n ## This is just a work-around. 
Something should be wrong with df.plot (pd version 0.22.)\n nax.set_xlim([sbeg, send])\n plt.xticks(rotation=rotation)\n plt.tight_layout()\n return sbeg, send", "def figure_size_resp_bms(df):\n sns.set_style('ticks')\n gs = GridSpec(2, 3)\n fig = plt.figure(figsize=(7, 8))\n axs = [fig.add_subplot(gs[0, 0]), fig.add_subplot(gs[0, 1]), fig.add_subplot(gs[0, 2]),\n fig.add_subplot(gs[1, :])]\n # fig, axs = plt.subplots(2, 2, figsize=(8, 6))\n # axs = axs.reshape(-1)\n\n sns.boxplot('genotype', 'area', hue='treatment', data=df, ax=axs[0], order=('wt', 'ko'), hue_order=('veh', 'bms'))\n axs[0].set_ylim((0, 2000000))\n axs[0].set_ylabel('Responsive area in µm²')\n sns.boxplot('genotype', 'max_df', hue='treatment', data=df, ax=axs[1], order=('wt', 'ko'), hue_order=('veh', 'bms'))\n axs[1].set_ylabel('Average peak response amplitude (%)')\n axs[1].set_ylim((0, 3.5))\n sns.boxplot('genotype', 'fwhm', hue='treatment', data=df, ax=axs[2], order=('wt', 'ko'), hue_order=('veh', 'bms'))\n gp = df.groupby(('genotype', 'treatment'))\n t = np.arange(-3, 5, .1)\n for g in product(('wt', 'ko'), ('veh', 'bms')):\n try:\n avg_df = np.vstack(gp.get_group(g).avg_df.as_matrix())\n mean_df = avg_df.mean(0)\n # mean_df[mean_df > 0.7] = 0\n axs[3].plot(t, mean_df, label=g, linewidth=2)\n except KeyError:\n pass\n axs[3].legend()\n axs[3].set_xlabel(TIME_LABEL)\n axs[3].set_ylabel('Average $\\Delta$ F / F (%)')\n fig.tight_layout()\n fig.savefig('Intrinsic/figure/responses.png')\n fig.savefig('Intrinsic/figure/responses.svg')\n with open('Intrinsic/figure/stats.txt', 'w') as f:\n f.write('Mann-Whitney U-test\\n\\n')\n for g1, g2 in combinations(product(('wt', 'ko'), ('veh', 'bms')), 2):\n f.write(f'+ {g1} vs {g2}:\\n')\n pval = mannwhitneyu(df.area[df.genotype == g1], df.area[df.genotype == g2]).pvalue\n f.write(f'\\tArea comparison {g1} vs {g2}: {pval:.3f}\\n')\n pval = mannwhitneyu(df.max_df[df.genotype == g1], df.max_df[df.genotype == g2]).pvalue\n f.write(f'\\tAmplitude comparison {g1} vs {g2}: {pval:.3f}\\n')\n pval = mannwhitneyu(df.fwhm[df.genotype == g1], df.fwhm[df.genotype == g2]).pvalue\n f.write(f'\\tFull width at half maximum comparison {g1} vs {g2}: {pval:.3f}\\n')", "def plot_noerror(self, ax):\n tmp_lefts = deepcopy(self.lefts)\n tmp_lefts = np.append(tmp_lefts, self.lefts[-1] + self.widths[-1])\n tmp_values = deepcopy(self.values)\n tmp_values = np.append(tmp_values, self.values[-1])\n return ax.plot(tmp_lefts, tmp_values, color=self.color, drawstyle='steps-post', label=self.label, **self.options)", "def plot_loss(self):\n train_elbo_range = range(len(self.train_elbo_hist))\n val_elbo_range = range(len(self.val_elbo_hist))\n train_loss_range = range(len(self.train_loss_hist))\n val_loss_range = range(len(self.val_loss_hist))\n\n fig, ax = plt.subplots(2, 2)\n ax[0][0].plot(train_elbo_range, self.train_elbo_hist)\n ax[0][0].title.set_text(\"Train ELBO\")\n ax[0][1].plot(val_elbo_range, self.val_elbo_hist)\n ax[0][1].title.set_text(\"Val ELBO\")\n ax[1][0].plot(train_loss_range, self.train_loss_hist)\n ax[1][0].title.set_text(\"Train MSE\")\n ax[1][1].plot(val_loss_range, self.val_loss_hist)\n ax[1][1].title.set_text(\"Val MSE\")\n plt.tight_layout()\n plt.show()", "def test_lightcurve_seismology_plot():\n KeplerLightCurveFile(TABBY_Q8).PDCSAP_FLUX.periodogram().plot()", "def PlotTimeSeries(ticker, years_ago=5, verbose_mode=False):#, months_ago=0): \n \n # There are two Yahoo Modules we can use to pull our data (closeHist)\n # We'll pull from one and if we get an error will use the alternate\n try:\n 
closeHist = pd.DataFrame(yf.download(ticker,\n period='max', \n progress=False)['Close']).rename({'Close':'Price'}, axis=1)\n #closeHist = pd.DataFrame(yf.Ticker(ticker).history(period='max')['Close']).rename({'Close':'Price'}, axis=1)\n closeHist.index = closeHist.index.to_pydatetime()\n closeHist.index.name = 'Date'\n except json.JSONDecodeError:\n closeHist = pd.DataFrame(y_fin.get_data(ticker)['close']).rename({'close':'Price'}, axis=1)\n closeHist.index = closeHist.index.to_pydatetime()\n closeHist.index.name = 'Date'\n # Trim our data to years_ago\n closeHist = closeHist[closeHist.index > dt.datetime.now() + relativedelta(years=-years_ago)]\n closeHist.reset_index(inplace=True)\n #Calculate monthly avg. Price\n closeHist['Month'] = closeHist.Date.apply(lambda x: dt.date(x.year, x.month, 1))\n closeHist = closeHist.groupby('Month').last().rename({'Price':'Price(Monthly avg.)'}, axis=1)\n closeHist['x_index'] = pd.Series(range(len(closeHist.index)), closeHist.index)\n\n # Find Peaks and Troughs (Local Maximums and Minimums)\n MinSeries = closeHist['Price(Monthly avg.)'][(closeHist['Price(Monthly avg.)'].shift(1) > closeHist['Price(Monthly avg.)']) & \n (closeHist['Price(Monthly avg.)'].shift(-1) > closeHist['Price(Monthly avg.)'])]\n MaxSeries = closeHist['Price(Monthly avg.)'][(closeHist['Price(Monthly avg.)'].shift(1) < closeHist['Price(Monthly avg.)']) & \n (closeHist['Price(Monthly avg.)'].shift(-1) < closeHist['Price(Monthly avg.)'])]\n \n \n MinSeries = pd.concat([MinSeries, \n closeHist['Price(Monthly avg.)'][(closeHist.index <= MaxSeries.index[0])&\n (closeHist['Price(Monthly avg.)'] < MaxSeries.iloc[0])].head(1)]).sort_index()\n\n \n #BothSeries = pd.concat([MinSeries, MaxSeries]).sort_index()\n #MaxMaxSeries = BothSeries[(BothSeries.shift(1) < BothSeries) & (BothSeries.shift(-1) < BothSeries)]\n #MinMinSeries = BothSeries[(BothSeries.shift(1) > BothSeries) & (BothSeries.shift(-1) > BothSeries)]\n \n \n\n #3PTL Buy Line\n X = list()\n Y = list()\n x_1_date = MaxSeries.idxmax()\n x_1 = closeHist[closeHist.index==x_1_date].x_index.iloc[0]\n X.append(x_1)\n Y.append(MaxSeries.max())\n try:\n x_2_date = MaxSeries[MaxSeries.index > x_1_date].idxmax()\n x_2 = closeHist[closeHist.index==x_2_date].x_index.iloc[0]\n X.append(x_2)\n Y.append(MaxSeries[MaxSeries.index > x_1_date].max())\n except ValueError:\n pass\n #3PTL Sell Line\n X2 = list()\n Y2 = list()\n x2_1_date = MinSeries.idxmin()\n x2_1 = closeHist[closeHist.index==x2_1_date].x_index.iloc[0]\n X2.append(x2_1)\n Y2.append(MinSeries.min())\n try:\n x2_2_date = MinSeries[MinSeries.index > x2_1_date].idxmin()\n x2_2 = closeHist[closeHist.index==x2_2_date].x_index.iloc[0]\n X2.append(x2_2)\n Y2.append(MinSeries[MinSeries.index > x2_1_date].min())\n except ValueError:\n pass\n\n print('Current Price for', ticker, 'is', str(round(closeHist['Price(Monthly avg.)'].iloc[-1], 2)))\n\n sellLine_list = list()\n buyLine_list = list()\n\n #Calculate and plot Sell line:\n if len(X2) < 2:\n # IF WE CANNOT BUILD A SELL LINE USING MAX, START WITH FIRST TWO TROUGHS\n X2 = list(closeHist.loc[MinSeries.index]['x_index'].iloc[:2])\n Y2 = list(closeHist.loc[MinSeries.index]['Price(Monthly avg.)'].iloc[:2])\n ThreePtS = drawLine2P(x=X2,y=Y2,xlims=[closeHist['x_index'].values.min(),\n closeHist['x_index'].values.max()+1])\n sellLine_list.append(ThreePtS[1])\n else: \n ThreePtS = drawLine2P(x=X2,y=Y2,xlims=[closeHist['x_index'].values.min(),\n closeHist['x_index'].values.max()+1])\n sellLine_list.append(ThreePtS[1])\n\n #Calculate and plot Buy line:\n 
if len(X) < 2:\n pass\n else: \n ThreePtB = drawLine2P(x=X,y=Y,xlims=[closeHist['x_index'].values.min(),\n closeHist['x_index'].values.max()+1])\n buyLine_list.append(ThreePtB[1])\n\n\n Buy_Breach = max(closeHist[closeHist.x_index.isin(X2)].index)\n if verbose_mode:\n n = 1 #TESTING\n while Buy_Breach:\n # FIRST BUY ITERATION\n latestHist = closeHist.loc[Buy_Breach:]\n subSell = latestHist.index[latestHist['Price(Monthly avg.)'] < pd.Series(ThreePtS[1], closeHist.index).loc[Buy_Breach:]]\n if len(subSell) > 0:\n Sell_Breach = subSell[0] \n preBreach = MaxSeries[MaxSeries.index < Sell_Breach].index\n postBreach = MaxSeries[MaxSeries.index > Sell_Breach].index\n if verbose_mode:\n print(\"{} Sell Breach at {}, this is Breach #{}\".format(ticker, Sell_Breach, n)) #TESTING\n n+=1\n if len(postBreach) > 0:\n pt_1 = closeHist.loc[closeHist.loc[preBreach]['Price(Monthly avg.)'].idxmax()]\n pt_2 = closeHist.loc[postBreach[0]]\n Y2 = [pt_1['Price(Monthly avg.)'], pt_2['Price(Monthly avg.)']]\n X2 = [pt_1['x_index'], pt_2['x_index']]\n ThreePtB = drawLine2P(x=X2,y=Y2,xlims=[closeHist['x_index'].values.min(),\n closeHist['x_index'].values.max()+1])\n # plt.plot(closeHist.index, ThreePtB[1],\n # c='g', linestyle='dashed', \n # alpha=buyAlpha)\n buyLine_list.append(ThreePtB[1])\n else:\n Sell_Breach = None\n break \n else:\n Sell_Breach = None\n break\n while Sell_Breach:\n # FIRST SELL ITERATION\n latestHist = closeHist.loc[Sell_Breach:]\n superBuy = latestHist.index[latestHist['Price(Monthly avg.)'] > pd.Series(ThreePtB[1], closeHist.index).loc[Sell_Breach:]]\n if len(superBuy) > 0:\n Buy_Breach = superBuy[0]\n preBreach = MinSeries[MinSeries.index < Buy_Breach].index\n postBreach = MinSeries[MinSeries.index > Buy_Breach].index\n if verbose_mode:\n print(\"{} Buy Breach at {}, this is Breach #{}\".format(ticker, Buy_Breach, n)) #TESTING\n n+=1\n if len(postBreach) > 0:\n pt_1 = closeHist.loc[closeHist.loc[preBreach]['Price(Monthly avg.)'].idxmin()]\n pt_2 = closeHist.loc[postBreach[0]]\n Y2 = [pt_1['Price(Monthly avg.)'], pt_2['Price(Monthly avg.)']]\n X2 = [pt_1['x_index'], pt_2['x_index']]\n ThreePtS = drawLine2P(x=X2,y=Y2,xlims=[closeHist['x_index'].values.min(),\n closeHist['x_index'].values.max()+1])\n # plt.plot(closeHist.index, ThreePtS[1],\n # c='r', linestyle='dashed', \n # alpha=sellAlpha)\n sellLine_list.append(ThreePtS[1])\n\n break\n else:\n Buy_Breach = None\n break\n else:\n Buy_Breach = None\n break\n #sellLine_alpha = np.linspace(0.1, 1, len(sellLine_list))\n #buyLine_alpha = np.linspace(0.1, 1, len(buyLine_list))\n sellLine_alpha = np.flipud(np.linspace(1, 0.1, len(sellLine_list)+1)[:-1])\n buyLine_alpha = np.flipud(np.linspace(1, 0.1, len(buyLine_list)+1)[:-1])\n\n\n\n if len(sellLine_list) > 0:\n sellPrice = round(sellLine_list[-1][-1], 2)\n if sellPrice < 0:\n sellPrice = round(0.00, 2) \n print('Sell Price for', ticker, 'is', sellPrice)\n if len(buyLine_list) > 0:\n buyPrice = round(buyLine_list[-1][-1], 2)\n if buyPrice < 0:\n buyPrice = round(0.00, 2)\n print('Buy Price for', ticker, 'is', buyPrice)\n\n plt.figure(figsize=[20,9])\n with plt.style.context('fivethirtyeight'):\n plt.plot(closeHist['Price(Monthly avg.)'], zorder=0)\n \n if verbose_mode:\n for i in np.arange(len(sellLine_list)):\n plt.plot(closeHist.index, sellLine_list[i],\n c='r', linestyle='dashed', \n alpha=sellLine_alpha[i])\n\n for i in np.arange(len(buyLine_list)):\n plt.plot(closeHist.index, buyLine_list[i],\n c='g', linestyle='dashed', \n alpha=buyLine_alpha[i])\n\n if len(sellLine_list) > 0:\n 
plt.plot(closeHist.index, sellLine_list[-1],\n c='r',\n alpha=1)\n \n if len(buyLine_list) > 0:\n plt.plot(closeHist.index, buyLine_list[-1],\n c='g', \n alpha=1) \n\n plt.scatter(MinSeries.index, \n MinSeries,\n c='r', s=50, zorder=10)\n plt.scatter(MaxSeries.index, \n MaxSeries,\n c='g', s=50, zorder=10)\n # plt.scatter(MaxMaxSeries.index, \n # MaxMaxSeries,\n # c='y', s=100, zorder=5)\n # plt.scatter(MinMinSeries.index, \n # MinMinSeries,\n # c='y', s=100, zorder=5)\n plt.title(\"Buy and Sell Lines for \"+ ticker, {'fontsize':20})\n plt.autoscale()\n num = closeHist['Price(Monthly avg.)'].min()\n Y_lim_min = math.floor(num / 10 ** math.floor(math.log10(num))) * 10 ** math.floor(math.log10(num))\n num = closeHist['Price(Monthly avg.)'].max()\n Y_lim_max = math.ceil(num / 10 ** math.floor(math.log10(num))) * 10 ** math.floor(math.log10(num))\n plt.ylim(0, Y_lim_max)#,Y_lim_max)\n plt.show()", "def check_stationarity(time_series, window, figsize=(10,6)): \n # Calculating rolling mean and standard deviation\n rolling_mn = time_series.rolling(window).mean()\n rolling_std = time_series.rolling(window).std()\n \n plt.figure(figsize=figsize)\n plt.plot(time_series, color = 'blue',label = 'Original TS')\n plt.plot(rolling_mn, color = 'red', label = 'Rolling Mean')\n plt.plot(rolling_std, color = 'black', label = 'Rolling St.Dev.')\n plt.legend(loc = 'best')\n plt.grid(True, color = 'lightgrey')\n plt.title('Rolling Mean & Standard Deviation of the Trade Value of Vaccines', fontsize = 10)\n \n # Dickey-Fuller test:\n print('Results of Dickey-Fuller Test:')\n fuller_test = adfuller(time_series, autolag = 'AIC')\n results_ts = pd.Series(fuller_test[0:4], index = ['Test Statistic','P-value','#Lags Used','Number of Observations Used'])\n for key,value in fuller_test[4].items():\n results_ts['Critical Value (%s)'%key] = value\n print(results_ts)", "def plot_decompose(self):\n try:\n assert self._arr_seasonal is not None\n except AssertionError:\n self.ts_decompose()\n\n fig, axes = plt.subplots(5, 1, figsize=(20, 9), sharex=True)\n axes[0].plot(self._res_decomp.observed)\n axes[0].set_ylabel(\"Original\")\n #\n axes[1].plot(self._arr_trend)\n axes[1].set_ylabel(\"Trend\")\n #\n axes[2].plot(self._arr_seasonal)\n axes[2].set_ylabel(\"Seasonal\")\n #\n axes[3].plot(self._arr_baseline)\n axes[3].set_ylabel(\"Baseline\")\n #\n axes[4].plot(self.residuals)\n axes[4].set_ylabel(\"Residuals\")\n #\n if self.upper_whisker_res is not None:\n axes[4].axhline(y=self.upper_whisker_res,\n xmin=0,\n xmax=1, color='m',\n label='upper_whisker',\n linestyle='--', linewidth=1.5)\n axes[4].axhline(y=-self.upper_whisker_res,\n xmin=0,\n xmax=1, color='m',\n label='upper_whisker',\n linestyle='--', linewidth=1.5)\n\n plt.gcf().autofmt_xdate()\n plt.grid(True)\n plt.show()", "def shadow_plot(x, y, **kwargs):\n n_samples = y.size # Number of samples\n smooth_factor = 0.5 # Default smooth factor\n label = None\n semilogy = False\n clr = None\n shadow_std = False # Whether to subtend the shadow between -std and +std or not.\n\n ax = plt.gca()\n if get_prop_cycle() is None:\n cmap = plt.cm.get_cmap(name=\"Vega20\", lut=64)\n ax.set_prop_cycle(cycler('color', cmap(range(64))))\n\n for key, value in kwargs.items():\n if key == \"label\":\n label = value\n elif key == \"smooth\": # Smooth factor\n smooth_factor = value\n if smooth_factor < 0 or smooth_factor > 1:\n raise ValueError(\"The smooth factor must lie between 0 and 1.\")\n elif key == \"semilogy\":\n semilogy = value\n if not isinstance(semilogy, bool):\n raise 
ValueError(\"semilogy can only be True or False.\")\n elif key == \"color\":\n clr = value\n elif key == \"shadowstd\":\n if not isinstance(semilogy, bool):\n raise ValueError(\"shadowstd can only be True or False.\")\n shadow_std = value\n\n n = int(math.ceil(smooth_factor * 0.25 * n_samples)) # Window size for averaging (at most, 25% of all samples)\n mu = np.full(n_samples, np.nan)\n sigma = np.full(n_samples, np.nan)\n perc5 = np.full(n_samples, np.nan)\n perc95 = np.full(n_samples, np.nan)\n for s in range(n_samples):\n s0 = max(s - n + 1, 0)\n mu[s] = np.mean(y[s0:s + 1])\n sigma[s] = np.std(y[s0:s + 1])\n perc5[s] = np.percentile(y[s0:s + 1], 5)\n perc95[s] = np.percentile(y[s0:s + 1], 95)\n assert np.all(sigma >= 0), \"Negative standard deviation\"\n\n if semilogy:\n base_line, = ax.semilogy(x, mu, lw=2, label=label, color=clr)\n else:\n base_line, = ax.plot(x, mu, lw=2, label=label, color=clr)\n\n if shadow_std:\n higher_border = mu + sigma\n lower_border = mu - sigma\n else:\n higher_border = perc95\n lower_border = perc5\n\n if semilogy:\n # If the lower border of the shadowed region is 0 or negative, clip its minimum value. The minimum value shall\n # be equidistant from the average in log scale.\n factor = higher_border / mu\n lower_border = np.clip(lower_border, mu / factor, mu)\n\n ax.fill_between(x, higher_border, lower_border, facecolor=base_line.get_color(), alpha=0.2)", "def draw_bonus_error(error):\n f, ax = plt.subplots()\n vertices = np.arange(10, 50)\n ax.plot(vertices, error[10:], 'b', label='Error')\n plt.xlabel('Rounds')\n plt.ylabel('Misclassification Error')\n plt.title('Misclassification Error: l = 10, m = 20, n = 40')\n plt.legend(loc='upper left')\n plt.grid(True)\n plt.show()", "def test_plot_ts_valueerror(multidim_models, val_err_kwargs):\n idata2 = multidim_models.model_1\n with pytest.raises(ValueError):\n plot_ts(idata=idata2, y=\"y\", **val_err_kwargs)", "def plot_biomass_quantiles(df):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n\n ax.set_position(default_timeseries_position) \n \n grp = df['BmsMT'].groupby([df.Year, df.Reg, df.Sreg]) \n \n qmean = grp.mean().loc[:, 'All', 'All'] \n q90 = grp.quantile(0.90).loc[:, 'All', 'All'] \n q75 = grp.quantile(0.75).loc[:, 'All', 'All'] \n q50 = grp.quantile(0.50).loc[:, 'All', 'All'] \n q25 = grp.quantile(0.25).loc[:, 'All', 'All'] \n q10 = grp.quantile(0.10).loc[:, 'All', 'All'] \n \n colors = seaborn.color_palette(n_colors=3);\n\n q90.plot(ax=ax, color=colors[0], linestyle='--', label='90%') \n q75.plot(ax=ax, color=colors[1], linestyle='--', label='75%') \n qmean.plot(ax=ax, color='black', label='Mean') \n q50.plot(ax=ax, color=colors[2], linestyle='--', label='50%') \n q25.plot(ax=ax, color=colors[1], linestyle='--', label='25%') \n q10.plot(ax=ax, color=colors[0], linestyle='--', label='10%') \n \n ax.legend(loc='best') \n ax.set_ylim(bottom=-500) \n \n ax.set_ylabel('Biomass (mt meats)') \n\n content = io.BytesIO()\n plt.savefig(content, format='png')\n content.seek(0)\n image_cache['biomass']['quantiles'] = content\n\n plt.close()", "def plot_tseries(time_series, fig=None, axis=0,\r\n xticks=None, xunits=None, yticks=None, yunits=None,\r\n xlabel=None, ylabel=None, yerror=None, error_alpha=0.1,\r\n time_unit=None, **kwargs):\r\n\r\n if fig is None:\r\n fig = plt.figure()\r\n\r\n if not fig.get_axes():\r\n ax = fig.add_subplot(1, 1, 1)\r\n else:\r\n ax = fig.get_axes()[axis]\r\n\r\n #Make sure that time displays on the x axis with the units you want:\r\n #If you want to change the time-unit on the 
visualization from that used to\r\n #represent the time-series:\r\n if time_unit is not None:\r\n tu = time_unit\r\n conv_fac = ts.time_unit_conversion[time_unit]\r\n #Otherwise, get the information from your input:\r\n else:\r\n tu = time_series.time_unit\r\n conv_fac = time_series.time._conversion_factor\r\n\r\n this_time = time_series.time / float(conv_fac)\r\n ax.plot(this_time, time_series.data.T, **kwargs)\r\n\r\n if xlabel is None:\r\n ax.set_xlabel('Time (%s)' % tu)\r\n else:\r\n ax.set_xlabel(xlabel)\r\n\r\n if ylabel is not None:\r\n ax.set_ylabel(ylabel)\r\n\r\n if yerror is not None:\r\n if len(yerror.data.shape) == 1:\r\n this_e = yerror.data[np.newaxis, :]\r\n else:\r\n this_e = yerror.data\r\n delta = this_e\r\n e_u = time_series.data + delta\r\n e_d = time_series.data - delta\r\n for i in range(e_u.shape[0]):\r\n ax.fill_between(this_time, e_d[i], e_u[i], alpha=error_alpha)\r\n\r\n return fig", "def folding(eventfile,Porb,nbins):\n times = fits.open(eventfile)[1].data['TIME'] #getting array of times\n gtis_data = fits.open(eventfile)[2].data #getting GTIs\n T = sum([ gtis_data[i]['STOP']-gtis_data[i]['START'] for i in range(len(gtis_data)) ]) #exposure time\n\n gtis_conform = []\n for i in range(len(gtis_data)):\n gtis_conform.append([gtis_data[i][0],gtis_data[i][1]]) #conform to the input that Stingray uses\n\n phase_sr,prof_sr,err_sr = fold_events(times,1/Porb,gtis=np.array(gtis_conform),ref_time=times[0],nbin=nbins)\n phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times,1/Porb,gtis=np.array(gtis_conform),ref_time=times[0],expocorr=True,nbin=nbins)\n\n total_phase_sr = list(phase_sr) + list(phase_sr+1)\n total_prof_sr = list(prof_sr)*2\n total_err_sr = list(err_sr)*2\n\n total_phase_sr_expo = list(phase_sr_expo) + list(phase_sr_expo+1)\n total_prof_sr_expo = list(prof_sr_expo)*2\n total_err_sr_expo = list(err_sr_expo)*2\n\n plt.figure()\n plt.errorbar(x=total_phase_sr,y=total_prof_sr/T,yerr=total_err_sr/T,color='r',drawstyle='steps-mid')\n plt.errorbar(x=total_phase_sr_expo,y=total_prof_sr_expo/T,yerr=total_err_sr_expo/T,color='b',drawstyle='steps-mid')\n plt.legend(('Folded profile','Exposure-corrected'),loc='best',fontsize=12)\n plt.title(str(pathlib.Path(eventfile).name) +', exposure-corrected (using Stingray fold_events)',fontsize=12)\n plt.xlabel('Phase',fontsize=12)\n plt.ylabel('Counts/s',fontsize=12)\n\n return total_phase_sr_expo,total_prof_sr_expo/T,total_err_sr_expo/T", "def plot_exp_traces_plus_means(all_table, yscale = 'linear', data_col = 'spike_freq', x_label = 'Time (days)', title = 'Experiment Traces plus Mean', ymax=10, end_time = 1.375, norm_dmso = False, dmso_table = pd.DataFrame([]), c = 'multi', **plot_kwargs):\n \n time_vector = all_table['time']\n cat_table_norm = pd.DataFrame([])\n \n for exp in all_table['exp'].unique():\n exp_table = all_table.query('exp == @exp')\n time_vector = exp_table['time']\n if norm_dmso == True:\n exp_dmso_table = dmso_table.query('exp == @exp')\n if exp_dmso_table.empty:\n norm_exp = exp_table[data_col]\n else:\n norm_exp = np.divide(exp_table[data_col], exp_dmso_table[data_col])\n if c == 'multi':\n plt.plot(time_vector, norm_exp, alpha=0.4, **plot_kwargs)\n else:\n plt.plot(time_vector, norm_exp, alpha=0.2, color=c, label='_nolegend_', **plot_kwargs)\n cat_table_norm = pd.concat([cat_table_norm, (pd.DataFrame(data = {'spike_freq': norm_exp, 'time': time_vector, 'exp': exp}))])\n tt_drug, tt_end = exp_stats(cat_table_norm, end_time)\n else:\n if c == 'multi':\n plt.plot(time_vector, exp_table[data_col], 
alpha=0.4, **plot_kwargs)\n else:\n plt.plot(time_vector, exp_table[data_col], alpha=0.2, color = c, label='_nolegend_', **plot_kwargs)\n tt_drug, tt_end = exp_stats(all_table, end_time)\n \n mean_freq_traces = all_table.groupby(('time'))[data_col].mean()\n mean_freq_traces = mean_freq_traces.rename(data_col).reset_index() # Convert the multiindexed series back to a dataframe\n \n if norm_dmso == True:\n mean_freq_traces_dmso = dmso_table.groupby(('time'))[data_col].mean()\n mean_freq_traces_dmso = mean_freq_traces_dmso.rename(data_col).reset_index()\n if c == 'multi':\n plt.plot(mean_freq_traces['time'], np.divide(mean_freq_traces[data_col], mean_freq_traces_dmso[data_col]), 'k', **plot_kwargs)\n else:\n plt.plot(mean_freq_traces['time'], np.divide(mean_freq_traces[data_col], mean_freq_traces_dmso[data_col]), c, **plot_kwargs)\n else:\n if c == 'multi':\n plt.plot(mean_freq_traces['time'], mean_freq_traces[data_col], 'k', **plot_kwargs)\n else:\n plt.plot(mean_freq_traces['time'], mean_freq_traces[data_col], c, **plot_kwargs)\n \n plt.axhline(1, color='k', label='_nolegend_')\n plt.ylim([0,ymax])\n \n \n print('Drug Stats: ')\n print(tt_drug)\n print('End Stats: ')\n print(tt_end)\n \n plt.yscale(yscale)\n plt.xlabel(x_label)\n plt.ylabel('Fold Induction')\n plt.title(title)\n if c == 'multi':\n plt.legend(all_table['exp'].unique())\n \n return cat_table_norm", "def error_rates(epoch, model, features, filters, figname, fgal=0.5, \n idx=-1, N=10000):\n Xsingle, Xsinglecov = fetch_prepped_s82data(epoch, fgal, features, filters)\n Xcoadd, Xcoaddcov = fetch_prepped_s82data(epoch, fgal, features,\n filters, use_single=False)\n Xdr10, Xdr10cov = fetch_prepped_dr10data(60000, fgal, features, filters)\n\n Xdr10 = Xdr10[:N]\n Xdr10cov = Xdr10cov[:N]\n Xsingle = Xsingle[:N]\n Xsinglecov = Xsinglecov[:N]\n Xcoadd = Xcoadd[:N]\n Xcoaddcov = Xcoaddcov[:N]\n\n # unpickle the XD model\n if type(model) == str:\n f = open(model, 'rb')\n model = cPickle.load(f)\n f.close()\n\n a1, m1, v1 = model.posterior(Xsingle, Xsinglecov)\n a2, m2, v2 = model.posterior(Xdr10, Xdr10cov)\n\n dr10_med = np.zeros_like(Xdr10)\n single_med = np.zeros_like(Xsingle)\n dr10_sig = np.zeros_like(Xdr10)\n single_sig = np.zeros_like(Xsingle)\n for i in range(N):\n samp = model.sample(a1[i], m1[i], v1[i], size=1000)\n single_med[i] = np.median(samp, axis=0)\n single_sig[i] = np.std(samp, axis=0)\n samp = model.sample(a2[i], m2[i], v2[i], size=1000)\n dr10_med[i] = np.median(samp, axis=0)\n dr10_sig[i] = np.std(samp, axis=0)\n\n fs = 5\n dlt = 0.2\n fac = 2\n lsize = 20\n mags = np.linspace(18, 22, 4. 
/ dlt)\n ind = [0, 1, 2, idx, 3, 4]\n ylab = ['psfmag $r$ error', 'modelmag $u-g$ error',\n 'modelmag $g-r$ error', 'psfmag - modelmag $r$ error',\n 'modelmag $r-i$ error', 'modelmag $i-z$ error']\n xlab = 'psfmag $r$'\n xticks = np.array(['%0.0f' % v for v in np.linspace(18, 22, 9)])\n xticks[range(1, 8, 2)] = ''\n f = pl.figure(figsize=(3 * fs, 2 * fs))\n pl.subplots_adjust(wspace=0.3)\n for i in range(len(ind)):\n dr10err = bininator(mags, dlt, Xdr10[:, 0],\n np.sqrt(Xdr10cov[:, ind[i]][:, ind[i]]))\n singleerr = bininator(mags, dlt, Xsingle[:, 0],\n np.sqrt(Xsinglecov[:, ind[i]][:, ind[i]]))\n coadderr = bininator(mags, dlt, Xcoadd[:, 0],\n np.sqrt(Xcoaddcov[:, ind[i]][:, ind[i]]))\n dr10_posterr = bininator(mags, dlt, dr10_med[:, 0],\n dr10_sig[:, ind[i]])\n single_posterr = bininator(mags, dlt, single_med[:, 0],\n single_sig[:, ind[i]])\n ax = pl.subplot(2, 3, i + 1)\n pl.plot(mags, coadderr, 'k', lw=2, label='coadd')\n pl.plot(mags, singleerr, 'k', ls=':', lw=2, label='single epoch')\n pl.plot(mags, single_posterr, 'k--', lw=2,\n label='XD post. single epoch')\n pl.plot(mags, dr10err, 'r', ls=':', lw=2, label='DR10')\n pl.plot(mags, dr10_posterr, 'r--', lw=2, label='XD post. DR10')\n pl.xlabel(xlab, fontsize=lsize)\n pl.ylabel(ylab[i], fontsize=lsize)\n pl.xlim(18, 22)\n pl.ylim(-fac * coadderr[0], singleerr.max())\n ax.set_xticklabels(xticks)\n if i == 1:\n pl.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15), ncol=5,\n prop={'size':15})\n f.savefig(figname, bbox_inches='tight')", "def tplot(self, analytes=None, figsize=[10, 4], scale=None, filt=None,\n ranges=False, stats=False, stat='nanmean', err='nanstd',\n interactive=False, focus_stage=None, err_envelope=False):\n\n if interactive:\n enable_notebook() # make the plot interactive\n\n if type(analytes) is str:\n analytes = [analytes]\n if analytes is None:\n analytes = self.analytes\n\n if focus_stage is None:\n focus_stage = self.focus_stage\n\n fig = plt.figure(figsize=figsize)\n ax = fig.add_axes([.1,.12,.77,.8])\n\n for a in analytes:\n x = self.Time\n y, yerr = unpack_uncertainties(self.data[focus_stage][a])\n\n if scale is 'log':\n ax.set_yscale('log')\n y[y == 0] = np.nan\n\n if filt:\n ind = self.filt.grab_filt(filt, a)\n xf = x.copy()\n yf = y.copy()\n yerrf = yerr.copy()\n if any(~ind):\n xf[~ind] = np.nan\n yf[~ind] = np.nan\n yerrf[~ind] = np.nan\n if any(~ind):\n ax.plot(x, y, color=self.cmap[a], alpha=.4, lw=0.6)\n ax.plot(xf, yf, color=self.cmap[a], label=a)\n if err_envelope:\n ax.fill_between(xf, yf - yerrf, yf + yerrf, color=self.cmap[a],\n alpha=0.2, zorder=-1)\n else:\n ax.plot(x, y, color=self.cmap[a], label=a)\n if err_envelope:\n ax.fill_between(x, y - yerr, y + yerr, color=self.cmap[a],\n alpha=0.2, zorder=-1)\n\n # Plot averages and error envelopes\n if stats and hasattr(self, 'stats'):\n sts = self.stats[sig][0].size\n if sts > 1:\n for n in np.arange(self.n):\n n_ind = ind & (self.ns == n + 1)\n if sum(n_ind) > 2:\n x = [self.Time[n_ind][0], self.Time[n_ind][-1]]\n y = [self.stats[sig][self.stats['analytes'] == a][0][n]] * 2\n\n yp = ([self.stats[sig][self.stats['analytes'] == a][0][n] +\n self.stats[err][self.stats['analytes'] == a][0][n]] * 2)\n yn = ([self.stats[sig][self.stats['analytes'] == a][0][n] -\n self.stats[err][self.stats['analytes'] == a][0][n]] * 2)\n\n ax.plot(x, y, color=self.cmap[a], lw=2)\n ax.fill_between(x + x[::-1], yp + yn,\n color=self.cmap[a], alpha=0.4,\n linewidth=0)\n else:\n x = [self.Time[0], self.Time[-1]]\n y = [self.stats[sig][self.stats['analytes'] == a][0]] * 2\n 
yp = ([self.stats[sig][self.stats['analytes'] == a][0] +\n self.stats[err][self.stats['analytes'] == a][0]] * 2)\n yn = ([self.stats[sig][self.stats['analytes'] == a][0] -\n self.stats[err][self.stats['analytes'] == a][0]] * 2)\n\n ax.plot(x, y, color=self.cmap[a], lw=2)\n ax.fill_between(x + x[::-1], yp + yn, color=self.cmap[a],\n alpha=0.4, linewidth=0)\n\n if ranges:\n for lims in self.bkgrng:\n ax.axvspan(*lims, color='k', alpha=0.1, zorder=-1)\n for lims in self.sigrng:\n ax.axvspan(*lims, color='r', alpha=0.1, zorder=-1)\n\n if filt is not None:\n ind = self.filt.grab_filt(filt)\n lims = bool_2_indices(~ind)\n for l, u in lims:\n if u >= len(self.Time):\n u = -1\n ax.axvspan(self.Time[l], self.Time[u], color='k',\n alpha=0.05, lw=0)\n\n # drawn = []\n # for k, v in self.filt.switches.items():\n # for f, s in v.items():\n # if s & (f not in drawn):\n # lims = bool_2_indices(~self.filt.components[f])\n # for u, l in lims:\n # ax.axvspan(self.Time[u-1], self.Time[l], color='k',\n # alpha=0.05, lw=0)\n # drawn.append(f)\n\n ax.text(0.01, 0.99, self.sample + ' : ' + self.focus_stage,\n transform=ax.transAxes,\n ha='left', va='top')\n\n ax.set_xlabel('Time (s)')\n ax.set_xlim(np.nanmin(x), np.nanmax(x))\n \n # y label\n ud = {'rawdata': 'counts',\n 'despiked': 'counts',\n 'bkgsub': 'background corrected counts',\n 'ratios': 'counts/{:s} count',\n 'calibrated': 'mol/mol {:s}'}\n if focus_stage in ['ratios', 'calibrated']:\n ud[focus_stage] = ud[focus_stage].format(self.internal_standard)\n ax.set_ylabel(ud[focus_stage])\n\n if interactive:\n ax.legend()\n plugins.connect(fig, plugins.MousePosition(fontsize=14))\n display.clear_output(wait=True)\n display.display(fig)\n input('Press [Return] when finished.')\n disable_notebook() # stop the interactivity\n else:\n ax.legend(bbox_to_anchor=(1.15, 1))\n\n return fig, ax", "def plot_multi_bars_means_stds_sats(image_list, ax): \n N = len(image_list)\n means = [compute_means(image) for image in image_list]\n stds = [compute_stds(image) for image in image_list]\n saturations = [(1-compute_saturations(image)) for image in image_list]\n \n mean_of_every_feature = []\n for idx in range(len(means)):\n mean_of_every_feature.append((means[idx] + stds[idx] + saturations[idx])/3)\n \n \n ## necessary variables\n ind = np.arange(N) # the x locations for the groups\n width = 0.3 # the width of the bars\n #the bars\n rects1 = ax.bar(ind, means, width, color='red')\n rects2 = ax.bar(ind+width, stds, width, color='green')\n rects3 = ax.bar(ind+2*width, saturations, width, color='blue')\n # axes and labels\n ax.set_xlim(-0.5*width,len(ind)+0.5*width)\n ax.set_ylim(0,1)# this is customized for optimal visualization\n# ax.set_xlabel(r'$Methods \\ in$')\n \n #ax.set_title('Scores by group and gender')\n xTickMarks = [r'$[9]$', \n r'$[23]$', \n r'$[17]$', \n r'$[18]$', \n r'$[24]$', \n r'RC']\n ax.set_xticks(ind+width)\n xtickNames = ax.set_xticklabels(xTickMarks)\n plt.setp(xtickNames, rotation=0)\n ## add a legend\n ax.legend( (rects1[0], rects2[0], rects3[0]), (r'$\\mu_{\\mathrm{diff}}$', r'$\\sigma_{\\mathrm{diff}}$', r'$\\lambda$'), \n loc=1, ncol=3, handlelength=0.8, borderpad=0.2, labelspacing=0.0)\n\n return mean_of_every_feature", "def plot_var_time_series_dt0_multiquant(TRT_ID_sel, df_nonnan, cfg_tds):\r\n \r\n date_of_cell = datetime.datetime.strptime(TRT_ID_sel[\"TRT_ID\"][:12], \"%Y%m%d%H%M\")\r\n \r\n ## Find cells where the there are loads of similar TRT Ranks:\r\n DTI_sel = [dti for dti in df_nonnan.index.values if dti[13:] in 
TRT_ID_sel[\"TRT_ID\"]]\r\n cell_sel = df_nonnan.loc[DTI_sel]\r\n cell_sel.set_index(pd.to_datetime([datetime.datetime.strptime(date[:12],\"%Y%m%d%H%M\") for date in cell_sel.index]),\r\n drop=True,append=False,inplace=True)\r\n \r\n fig, axes = plt.subplots(2,2)\r\n fig.set_size_inches(10,8) \r\n cmap_3_quant = truncate_cmap(plt.get_cmap('afmhot'), 0.2, 0.6)\r\n legend_entries = []\r\n cell_sel[[\"IR_108_stat|0|MIN\",\"IR_108_stat|0|PERC05\",\"IR_108_stat|0|PERC25\"]].plot(ax=axes[0,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,0].set_title(r\"Brightness Temperatures T$_B$\")\r\n axes[0,0].set_ylabel(r\"IR 10.8$\\mu$m [K]\")\r\n legend_entries.append([\"Min\",\"5%\", \"25%\"])\r\n\r\n cell_sel[[\"CG3_stat|0|PERC99\",\"CG3_stat|0|PERC95\",\"CG3_stat|0|PERC75\"]].plot(ax=axes[0,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,1].set_title(\"Glaciation indicator (GI)\")\r\n axes[0,1].set_ylabel(r\"IR 12.0$\\mu$m - IR 10.8$\\mu$m [K]\")\r\n legend_entries.append([\"99%\",\"95%\", \"75%\"])\r\n\r\n cell_sel[[\"CD5_stat|0|MAX\",\"CD5_stat|0|PERC95\",\"CD5_stat|0|PERC75\"]].plot(ax=axes[1,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,0].set_title(\"Cloud optical depth indicator (COD)\")\r\n axes[1,0].set_ylabel(r\"WV 6.2$\\mu$m - IR 10.8$\\mu$m [K]\")\r\n legend_entries.append([\"Max\",\"95%\", \"75%\"])\r\n\r\n cell_sel[[\"IR_108_stat|-15|PERC25\",\"IR_108_stat|-15|PERC50\",\"IR_108_stat|-15|PERC75\"]].plot(ax=axes[1,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,1].set_title(r\"Updraft strength indicator ($w_{T}$)\")\r\n axes[1,1].set_ylabel(r\"IR 10.8$\\mu$m (t$_0$) - IR 10.8$\\mu$m (t$_{-15}$) [K]\")\r\n legend_entries.append([\"25%\",\"50%\", \"75%\"])\r\n for ax, leg_ent in zip(axes.flat,legend_entries):\r\n ax.grid()\r\n ax.legend(leg_ent, fontsize=\"small\", loc=\"upper right\") #, title_fontsize=\"small\", title =\"Quantiles\"\r\n plt.tight_layout()\r\n plt.savefig(os.path.join(cfg_tds[\"fig_output_path\"],\"SEVIRI_series_%s.pdf\" % (TRT_ID_sel[\"TRT_ID\"])))\r\n plt.close()\r\n\r\n fig, axes = plt.subplots(3,2)\r\n fig.set_size_inches(10,8) \r\n legend_entries = []\r\n cell_sel[[\"RZC_stat_nonmin|0|PERC50\",\"RZC_stat_nonmin|0|PERC75\",\"RZC_stat_nonmin|0|MAX\"]].plot(ax=axes[0,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"RZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[0,0],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[0,0].set_title(r\"Rain Rate (RR)\")\r\n axes[0,0].set_ylabel(r\"Rain Rate [mm h$^{-1}$]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"50%\",\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"LZC_stat_nonmin|0|PERC50\",\"LZC_stat_nonmin|0|PERC75\",\"LZC_stat_nonmin|0|MAX\"]].plot(ax=axes[0,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"LZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[0,1],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[0,1].set_title(\"Vertically Integrated Liquid (VIL)\")\r\n axes[0,1].set_ylabel(r\"VIL [kg m$^{-2}$]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"50%\",\"95%\", \"MAX\"])\r\n\r\n cell_sel[[\"MZC_stat_nonmin|0|PERC50\",\"MZC_stat_nonmin|0|PERC75\",\"MZC_stat_nonmin|0|MAX\"]].plot(ax=axes[1,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"MZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[1,0],color=\"black\",linewidth=0.5,style='--',alpha=0.8, 
secondary_y=True)\r\n axes[1,0].set_title(\"Maximum Expected Severe Hail Size (MESHS)\")\r\n axes[1,0].set_ylabel(\"MESHS [cm]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"25%\",\"50%\", \"75%\"])\r\n\r\n cell_sel[[\"BZC_stat_nonmin|0|PERC50\",\"BZC_stat_nonmin|0|PERC75\",\"BZC_stat_nonmin|0|MAX\"]].plot(ax=axes[1,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"BZC_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[1,1],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[1,1].set_title(\"Probability of Hail (POH)\")\r\n axes[1,1].set_ylabel(r\"POH [%]\")\r\n ax_pixc.set_ylabel(\"Covered areal fraction [%]\")\r\n legend_entries.append([\"50%\",\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"EZC15_stat_nonmin|0|PERC75\",\"EZC15_stat_nonmin|0|MAX\",\"EZC45_stat_nonmin|0|PERC75\",\"EZC45_stat_nonmin|0|MAX\"]].plot(ax=axes[2,0],color=[\"#fdbf6f\",\"#ff7f00\",\"#fb9a99\",\"#e31a1c\"],linewidth=1,style='-',alpha=0.8)\r\n ax_pixc=(100-cell_sel[[\"EZC45_pixc_NONMIN|0|SUM\"]]/4.21).plot(ax=axes[2,0],color=\"black\",linewidth=0.5,style='--',alpha=0.8, secondary_y=True)\r\n axes[2,0].set_title(\"Echo Top (ET)\")\r\n axes[2,0].set_ylabel(\"Altitude a.s.l. [km]\")\r\n ax_pixc.set_ylabel(\"Pixel count\")\r\n legend_entries.append([\"75% (15dBZ)\",\"Max (15dBZ)\", \"75% (45dBZ)\", \"Max (45dBZ)\"])\r\n\r\n cell_sel[[\"THX_dens_stat|0|MEAN\",\"THX_densIC_stat|0|MEAN\",\"THX_densCG_stat|0|MEAN\"]].plot(ax=axes[2,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[2,1].set_title(\"Mean lightning Density (THX)\")\r\n axes[2,1].set_ylabel(\"Lightning density [km$^{-2}$]\")\r\n ax_pixc.set_ylabel(\"Pixel count\")\r\n legend_entries.append([\"Total\",\"IC\", \"CG\"])\r\n for ax, leg_ent in zip(axes.flat,legend_entries):\r\n ax.grid()\r\n ax.legend(leg_ent, fontsize=\"small\", loc=\"upper left\") #) #, title_fontsize=\"small\", title =\"Quantiles\"\r\n plt.tight_layout()\r\n plt.savefig(os.path.join(cfg_tds[\"fig_output_path\"],\"RADAR_series_%s.pdf\" % (TRT_ID_sel[\"TRT_ID\"])))\r\n plt.close()\r\n\r\n fig, axes = plt.subplots(2,2)\r\n fig.set_size_inches(10,8) \r\n legend_entries = []\r\n cell_sel[[\"CAPE_ML_stat|0|PERC50\",\"CAPE_ML_stat|0|MAX\"]].plot(ax=axes[0,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,0].set_title(r\"CAPE (mean surface layer parcel)\")\r\n axes[0,0].set_ylabel(r\"CAPE [J kg$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"CIN_ML_stat|0|PERC50\",\"CIN_ML_stat|0|MAX\"]].plot(ax=axes[0,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[0,1].set_title(r\"CIN (mean surface layer parcel)\")\r\n axes[0,1].set_ylabel(r\"CIN [J kg$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"WSHEAR_0-3km_stat|0|PERC50\",\"WSHEAR_0-3km_stat|0|MAX\"]].plot(ax=axes[1,0],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,0].set_title(r\"Wind shear (0km - 3km)\")\r\n axes[1,0].set_ylabel(r\"Wind shear [m s$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n cell_sel[[\"POT_VORTIC_30000_stat|0|PERC50\",\"POT_VORTIC_30000_stat|0|MAX\"]].plot(ax=axes[1,1],cmap=cmap_3_quant,linewidth=1,style='-',alpha=0.8)\r\n axes[1,1].set_title(r\"Potential vorticity (300hPa)\")\r\n axes[1,1].set_ylabel(r\"PV [K m$^{2}$ kg$^{-1}$ s$^{-1}$]\")\r\n legend_entries.append([\"75%\", \"MAX\"])\r\n\r\n for ax, leg_ent in zip(axes.flat,legend_entries):\r\n ax.grid()\r\n ax.legend(leg_ent, fontsize=\"small\", loc=\"upper left\") #) #, 
title_fontsize=\"small\", title =\"Quantiles\"\r\n plt.tight_layout()\r\n plt.savefig(os.path.join(cfg_tds[\"fig_output_path\"],\"COSMO_THX_series_%s.pdf\" % (TRT_ID_sel[\"TRT_ID\"])))\r\n plt.close()", "def plot_loss(stats):\r\n plt.plot(stats['train_loss_ind'], stats['train_loss'], label='Training loss')\r\n plt.plot(stats['val_loss_ind'], stats['val_loss'], label='Validation loss')\r\n plt.legend()\r\n plt.xlabel('Number of iterations')\r\n plt.ylabel('Loss')\r\n plt.show()", "def plotTimeDelta(data, type_plot, device):\n mean = data.mean()\n std = data.std()\n max_data = data.max()\n min_data = data.min()\n max_indx = np.argmax(data) # max value index\n min_indx = np.argmin(data) # min value index\n x = np.arange(min_data, max_data, 0.1)\n y = normfun(x, mean, std)\n res_quantile = quantileValues(data, device)\n if type_plot == 0:\n plt.plot(x, y, color='blue')\n annot_max_min(x, y)\n # plt.hist(data.dropna(), bins=500, rwidth=0.9, normed=True)\n plt.title('Time Delta distribution')\n plt.xlabel('Time Delta')\n plt.ylabel('Probability')\n sns.distplot(tmp.deltaSeconds.dropna(),\n kde=True, rug=True, rug_kws={\"color\": \"k\"},\n kde_kws={\"color\": \"red\", \"lw\": 3, \"label\": \"KDE\"},\n hist_kws={\"histtype\": \"step\", \"lw\": 3, \"alpha\": 1,\n \"color\": \"g\"},\n bins=500)\n # ax.set(xlabel='Vibration Intensity', ylabel='Probability')\n elif type_plot == 1: # plot the max and min point\n plt.plot(data)\n plt.plot(max_indx, data[max_indx], 'ks')\n show_max = '['+str(max_indx)+' '+str(data[max_indx])+']'\n plt.annotate(show_max,\n xytext=(max_indx, data[max_indx]),\n xy=(max_indx, data[max_indx]))\n plt.plot(min_indx, data[min_indx], 'gs')\n show_min = '['+str(min_indx)+' '+str(data[min_indx])+']'\n plt.annotate(show_min,\n xytext=(min_indx, data[min_indx]),\n xy=(min_indx, data[min_indx]))\n plt.title('Time Delta')\n plt.xlabel('Index')\n plt.ylabel('Vibration Intensity Value')\n elif type_plot == 2: # boxplot\n boxplot(data.dropna())\n return res_quantile", "def plot_seaice_trend(anomlous = False, temporal_resolution = 'monthly', spatial_resolution = 1, detrend = False, imagefolder = 'images/trends/SIC/',seaice_source='nsidc'):\n output_folder = 'processed_data/SIC/'\n if seaice_source == 'ecmwf':\n output_folder = 'processed_data/ERA5/SIC/'\n\n if anomlous:\n temp_decomp = 'anomalous'\n else:\n temp_decomp = 'raw'\n\n\n title = temp_decomp.capitalize() + ' '\n\n if detrend:\n dt = 'detrended'\n title += dt + ' '\n else:\n dt = 'raw'\n\n title += temporal_resolution\n title += ' SIE trends'\n\n\n seaicename = f'{temp_decomp}_{temporal_resolution}_{spatial_resolution}_{dt}'\n seaice = xr.open_dataset(output_folder + seaicename +'.nc')\n area = xr.open_dataset('data/area_files/processed_nsidc.nc').area\n\n if seaice_source == 'nsidc':\n seaice = seaice * area /250\n seaice_m, seaice_b, seaice_r_value, seaice_p_value, seaice_std_err = xr.apply_ufunc(scipy.stats.linregress, seaice[seaicename].time.values.astype(float), seaice[seaicename], input_core_dims=[['time'],['time']], vectorize=True, dask='parallelized', output_dtypes=[float]*5, output_core_dims=[[]]*5)\n if seaice_source =='ecmwf':\n seaice_m, seaice_b, seaice_r_value, seaice_p_value, seaice_std_err = scipy.stats.linregress(seaice[seaicename].time.values.astype(float), seaice[seaicename])\n \n seaice_m = seaice_m * 1e9 * 60 * 60 * 24 * 365\n area = xr.open_dataset('data/area_files/processed_nsidc.nc').area\n # seaice_m = seaice_m*area\n seaice_m = seaice_m.where(seaice_m != 0)\n # seaice_m = seaice_m.where(seaice_p_value <= 
0.05)\n max_ = seaice_m.max()\n min_ = seaice_m.min() \n # max_ = 1\n divnorm = TwoSlopeNorm(vmin=min_, vcenter=0, vmax=max_)\n fig = plt.figure(figsize = (5,5))\n ax = fig.add_subplot(111, projection = ccrs.SouthPolarStereo())\n # Plotting\n contor = ax.contourf(seaice_m.x, seaice_m.y, seaice_m, cmap = 'RdBu', levels = 11, norm = divnorm, transform=ccrs.SouthPolarStereo())\n ax.coastlines()\n ax.set_axis_off()\n cbar = plt.colorbar(contor)\n cbar.set_label('Trend in SIE (km$^2$ yr$^{-1}$)')\n plt.title(title)\n plt.savefig(imagefolder + seaicename + '.pdf')\n plt.show()", "def plot_multif_prediction(pred_multif, y_reana, forecast_range=14, title=None):\n fig, ax = plt.subplots(figsize=(15, 5))\n y_reana.sel({'time': pred_multif.time.values.ravel()}\n ).to_pandas().plot(ax=ax, label='GloFAS Reanalysis')\n\n pdseries = pd.Series(data=pred_multif.sel(num_of_forecast=1).values,\n index=pred_multif.sel(num_of_forecast=1).time.values)\n pdseries.plot(ax=ax, label='Model')\n plt.legend()\n for i in pred_multif.num_of_forecast[1:]:\n fcst = pd.Series(data=pred_multif.sel(num_of_forecast=i).values,\n index=pred_multif.sel(num_of_forecast=i).time.values)\n fcst.plot(ax=ax)\n\n ax.set_ylabel('river discharge [m$^3$/s]')\n\n y_o = y_reana.loc[{'time': pred_multif.time.values.ravel()}].values\n y_m = pred_multif.values.ravel()\n\n rmse = np.sqrt(np.nanmean((y_m - y_o)**2))\n nse = 1 - np.sum((y_m - y_o)**2)/(np.sum((y_o - np.nanmean(y_o))**2))\n\n plt.title(f\"{title} | RMSE={round(float(rmse), 2)}; NSE={round(float(nse), 2)} |\")\n return fig, ax" ]
[ "0.5816549", "0.5654263", "0.5647554", "0.5641937", "0.5606311", "0.5547447", "0.5509094", "0.5484054", "0.54816675", "0.5443533", "0.5367257", "0.5338084", "0.52920187", "0.52789783", "0.5228553", "0.5223728", "0.52207625", "0.52193207", "0.5198728", "0.5193151", "0.518719", "0.51768726", "0.51384556", "0.51314616", "0.512458", "0.51170695", "0.50964254", "0.5092824", "0.509155", "0.5085633", "0.5073361", "0.50671184", "0.5064674", "0.50566983", "0.5047882", "0.5045654", "0.503853", "0.50235546", "0.5015056", "0.5009643", "0.49910134", "0.49897563", "0.49887168", "0.4985732", "0.49834076", "0.4979205", "0.49772754", "0.49684456", "0.49597442", "0.49554613", "0.49526784", "0.4946763", "0.49440238", "0.4932232", "0.49314895", "0.4930676", "0.4930676", "0.49293646", "0.49187058", "0.49162975", "0.49131355", "0.49123582", "0.49121338", "0.49033442", "0.49013248", "0.48996142", "0.4887419", "0.48863792", "0.48844498", "0.48832726", "0.48796874", "0.4873722", "0.48717862", "0.4866398", "0.48659095", "0.4859535", "0.48582438", "0.48554", "0.48549396", "0.48482677", "0.48458531", "0.48373613", "0.48355943", "0.48247483", "0.48244974", "0.4824119", "0.48230278", "0.48199362", "0.48164138", "0.48148134", "0.48134515", "0.48125225", "0.48077485", "0.47998637", "0.47990304", "0.4797398", "0.4792221", "0.4791324", "0.47904524", "0.47903195" ]
0.580105
1
Serializes C{result} to JSON and writes it to C{request}.
def _writeJSONResponse(result, request, code=CODE.SUCCESS, status=http.OK):
    response = {
        u'code': code.value,
        u'result': result}
    request.setHeader('content-type', 'application/json')
    request.setResponseCode(status)
    request.write(json.dumps(response))
    request.finish()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _success(self, result_ser, request):\n result = json.dumps(result_ser)\n request.write(result)\n request.finish()", "def _convert_to_JSON(result):\n response = make_response(json.dumps(result))\n response.headers['Access-Control-Allow-Origin'] = \"*\"\n response.mimetype = \"application/json\"\n return response", "def send_rpc_result(req, result):", "def make_json(result):\n new_result = result.to_dict()\n json_result = json.dumps(new_result, indent=4)\n return json_result", "def result():\n # Retrieve JSON parameters data.\n data = request.get_json() or {}\n data.update(dict(request.values))\n tid = data.get(\"tid\")\n if not tid:\n raise abort(400, \"missing 'tid' data\")\n\n # Get the result (if exists and finished).\n result = tasks.process_message.AsyncResult(tid)\n # Return status and result if available.\n resp = {\n \"status\": result.status,\n \"result\": None,\n }\n if result.ready():\n resp[\"result\"] = result.get()\n return resp", "def create_response(result):\n return ControllerResponse(\n response=result,\n status=200,\n mime='application/json',\n jsonize=True,\n )", "def respond(self,result):\n callback = self.request.get('callback')\n self.response.headers['Content-Type'] = 'application/json'\n #self.response.headers['Content-Type'] = '%s; charset=%s' % (config.CONTENT_TYPE, config.CHARSET)\n self.response.headers['Access-Control-Allow-Origin'] = '*'\n self.response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, DELETE, OPTIONS, PATCH, HEAD'\n self.response.headers['Access-Control-Allow-Headers'] = 'Origin, Content-Type, X-Requested-With'\n self.response.headers['Access-Control-Allow-Credentials'] = 'True'\n\n #Add a handler to automatically convert datetimes to ISO 8601 strings. \n dthandler = lambda obj: obj.isoformat() if isinstance(obj, datetime.datetime) else None\n if callback:\n content = str(callback) + '(' + json.dumps(result,default=dthandler) + ')'\n return self.response.out.write(content)\n \n return self.response.out.write(json.dumps(result,default=dthandler))", "def process_response(self, id, result):\n return {\n 'version': '1.1',\n 'id': id,\n 'result': result,\n 'error': None,\n }", "def return_api_result(self, result=None):\n if result is None:\n result = {}\n\n result['ok'] = True\n\n self.return_result(result)", "def finish(self, result: Dict):", "def push_result(self, result):\n try:\n new_item = json.dumps(result)\n self.db.zadd(\"soq_results\", new_item, time())\n except Exception as e:\n print(\"An error occurred while saving the result:\", e)", "def normalize_transfer_result(cls, result: JSON) -> JSON:\n ...", "def _store_result(self, task_id, result, status,\n traceback=None, request=None):\n content_type, content_encoding, result = self.encode_content(result)\n _, _, meta = self.encode_content({\n 'children': self.current_task_children(request),\n 'task_name': request.task\n })\n\n self.TaskModel._default_manager.store_result(\n content_type, content_encoding,\n task_id, result, status,\n traceback=traceback,\n meta=meta,\n )\n return result", "def _store(self, result, status):\n response = Response(\n host=result._host.get_name(),\n status=status,\n task=result._task.get_name(),\n payload=result._result)\n self._bucket.append(response)", "def save_result(res, name):\n with open('dist/'+name+'.json','w') as fp:\n json.dump(res, fp)", "def save(self, result_dir):\n path = os.path.join(result_dir, self._filename)\n\n util.write_json(path, {\n 'results': self._results,\n 'params': self._params,\n 'requirements': 
self._env.requirements,\n 'commit_hash': self._commit_hash,\n 'date': self._date,\n 'python': self._python\n }, self.api_version)", "def _send_response(self, result, peer):\n try:\n response = json.dumps(result).encode()\n self._socket.sendto(response, peer)\n except (ConnectionRefusedError, FileNotFoundError, PermissionError,\n TypeError):\n pass", "def normalize_transaction_result(cls, result: JSON) -> JSON:\n ...", "def upload_result():\n if len(request.files) == 0:\n return jsonify(success=False), 400\n\n file = next(request.files.values())\n filename = secure_filename(file.filename)\n file.save(op.join(RESULTS_FOLDER, filename))\n\n result = Result()\n result.file = op.join(RESULTS_FOLDER, filename)\n\n result.detector_start_time = datetime.fromtimestamp(float(request.form[\"detector_start_time\"]))\n result.detector_end_time = datetime.fromtimestamp(float(request.form[\"detector_end_time\"]))\n\n db.session.add(result)\n db.session.commit()\n\n return jsonify(success=True, result_id=result.id), 200", "def respond(self, result: Any) -> Optional['JSONRPCSuccessResponse']:\n if self.one_way or self.unique_id is None:\n return None\n\n response = JSONRPCSuccessResponse()\n\n response.result = result\n response.unique_id = self.unique_id\n\n return response", "def result(self):\n if self.__json:\n return self.__json[\"result\"]\n else:\n return {}", "def json_response(self, request, *args, **kwargs):\n\n return HttpResponse(self.construct_json(),\n content_type='application/json',\n mimetype='application/json', status=self.status)", "def result(self, result):\n self._result = result", "def result(self, result):\n self._result = result", "def post_algorithm():\n try:\n request_json = request.get_json()\n result = json.dumps([])\n response = app.response_class(\n response=result,\n status=200,\n mimetype='application/json')\n except ValueError as e:\n response = app.response_class(\n status=400,\n response=str(e)\n )\n return response", "def get_result(params):\n global PublicKey, ProjectId, url\n params[\"PublicKey\"] = PublicKey\n params[\"Signature\"] = verfy_ac(params)\n\n if ProjectId != '':\n params[\"ProjectId\"] = ProjectId\n\n r = requests.post(url, params)\n response = json.dumps(r.json(), indent=4)\n\n return response", "def result(self, result):\n\n self._result = result", "def result(self, result):\n\n self._result = result", "def save_result(working_space: str, result: dict) -> None:\n result_path = os.path.join(working_space, 'output')\n if not os.path.exists(result_path):\n os.makedirs(result_path)\n result_path = os.path.join(result_path, 'result.json')\n logging.info(\"Storing result at location: '%s'\", result_path)\n logging.debug(\"Result: %s\", str(result))\n\n with open(result_path, 'w') as out_file:\n json.dump(result, out_file, indent=2)", "def create_result_json(json_object, result_json_file):\n write_json_to_file(json_object, result_json_file)", "def _build_rpc_result(self, id, result):\n if id is None:\n return None\n\n return {\n 'jsonrpc': '2.0',\n 'id': id,\n 'result': result\n }", "def json_response(obj):\n return HttpResponse(json.dumps(obj), content_type=\"application/json\")", "def create(self):\n response = self.request()\n result_obj = result.Result()\n result_obj.response = response\n\n self.set_result(result_obj, self.execution_type)\n if self.execution_type == \"async\":\n # We set the execution status to 0 as there is no way of knowing the\n # status of async call. 
Only while reading the response data we will set\n # the actual status code in the result object\n result_obj.set_status_code(int(0))\n return result_obj", "def api_call():\n\n json_str = load_input()\n output = {\n 'inputs': json_str,\n 'results': 'cool results'}\n\n return json.dumps(output), 200, {'Content-Type': 'text/plain;charset=utf-8'}", "def process_json_result(self, json_result: dict) -> RestoRequestResult:\n try:\n resto_response = self.resto_response_cls(self, json_result)\n except RestoResponseError:\n msg = 'Response to {} from {} resto server cannot be understood.'\n # TOOD: move elsewhere ?\n raise IncomprehensibleResponse(msg.format(self.get_server_name()))\n\n return resto_response.as_resto_object()", "def save_fingerprint_result_to_file(result):\n file_path = os.path.join(helpers.get_json_output_directory(), str(int(time.time())) + '.json')\n with open(file_path, 'w') as outfile:\n json.dump(result, outfile)", "def get_json_response(obj):\n return HttpResponse(json.dumps(obj))", "def get_json_response(obj):\n return HttpResponse(json.dumps(obj))", "def _render_result(self, errno, errmsg, data=None):\n self.set_header(\"Content-Type\", \"application/json; charset=utf-8\")\n if self._finished:\n return\n self.write(tornado.escape.json_encode({\n \"errno\": errno,\n \"errmsg\": errmsg,\n \"logid\": self.logid,\n \"data\": data,\n }))", "def json_response(self, out, code=200):\n self.response.set_status(code)\n self.response.headers[CONTENT_TYPE] = CONTENT_TYPE_JSON\n self.response.out.write(json.dumps(out))", "def process_result(self, result: Any) -> None:\n raise NotImplementedError()", "def jsonresp(value):\n body = (json.dumps(value),)\n cherrypy.response.headers['Content-Type'] = 'application/json'\n return body", "def form_success_return(result):\n return { 'code' : 0, 'value' : result, 'output' : None }", "def get(self):\n self.finish(json.dumps(self.build_response_dict()))", "def _handle_result(result: 'Request'):\n for route in result.routes:\n if route.executor == GATEWAY_NAME:\n route.end_time.GetCurrentTime()\n\n self._update_end_request_metrics(result)\n\n return result", "def get_json_results(result):\n if result.status_code == 200 and is_json(result.text):\n return json.loads(result.text)\n\n else:\n print(f\"The result code not successful. 
The error code is: {result.status_code}\")\n return False", "def storeResult(request, run_uuid):\n if request.method == 'POST':\n trialresult = TrialResult()\n trialresult.subject = get_object_or_404(SubjectData, pk=run_uuid)\n trialresult.trialitem = get_object_or_404(TrialItem, pk=int(request.POST.get('trialitem')))\n trialresult.start_time = dateutil.parser.parse(request.POST.get('start_time'))\n trialresult.end_time = dateutil.parser.parse(request.POST.get('end_time'))\n trialresult.key_pressed = request.POST.get('key_pressed')\n #trialresult.webcam_file = request.POST.get('webcam_file')\n trialresult.trial_number = int(request.POST.get('trial_number'))\n trialresult.resolution_w = int(request.POST.get('resolution_w'))\n trialresult.resolution_h = int(request.POST.get('resolution_h'))\n trialresult.save()\n return JsonResponse({'resultId': trialresult.pk})\n else:\n logger.error('Failed to store result.')\n raise Http404('Page not found.')", "def store_json_convertible_result(result: dict, filepath: str):\n\n with open(filepath, \"w\") as file:\n file.write(json.dumps(result, ensure_ascii=False, indent=3))", "def post(self, request):\n result = None\n print(\"RESULT API: \", request.data)\n task_exec_update = TaskExecutionResult.objects.get(\n id=request.data['context']['taskExecutionID']\n )\n try:\n if request.data['result'].lower() == \"pass\":\n result = apisettings.PASS\n if request.data['result'].lower() == \"fail\":\n result = apisettings.FAIL\n if request.data['result'].lower() == \"abort\":\n result = apisettings.ABORT\n\n task_exec_update.result = result\n task_exec_update.save(update_fields=['result'])\n Log.summary_task_result(context=request.data.get(\"context\"), result=request.data['result'])\n return Response(status=HTTP_200_OK)\n except Exception as e:\n logger = Log.get_logger(__name__)\n logger.exception(e)\n return Response(status=HTTP_400_BAD_REQUEST)", "def result(self, result: Item):\n\n self._result = result", "def postprocessRequest(self, retval, route):\n JSONed = False\n GZIPPED = False\n\n if retval is None:\n self.logger.warn(\"retval is None!\")\n return retval\n\n # Is this request under the a path we're enforcing JSON output for?\n if (route is not None and hasattr(route, 'rule') and route.rule.startswith(self.baseRulePath)) or response.status_code >= 400:\n # It is. Try to serialize the returned data as JSON\n self.logger.debug(\"response should be JSON\")\n\n # First, is the data even something we can serialize as JSON?\n # if the retval is not a dict, we don't know what to do with it, so just be transparent\n if type(retval) not in (dict, list):\n self.logger.error(\"\\033[41;1m You are trying to send the client data that doesn't look like it should be JSON (%s). Fix this! \\033[0m\" % type(retval))\n # TODO: consider raising an exception so as to generate a server error (500), forcing the app developer\n # to confront why/how they are sending back something that doesn't make much sense serializing as JSON\n else:\n # Was the \"pretty\" query parameter set?\n if request.query.get(\"pretty\") == 'true':\n # It was. Indent & sort keys\n self.logger.debug(\"found pretty query param, value is true, prettying JSON\")\n retval = json.dumps(retval, indent=4, sort_keys=True)\n else:\n # It was not. 
By default, we'll use the most compact representation\n retval = json.dumps(retval, separators=(',', ':'))\n response.content_type = \"application/json\"\n self.logger.debug(\"%d bytes of JSON created\" % len(retval))\n JSONed = True\n else:\n self.logger.debug(\"response should NOT be JSON\")\n\n # Gzipping the response\n # Can the client even handle gzipped response bodies?\n httpRespObj = None\n if isinstance(retval, bottle.HTTPResponse):\n # we'll keep the HTTPResponse so we can update it after gzipping.\n self.logger.debug(\"Found HTTPResponse instance\")\n httpRespObj = retval\n if type(retval.body) in (str, unicode):\n retval = retval.body\n elif hasattr(retval.body, \"read\"):\n retval = retval.body.read()\n else:\n self.logger.error(\"HTTPResponse.body attr is not a str and does not have a read() method!\")\n raise ValueError(\"HTTPResponse.body is not sane: attr is not a str, and is not a file-like object\")\n\n elif isinstance(retval, bottle.HTTPError):\n self.logger.debug(\"Found HTTPError instance\")\n httpRespObj = retval\n if type(retval.body) in (str, unicode):\n retval = retval.body\n elif hasattr(retval.body, \"read\"):\n retval = retval.body.read()\n else:\n self.logger.error(\"HTTPError.body attr is not a str and does not have a read() method!\")\n raise ValueError(\"HTTPError.body is not sane: attr is not a str, and is not a file-like object\")\n\n if 'gzip' in request.headers.get(\"Accept-Encoding\", \"\") and len(retval) > 0:\n self.logger.debug(\"client accepts gzip, gzipping data\")\n # the client handle gzipped data, so lets gzip out data\n self.logger.debug(\"original response data was %d bytes\" % len(retval))\n sio = StringIO.StringIO()\n gzFile = gzip.GzipFile(fileobj=sio, mode='wb', compresslevel=6)\n gzFile.write(retval)\n gzFile.close()\n sio.seek(0)\n retval = sio.read()\n sio.close()\n self.logger.debug(\"new gzipped response data is %d bytes\" % len(retval))\n GZIPPED = True\n\n # Were we given an HTTPResponse isntance? 
If so, we need to update it a bit\n if httpRespObj:\n self.logger.debug(\"Updating HTTPResponse instance with gzipped content, headers\")\n httpRespObj.body = retval\n httpRespObj['Content-Length'] = str(len(retval))\n httpRespObj['Content-Encoding'] = 'gzip'\n else:\n # update the content-length (it is already set) and add the content-encoding header\n response.set_header('Content-Length', str(len(retval)))\n response.set_header('Content-Encoding', 'gzip')\n else:\n self.logger.debug(\"client either doesn't accept gzip or there's no data to return; len(retval)=%d\" % len(retval))\n\n self.logger.info(\"RESPONSE %s gzipped:%s json:%s size:%dB\" % (response.status_code, GZIPPED, JSONed, len(retval)))\n if httpRespObj:\n return httpRespObj\n return retval", "def __call__(self, status_code, headers, body):\n self.result_obj['status_code'] = status_code\n self.result_obj['headers'] = dict(headers)\n self.result_obj['body'] = body\n \n return self.result_obj", "def __call__(self, status_code, headers, body):\n self.result_obj['status_code'] = status_code\n self.result_obj['headers'] = dict(headers)\n self.result_obj['body'] = body\n \n return self.result_obj", "def respond(self, obj):\r\n url = '{0}/{1}'.format(self.get_url(), 'respond')\r\n request = http.Request('PUT', url, {'response': obj})\r\n\r\n return request, parsers.parse_json", "def handle_rest_api_result(result):\n\n if (result.status_code < 200) or (result.status_code > 299):\n try:\n json_result = result.json()\n except ValueError:\n raise VMRayRESTAPIError(\"API returned error {}: {}\".format(result.status_code, result.text),\n status_code=result.status_code)\n\n raise VMRayRESTAPIError(json_result.get(\"error_msg\", \"Unknown error\"), status_code=result.status_code)", "def _store_result(self, task_id, result, state,\n traceback=None, request=None, **kwargs):\n self._get_connection(write=True)\n\n self._session.execute(self._write_stmt, (\n task_id,\n state,\n buf_t(self.encode(result)),\n self.app.now(),\n buf_t(self.encode(traceback)),\n buf_t(self.encode(self.current_task_children(request)))\n ))", "def serialize(self) -> dict:\n return {\n \"parameters\": self.parameters,\n \"results\": self.results,\n }", "def json_response( json_object ):\n return HttpResponse( json.dumps(json_object) )", "def post(self, id):\n try:\n json_data = request.get_json(force=True)\n if not json_data:\n return {'message': 'No input data provided'}, 400\n\n if id in results:\n results[id] += [json_data]\n else:\n results[id] = [json_data]\n return {'message': 'Result added successfully'}, 200\n\n except Exception as e:\n return {'message': 'Internal Server error', 'error': e}, 500", "def P_SendResult(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def handle_result(self, results: List[Dict], **info):\n pass", "def save_results(results):\n json.dump(results, open(\"results.json\", \"w\"))", "def success(self, result):\r\n raise NotImplementedError", "def print_json(res, ctx):\n\n return json.dumps(res)", "def _CompleteRequest(self, request_id, result):\n logging.info('Reaped %s, result = %r', request_id, result)\n completion_path = self._GetRequestPathname(request_id, self._COMPLETE)\n with open(completion_path, 'w') as f:\n pickle.dump(result, f)\n self._ClearRequest(request_id, self._RUNNING)", "def save_result(data, user_id):\n result = ResultModel(**data, user_id=user_id)\n result.save_to_db()\n user = 
UserModel.query.get(user_id)\n logging.info(f'User {user.username} scores {result.wpm} wpm.')\n return jsonify({}), 201", "def write_result_to_file(self):\n self.__test_result[Result.__RUN] = self.__run\n with open(self.__json_file_path, \"w+\") as outfile:\n json.dump(self.__test_result, outfile,\n ensure_ascii=False, indent=2)", "def _r_send_result(self, response, protocol):\n #print(\"Send result: %s\" % result)\n protocol.send_message(response)", "def result(self, result):\n print(result)", "def format_json(self,query_results):\n results=query_results.data\n factory=factory_json()\n dump=factory.dumps(results)\n print(dump)\n # TODO return output for this\n return \"\"", "def result(self): \n return self.body", "def write_response(self):\n response = {\n \"data\": self.data,\n \"errors\": self.errors,\n }\n self.write(json.dumps(response))", "def _post_process_result(result: Any) -> Any:\n return result", "def get_result(self) -> Any:\n ...", "def get(self, request):\r\n data = {\r\n 'results': 'THIS IS THE PROTECTED STRING FROM SERVER',\r\n }\r\n return Response(data, status=status.HTTP_200_OK)", "def __init__(self):\n self.result_obj = {}", "def __init__(self):\n self.result_obj = {}", "def __init__(self, result, data='', msg='', safe=True, status=200):\n\n content = {'code': result, 'data': data, 'msg': msg}\n super(ResultResponse, self).__init__(content, status=status, safe=safe)", "def _respond(self, request, response):\n request.respond(200, {\"Content-Type\": \"application/json\"}, JSON.stringify(response))", "def push(self):\n result = self.get_result_dict()\n headers = {\n 'Authorization': self.token\n }\n response = requests.post(\n url, json=json.dumps(result, indent=4), headers=headers\n )\n return response.json()", "def set_result(self, result):\n self._result = result\n self._set_done()", "def make_dict(result):\n response = dict()\n response.update(\n {\n 'url': result.get('Url', None),\n 'title': result.get('Title', None),\n 'description': result.get('Description', None),\n 'card_type': 1,\n 'icon_url': None,\n 'provider_icon_url': None,\n 'action_type': 1,\n })\n return response", "def save_result(result, file_name='result.txt', formatted=False):\n print \"Save result into %s\" % file_name\n with open(file_name, 'w') as file_:\n for product_name in result:\n if formatted:\n file_.write(json.dumps({\n 'product_name' : product_name,\n 'listings' : result[product_name]\n }, sort_keys=True, indent=2))\n else:\n file_.write(json.dumps({\n 'product_name' : product_name,\n 'listings' : result[product_name]\n }))", "def rest_api_request_handler(self, request_type):\n result = {}\n success_code = 0\n with self.resource_lock:\n if request_type == self.RestRequest.REST_MUTS:\n result = self.muts # Returns MUTs\n elif request_type == self.RestRequest.REST_TEST_SPEC:\n result = self.test_spec # Returns Test Specification\n elif request_type == self.RestRequest.REST_TEST_RESULTS:\n pass # Returns test results\n else:\n success_code = -1\n return json.dumps(self.get_rest_result_template(result, 'request/' + request_type, success_code), indent=4)", "def on_result(self, result):\n # we create a self.results list to store the results as they come back from the process() method\n self.results.append(result)", "def SaveResult(self, result, result_path):\n\n # Create / get results file in the local directory \"\"\"\n db = self.GetResultFile()\n\n if db is not None:\n debug('result : {}'.format(result))\n\n # Check if file exists\n try:\n o = db[str(result_path + '/' + result.GetName())]\n 
except KeyError:\n print(\"No object in {}/{}\".format(result_path, result.GetName()))\n o = None\n\n if o is not None:\n print(\"Replacing {}/{}\".format(result_path, result.GetName()))\n del o\n\n db[str(result_path + '/' + result.GetName())] = result\n if db[str(result_path + '/' + result.GetName())] is not None:\n print(\"+++result {}/{} adopted\".format(result_path, result.GetName()))\n\n else:\n error(\"Could not adopt result {}\".format(result.GetName()))\n db.close()\n return\n\n else:\n error(\"Error creating result file\")\n db.close()\n return\n\n db.close()", "def serialize(self):\n return json.dumps(self.request_data)", "def __call__(self, request):\n\n if request.method == 'POST':\n response = self.process_request(request)\n\n else:\n response = self.get_smd(request.get_full_path())\n\n return json.dumps(response)", "async def send_result(\n self,\n rpc_message: RpcMessage,\n result_message: ResultMessage,\n return_path: str,\n bus_client: \"BusClient\",\n ):\n raise NotImplementedError()", "def store_result(result: dict, filepath: str):\n\n raise NotImplementedError", "def __call__(self, rv):\n if isinstance(rv, ResponseBase):\n return rv\n data, status, headers = unpack(rv)\n resp = flask.make_response(self._encoder(data, **self.json_settings),\n status, {'Content-Type': self.content_type})\n resp.headers.extend(headers)\n return resp", "def set_result(uid, obj, progress=0):\n if isinstance(obj, tuple) and isinstance(obj[1], Exception):\n # Wrap the tb so it can be transported and re-raised.\n et, ev, tb = obj\n obj = (et, ev, Traceback(tb))\n obj = dill.dumps(obj)\n result = {\n 'uid': uid,\n 'obj': obj,\n 'ts': time.time(),\n 'progress': progress,\n }\n cache = caches[settings.FUTURES_CACHE_BACKEND]\n cache.set('futures:%s' % uid, result, settings.FUTURES_CACHE_TTL)", "def new_result():\n return ActionResult()", "def handle_request(self, given_request: Request):\n with open(request.output, mode=\"w\", encoding='utf-8') as file:\n file.write(request.result)\n return True", "def JSONResponse(payload):\n return HttpResponse(json.dumps(payload), mimetype='application/json')", "def result_message(iden: int, result: Any = None) -> dict[str, Any]:\n return {\"id\": iden, \"type\": const.TYPE_RESULT, \"success\": True, \"result\": result}", "def to_http_response(self) -> HttpResponse:\n response = (\n JsonResponse(self.body)\n if (self.headers or {}).get(\"Content-Type\") == \"application/json\"\n else HttpResponse(self.body)\n )\n response.headers = self.headers\n return response", "def result(self, result: osbuild.pipeline.BuildResult):", "def send_reply(self, (request, result)):\n if not 'callback' in request.args:\n return self.send_json(request, result)\n else:\n return self.send_jsonp(request.args['callback'][0], request, result)", "def post(self):\n\n json_data = json.loads(self.request.body)\n response = PluginHelper.request_post(json_body=json_data)\n if (\n response.status_code == 200\n and response.json()[\"status\"] == \"ok\"\n ):\n result_json = {\n \"results\": response.json()[\"results\"],\n }\n else:\n raise exc.BadRequest(\"Bad host query: {}\".format(\n self.request.body\n ))\n\n self.success(result_json)" ]
[ "0.72616404", "0.7128404", "0.66393524", "0.6588018", "0.65028614", "0.6434777", "0.63020784", "0.62061685", "0.6200206", "0.6102137", "0.60957694", "0.608075", "0.591243", "0.58815205", "0.5878156", "0.5844654", "0.58311284", "0.5830024", "0.5819786", "0.57798034", "0.57742906", "0.5771639", "0.5757208", "0.5757208", "0.57488954", "0.57435054", "0.571782", "0.571782", "0.5715872", "0.56778675", "0.56488276", "0.56447804", "0.561163", "0.5596351", "0.5579025", "0.5578498", "0.5569997", "0.5569997", "0.55283", "0.5527402", "0.55206996", "0.549895", "0.54783726", "0.5455365", "0.5452009", "0.54436064", "0.5435623", "0.54320353", "0.5424685", "0.5414896", "0.54137665", "0.5406951", "0.5406951", "0.54040414", "0.54023653", "0.54016435", "0.53998154", "0.5392907", "0.53859", "0.5366757", "0.5362433", "0.5360627", "0.53578746", "0.5353447", "0.53468746", "0.53442854", "0.53422225", "0.5323314", "0.53177136", "0.53082985", "0.5306818", "0.5305151", "0.53040236", "0.53034645", "0.5285567", "0.5265151", "0.5265151", "0.5252538", "0.52492756", "0.5248196", "0.52437705", "0.5240873", "0.52308536", "0.5230446", "0.52234447", "0.5216755", "0.521551", "0.5213697", "0.52042365", "0.5201437", "0.51998293", "0.51982117", "0.5187561", "0.5185343", "0.5181857", "0.51755124", "0.51717436", "0.5165391", "0.516001", "0.5155233" ]
0.6903042
2
Maps a L{CODE} constant to a HTTP code.
def _mapErrorCodeToStatus(code):
    if code == 103:
        return http.NOT_FOUND
    return http.INTERNAL_SERVER_ERROR
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_code():\n return jsonify({\"status\": \"0\", \"code\": code_status})", "def setResponseCode(code, message=None):", "def send_code(self, code: str) -> Dict:\n raise NotImplementedError", "async def with_code_header():\n return jsonify(language=request.headers.get(\"Lang\")), 203, {\"X\": 233}", "def set_code(self, code):\n self.set_payload(code)", "def _get_request_code(self, data) -> int:\n return int(self._request_code)", "def code(self, code: int):\n\n self._code = code", "def http_return_code(res_data) -> (int, str):\n\n start = re.search(\"[0-9]{3}\", res_data).start()\n end_of_line = res_data.find(\"\\r\\n\")\n code = int(res_data[start:start+3])\n if end_of_line == -1:\n end_of_line = len(res_data)\n meaning = res_data[start+4:end_of_line]\n return code, meaning", "def reply_with_code(self, code: int) -> None:", "def code(self, value: str) -> None:\n self._code = value", "def assign_message_code(success: bool):\n return (HTTPStatus.OK.phrase, HTTPStatus.OK) if success\\\n else (HTTPStatus.INTERNAL_SERVER_ERROR.phrase, HTTPStatus.INTERNAL_SERVER_ERROR)", "def code(self, code):\n if code is None:\n raise ValueError(\"Invalid value for `code`, must not be `None`\")\n\n self._code = code", "def ircode(self, code):\n if code.lower() in self.codes:\n self._sendCommand('IRCODE ' + self.codes[code.lower()])\n else:\n print 'No such code: %s' % code", "def code(self, code: str):\n\n self._code = code", "def code(self, code: \"str\"):\n if code is None:\n raise ValueError(\"Invalid value for `code`, must not be `None`\")\n self._attrs[\"code\"] = code", "def view_status_code(codes):\n\n if \",\" not in codes:\n try:\n code = int(codes)\n except ValueError:\n return Response(\"Invalid status code\", status=400)\n return status_code(code)\n\n choices = []\n for choice in codes.split(\",\"):\n if \":\" not in choice:\n code = choice\n weight = 1\n else:\n code, weight = choice.split(\":\")\n\n try:\n choices.append((int(code), float(weight)))\n except ValueError:\n return Response(\"Invalid status code\", status=400)\n\n code = weighted_choice(choices)\n\n return status_code(code)", "def get_code(self):\n return self.code", "def get_code(self):\n return self.code", "def get_code(self):\n return self.code", "def get_code(self):\n return self.code", "def set_status( code ):", "def code(self, code):\n\n self._code = code", "def code_to_name(code):\n upper_code = code.upper()\n if upper_code in code_dict:\n return code_dict[upper_code]\n else:\n return code", "def update_code(self):\n print ('update code')\n self.query_dict.update({'code':code.value})", "def decode(self, code):\n raise NotImplementedError", "def _putCode(self, code):\n assert(type(code) == int)\n self.code[self.codeptr] = code\n self.codeptr += 1", "def code(self):\n\t\treturn self.status_code", "def get_status_code(status):\n return dict(const.STATUS_CODES).get(status)", "def response(code):\n\n def decorator(func):\n func.wsgi_code = code\n return func\n return decorator", "def map_marital_status(code):\n status = MaritalStatus\n mapping = {\n \"MARRIED\": status.Married.value,\n \"SINGLE\": status.Unmarried.value,\n \"WIDOWED\": status.Widowed.value,\n \"SEPARATED\": status.LegallySeparated.value,\n \"DIVORCED\": status.Divorced.value,\n \"UNKNOWN\": status.Unknown.value,\n }\n if code in mapping.keys():\n return mapping[code]\n elif utils.is_empty(code):\n return status.Unknown.value\n else:\n logging.warning(\"In {}, args {} not recognised\".format(\"marital_status\", code))\n return status.Unknown.value", "def 
getResponseCode(self) -> int:\n ...", "def setResponseCode(self, code, message=None):\n assert not self.written, \"Response code cannot be set after data has been written: %s.\" % \"@@@@\".join(self.written)\n self.responseCode = code\n self.responseMessage = message", "def update_code(self, new_code):\n\n self.code = new_code", "def update_code(self, new_code):\n\n self.code = new_code", "def http_redirect_code(self) -> typing.Optional[str]:\n return self._values.get('http_redirect_code')", "def update_code(self, new_code):\n\n new_code = self.code", "def CODE(string):\n return ord(string[0])", "def add_code(self, code):\n self.code += code", "def get_status_code(self, ttype, status_val) -> str:\n # get the status code from __status_code or __default_code\n pass", "def get_status_code(self, status_line):\n try:\n return int(status_line.split(' ')[1])\n except ValueError:\n return 400\n except IndexError:\n return 404", "def gather_http_status_code(self):\n\n if self.status.ipv6_syntax_validation:\n self.status.http_status_code = PyFunceble.lookup.HTTPCode(\n self.subject, \"ipv6\"\n ).get()\n else:\n self.status.http_status_code = PyFunceble.lookup.HTTPCode(\n self.subject, self.subject_type\n ).get()", "def get_opcode(self, code):\r\n opcode = int(str(code)[-2:])\r\n return opcode", "def status_code(self):\n return int(self.status.split()[1])", "def test_find_codes(session, client, jwt, app, code: Code):\n rv = client.get(f'/api/v1/codes/{code.value}', headers={})\n assert rv.status_code == 200", "def add_code(self, id, code):\n self.codes[id] = code", "def update_code(self, new_code):\n self.code = new_code\n\n # Fill in the rest", "def __get_reply_code(self, reply_code_str):\n if reply_code_str in self.__reply_codes:\n return self.__reply_codes[reply_code_str]", "def update_code(self, new_code):\n\n # Fill in the rest\n self.code = new_code", "def update_code(self, new_code):\n\n # Fill in the rest\n self.code = new_code", "def code(self) -> int:\n return self._code", "def code(self) -> int:\n return self._code", "def code(self) -> Optional[str]:\n return pulumi.get(self, \"code\")", "def code(self) -> Optional[str]:\n return pulumi.get(self, \"code\")", "def code(self) -> Optional[str]:\n return pulumi.get(self, \"code\")", "def code(self) -> Optional[str]:\n return pulumi.get(self, \"code\")", "def add_status_code(code):\n def class_decorator(cls):\n cls.status_code = code\n return cls\n return class_decorator", "def exchange_code(self, code):\n data = {\n 'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'grant_type': 'authorization_code',\n 'code': code,\n 'redirect_uri': self.redirect_uri,\n 'scope': 'identify'\n }\n\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n\n access_token = self.http_client.post(\n f'{self.api_endpoint}/oauth2/token', headers, data=data)\n return access_token", "def fetch_code(url):\n status, response = http_request(url)\n\n if status != 200:\n writer(\n f\"\\nError: HTTP status {status} returned, 200 expected\\n - {url}\\n\",\n FORMAT[\"ERROR\"]\n )\n sys.exit(1)\n\n code_type = classify_response(response)\n\n return response, code_type", "def get_int_code(self):\n if not self.code:\n return 0\n\n if isinstance(self.code, int):\n return self.code\n\n tmp = str(self.code)\n char = tmp[0]\n if char == '*' or char == '%':\n return 0\n res = ''\n\n index = 0\n while char.isdigit() and index <= len(tmp) - 1:\n res += char\n index += 1\n if index < len(tmp):\n char = tmp[index]\n return int(res)", "def 
status_code(self):\r\n return int(self._status[:3])", "def http_response(status_code: int) -> Tuple[dict, int]:\n return ({'message': HTTP_STATUS_CODES.get(status_code, '')}, status_code)", "def get_response_status(response_code):\n if is_success(response_code):\n return 'success'\n return 'error'", "def _send_code(self, phone, code, case):\n raise NotImplementedError", "def code(self):\n return self._getCode()", "def set_HttpRedirectCode(self, value):\n super(PutBucketWebsiteRedirectInputSet, self)._set_input('HttpRedirectCode', value)", "def returnCode(code):\n context = {}\n if code == 101:\n context = {\n 'code': 101,\n 'message': 'login success',\n }\n elif code == 102:\n context = {\n 'code': 102,\n 'message': 'error in username or password',\n }\n elif code == 103:\n context = {\n 'code': 103,\n 'message': 'username or password is null',\n }\n elif code == 104:\n context = {\n 'code': 104,\n 'message': 'registration error',\n }\n elif code == 105:\n context = {\n 'code': 105,\n 'message': 'registration success',\n }\n elif code == 106:\n context = {\n 'code': 106,\n 'message': 'username already exists',\n }\n elif code == 107:\n context = {\n 'code': 107,\n 'message': 'save user exception, please contact manager',\n }\n elif code == 108:\n context = {\n 'code': 108,\n 'message': 'friend information exception',\n }\n elif code == 109:\n context = {\n 'code': 109,\n 'message': 'friend information success',\n }\n elif code == 111:\n context = {\n 'code': 111,\n 'message': 'user information exception',\n }\n elif code == 112:\n context = {\n 'code': 112,\n 'message': 'quasi through success',\n }\n elif code == 113:\n context = {\n 'code': 113,\n 'message': 'quasi refused success',\n }\n elif code == 114:\n context = {\n 'code': 114,\n 'message': 'quasi operating error',\n }\n elif code == 115:\n context = {\n 'code': 115,\n 'message': 'edit my head photo success',\n }\n elif code == 116:\n context = {\n 'code': 116,\n 'message': 'edit my head photo error',\n }\n elif code == 117:\n context = {\n 'code': 117,\n 'message': 'edit my password error',\n }\n elif code == 118:\n context = {\n 'code': 118,\n 'message': 'old password error in edit my password',\n }\n elif code == 119:\n context = {\n 'code': 119,\n 'message': 'edit my password success',\n }\n else:\n context = {\n 'code': 999,\n 'message': 'system error!!! 
please contact manager, 3Q',\n }\n return context", "def error_code(self) -> CustomErrorCode:\n enforce(self.is_set(\"error_code\"), \"'error_code' content is not set.\")\n return cast(CustomErrorCode, self.get(\"error_code\"))", "def status_code(self):\n return int(self._status[:3])", "def code(self) -> str:\n return pulumi.get(self, \"code\")", "def code(self) -> str:\n return pulumi.get(self, \"code\")", "def code(self) -> str:\n return pulumi.get(self, \"code\")", "def code(self):\n return self._code", "def code(self):\n return self._code", "def code(self):\n return self._code", "def code(self):\n return self._code", "def code(self):\n\n return self._code or self._default_code", "def count_response_codes():\n code = request.args.get('code', 200)\n log_lines = request.args.get('log_lines')\n\n if log_lines:\n lines_list = json.loads(log_lines)\n count = count_by_code(lines_list, code)\n else:\n count = 0\n\n response = str(count)\n return response", "def code(self) -> \"str\":\n return self._attrs.get(\"code\")", "def process_option_code(self, code, option):\n return code", "def find_key_code(self, data):\n match = re.match(self.code_re, data)\n if match:\n return match.groups()[0]\n return None", "def response_code(self, response_code):\n\n self._response_code = response_code", "def response_code(self, response_code):\n\n self._response_code = response_code", "def resolvablehttpcode(httpcode):\n resolvable = False\n validhttpcodesstart = [\"2\", \"3\", \"400\", \"403\", \"5\"]\n for code in validhttpcodesstart:\n if httpcode.startswith(code):\n resolvable = True\n logging.debug('Resolvable, HTTP code: ' + httpcode)\n if resolvable == False:\n logging.debug('NOT resolvable, HTTP code: ' + httpcode)\n return resolvable", "def test_find_code(session, client, jwt, app):\n rv = client.get('/api/v1/codes/errors', headers={})\n code = rv.json.get('codes')[0].get('type')\n rv = client.get(f'/api/v1/codes/errors/{code}', headers={})\n assert rv.json.get('type') == code", "def get(self):\r\n self.code = self.code.replace(\"PORT\", str(self.port))\r\n\r\n # Apply xor encoding.\r\n self.code = self.code if self.args.xor is 0 else xor_wrapper(self.name, self.code, self.args)\r\n\r\n # Apply base64 encoding.\r\n self.code = base64_wrapper(self.name, self.code, self.args)\r\n\r\n # Apply url-encoding\r\n if self.args.urlencode is True:\r\n self.code = to_urlencode(self.code)\r\n \r\n return self.code", "def count_by_code(_input, code):\n log_parser = ApacheLogParser(_input)\n count = log_parser.count_response_codes(code)\n return count", "def resolve_code(obj, _):\n return obj.code.decode()", "def _validate_code(self, key, code):\n \n if code is None:\n code = self.name\n \n if not isinstance(code, (str, unicode)):\n raise TypeError(\"Sequence.code should be an instance of str or \"\n \"unicode, not %s\" % type(code))\n \n code = Project._condition_code(code)\n \n return code", "def check_for_get_code(self, code, url):\r\n resp = self.client.get(url)\r\n self.assertEqual(resp.status_code, code,\r\n \"got code %d for url '%s'. 
Expected code %d\"\r\n % (resp.status_code, url, code))\r\n return resp", "def _get_status_code(response: Response) -> int:\n status_code = response.status_code\n if isinstance(status_code, HTTPStatus):\n return status_code.value\n else:\n return status_code", "def response_code(self,code,argument):\n\t\tresponse_code = f\"{code!s} {argument}\\r\\n\"\n\t\tself.wfile.write(bytes(response_code,\"ascii\"))", "def code(self) -> Optional[pulumi.Input[Union[str, 'Code']]]:\n return pulumi.get(self, \"code\")", "def exchange_code(self, code):\n params = {'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'grant_type': 'authorization_code',\n 'code': code}\n result = self._send_request(EXCHANGE_URL, params=params, method='POST',\n data_field=None)\n self.access_token = result['access_token']\n self.refresh_token = result['refresh_token']\n return self.access_token, self.refresh_token", "def code(self):\n return self.m_errorCode", "def _get_response_message(code=200, reason=None):\n return {'reason': reason}, code", "def map_event_code(event_code):\n event_code = int(event_code)\n\n # Honestly, these are just guessing based on the below event list.\n # It could be wrong, I have no idea.\n if 1100 <= event_code <= 1199:\n return ALARM_GROUP\n\n if 3100 <= event_code <= 3199:\n return ALARM_END_GROUP\n\n if 1300 <= event_code <= 1399:\n return PANEL_FAULT_GROUP\n\n if 3300 <= event_code <= 3399:\n return PANEL_RESTORE_GROUP\n\n if 1400 <= event_code <= 1499:\n return DISARM_GROUP\n\n if 3400 <= event_code <= 3799:\n return ARM_GROUP\n\n if 1600 <= event_code <= 1699:\n return TEST_GROUP\n\n if 5000 <= event_code <= 5099:\n return CAPTURE_GROUP\n\n if 5100 <= event_code <= 5199:\n return DEVICE_GROUP\n\n if 5200 <= event_code <= 5299:\n return AUTOMATION_GROUP\n\n if 6000 <= event_code <= 6100:\n return ARM_FAULT_GROUP\n\n return None", "def __dec_status(self, status_code):\n ret = self.status_codes.get(status_code)\n if ret == None:\n return \"Unknown\"\n else:\n return ret", "def gen_estring(ecode):\n ec=atoi(str(ecode))\n if BaseHTTPRequestHandler.responses.has_key(ec):\n return \"HTTP/1.1 %s %s\" %(ec, BaseHTTPRequestHandler.responses[ec][0])\n else:\n return \"HTTP/1.1 %s\" %(ec)", "def match(code, x):\n return decode(code)(x)", "def gen_headers(self, http_code):\n\n if http_code == 200:\n http_headers = \"HTTP/1.1 200 OK\\n\"\n elif http_code == 400:\n http_headers = \"HTTP/1.1 400 Bad Request\\n\"\n elif http_code == 404:\n http_headers = \"HTTP/1.1 404 Not Found\\n\"\n\n utc_datetime = datetime.datetime.utcnow().strftime(\"%a, %d %b %Y %H:%M:%S\")\n http_headers += dedent(\"\"\"\\\n Date: %s GMT\n Content-type: text/html; charset=UTF-8\n Server: pydb.py\n Connection: close\\n\\n\"\"\" % utc_datetime)\n\n return http_headers" ]
[ "0.6723069", "0.6591856", "0.65714204", "0.6495424", "0.6402399", "0.63924485", "0.6334081", "0.6325058", "0.6201443", "0.6191107", "0.616254", "0.616247", "0.6142737", "0.61202556", "0.6110832", "0.6099238", "0.6080944", "0.6080944", "0.6080944", "0.6080944", "0.60740554", "0.59960765", "0.5989822", "0.59863806", "0.5973325", "0.5954982", "0.5953482", "0.5953162", "0.5900504", "0.5864886", "0.5822019", "0.58166504", "0.579222", "0.579222", "0.5781725", "0.57686335", "0.57655495", "0.5756831", "0.5750795", "0.5743373", "0.57376814", "0.5725441", "0.57233745", "0.5722698", "0.5719877", "0.57117015", "0.5707604", "0.5704654", "0.5704654", "0.56705904", "0.56705904", "0.5669924", "0.5669924", "0.5669924", "0.5669924", "0.5669696", "0.5654988", "0.56258935", "0.56207824", "0.5616333", "0.56119686", "0.56107366", "0.5610184", "0.56087", "0.5602819", "0.55988973", "0.55975115", "0.5594963", "0.55917335", "0.55917335", "0.55917335", "0.55753493", "0.55753493", "0.55753493", "0.55753493", "0.5548235", "0.552061", "0.5519804", "0.55147564", "0.5510298", "0.5510106", "0.5510106", "0.5493785", "0.54909384", "0.54904395", "0.54723203", "0.5460473", "0.5454337", "0.5447886", "0.54451495", "0.54440016", "0.54331434", "0.54275197", "0.54253733", "0.5419027", "0.5405248", "0.53997505", "0.5381626", "0.5380431", "0.53757405" ]
0.77379066
0
Serializes a L{Failure} to JSON and writes it to the C{request}
def _writeJSONErrorResponse(f, request): code = getattr(f.value, 'code', CODE.UNKNOWN) _writeJSONResponse( result=f.getErrorMessage().decode('ascii'), request=request, code=code, status=_mapErrorCodeToStatus(code)) raise f
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def failure(self, validation_failure):\n \n self.request.response.status_int = 400\n return validation_failure.error.asdict()", "def failure(self, error, rc, msg):\n self.module.fail_json(msg=msg, rc=rc, err=error)", "def _FailureResponse(args_dict=None):\n if args_dict is None:\n args_dict = {}\n args_dict[\"code\"] = \"Fail\"\n return CGateway._DumpResponse(args_dict)", "def serialize_error(success, object, reason):\n\n return json.dumps({\"success\": success, \"object\": object, \"status\": reason}, indent=2, sort_keys=True)", "def make_json_error(ex):\n if isinstance(ex, HTTPException):\n return ex;\n elif isinstance(ex, ResourceException):\n info = ex.to_dict()\n status_code = ex.http_status\n info[\"type\"] = \"exception\"\n else:\n message = \"There was an internal server error. Please try again later.\"\n info = {\"code\": \"internal_server_error\", \"message\": message, \"type\": \"exception\"}\n status_code = 500\n # generally we should log these 500 errors with the stacktrace somewhere -- we used splunk at Box.\n\n response = jsonify(**info)\n response.status_code = status_code\n return response", "def on_response_validation_error(err):\n return jsonify(message='Bad response'), 500", "def testFailureReturnsInternalErrorCode(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'fail',\n 'params': {}})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n result = yield resource.deferred_render_POST(request)\n response = loads(result)\n self.assertEqual({'code': JSONRPC_INTERNAL_ERROR,\n 'message': 'Internal error.'},\n response['error'])\n self.assertIn('exceptions.RuntimeError', self.log.getvalue())", "def failure(self, error):\n \n self.request.response.status_int = 400\n return None", "def failure(reason):\n return BencodedResponse(\n {\n 'failure reason': reason\n }\n )", "def jsonify_exception(error: HTTPException) -> Response:\n exc_resp = error.get_response()\n response: Response = jsonify(reason=error.description)\n response.status_code = exc_resp.status_code\n return response", "def log_failure(self, obj, message):\n super().log_failure(obj=obj, message=message)", "def set_failed(self, exception):\n self.logger.info(\"status: FAILED\")\n self._callback('on_failed', exception)\n return self.update_response(self.encoder.encode_failed(exception))", "def json_err(msg: str) -> Response:\n return jsonify({\"success\": False, \"error\": msg})", "def testSimpleFailingMethodLogsTheResponsePayload(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'fail',\n 'params': {}})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n yield resource.deferred_render_POST(request)\n self.assertIn('Response payload: ', self.log.getvalue())", "def handle_invalid_usage(error):\n\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response", "def _error_response(self):\r\n response_dict = {'success': False, 'version': 1}\r\n self.send_response(\r\n 400, content=json.dumps(response_dict),\r\n headers={'Content-type': 'application/json'}\r\n )", "def handle_invalid_usage(error):\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response", "def handle_invalid_usage(error):\n response = jsonify(error.to_dict())\n response.status_code 
= error.status_code\n return response", "def handle_invalid_usage(error):\n logging.warn(error.message)\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response", "def testSimpleFailingMethodReturnsErrorWithCodeAndMessage(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'fail',\n 'params': {}})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n result = yield resource.deferred_render_POST(request)\n response = loads(result)\n self.assertTrue('code' in response['error'])\n self.assertTrue('message' in response['error'])", "def testSimpleFailingMethodLogsJSONRPCError(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'fail',\n 'params': {}})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n yield resource.deferred_render_POST(request)\n self.assertIn('JSON RPC error.', self.log.getvalue())", "def http_exception(error):\n data = {'error': str(error)}\n return app.response_class(\n response=json.dumps(data),\n status=error.code,\n mimetype='application/json'\n )", "def response_error(error, status=400):\n\n response = {\n 'status': 'failed',\n 'error': error\n }\n\n return response_json(response, status=400)", "def renderHTTP_exception(request, failure):", "def fail_json(*args, **kwargs):\n kwargs['failed'] = True\n raise AnsibleFailJson(kwargs)", "def test_add_failure_details(self):\n self.protocol.addFailure(\n self.test, details=self.sample_tb_details)\n self.assertThat([\n compat._b((\"failure: %s [ multipart\\n\"\n \"Content-Type: text/plain\\n\"\n \"something\\n\"\n \"F\\r\\nserialised\\nform0\\r\\n\"\n \"Content-Type: \"\n \"text/x-traceback;charset=utf8,language=python\\n\"\n \"traceback\\n\" + _remote_exception_str_chunked +\n \"]\\n\") % self.test.id()),\n compat._b((\"failure: %s [ multipart\\n\"\n \"Content-Type: text/plain\\n\"\n \"something\\n\"\n \"F\\r\\nserialised\\nform0\\r\\n\"\n \"Content-Type: \"\n \"text/x-traceback;language=python,charset=utf8\\n\"\n \"traceback\\n\" + _remote_exception_str_chunked +\n \"]\\n\") % self.test.id()),\n ],\n matchers.Contains(self.io.getvalue())),", "def push_failure():\n\n response.view = 'generic.json'\n fail_file = api_utils.get_failed_push_filepath(request)\n if os.path.exists(fail_file):\n try:\n blob = read_as_json(fail_file)\n except:\n blob = {'message': 'could not read push fail file'}\n blob['pushes_succeeding'] = False\n else:\n blob = {'pushes_succeeding': True}\n return json.dumps(blob)", "def test_bad_error(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['errors'] = [\n {\n 'code': '1',\n 'name': 'error-name-one',\n 'title': 'Error Name',\n 'description': 'Error Description',\n 'category': 'data'\n }\n ]\n json_data = {\n 'manifest': manifest,\n 'configuration': self.configuration\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertFalse(results['is_valid'])\n self.assertEqual(len(results['errors']), 1)\n self.assertEqual(results['errors'][0]['name'], 'JSON_VALIDATION_ERROR')", "def write_error(self, status_code, exc_info, **kwargs):\n 
response = {\n \"data\": None,\n \"errors\": [ str(exc_info[1]) ]\n }\n\n self.set_status(status_code)\n self.write(json.dumps(response))", "def mark_failed(self):\r\n self.require_item()\r\n\r\n url = '{0}/mark_failed'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n\r\n return request, parsers.parse_empty", "def get_failure_object(msg):\n\n return {\"status\": \"FAILED\", \"error_msg\": msg}", "def internal_error(error):\n return jsonify({'error': \"Internal Server Error. \"\n \"Bitte die Logdatei für Details anschauen.\"}), 500", "def failure_detail(self) -> 'outputs.FailureDetailResponse':\n return pulumi.get(self, \"failure_detail\")", "def _Error(message):\n return json.dumps({\n 'success': False,\n 'error': message,\n })", "def handle_api_exception(error):\n response = flask.jsonify(error.to_dict())\n response.status_code = error.status_code\n return response", "def handle_error(error):\n response = jsonify(error.to_dict())\n response.status_code = error.status_code\n return response", "def _fail(self, msg, err=None):\n if self.session:\n self.session.cleanup()\n\n if err:\n self.module.fail_json(msg=msg + \"\\n\" + str(err), **self.result)\n else:\n self.module.fail_json(msg=msg, **self.result)", "def generic_errors(error, code):\n errors = {}\n errors[\"error\"] = error\n response = jsonify(errors)\n response.status_code = code\n return response", "def testSimpleFailingMethodLogsTheRequestPayload(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'fail',\n 'params': {}})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n yield resource.deferred_render_POST(request)\n self.assertIn('Request payload: ', self.log.getvalue())", "def jsonify_http_exception(exception: HTTPException):\n return jsonify(exception.description, exception.code)", "def log_failure(self, request):\n self.log_file.write(self.TYPE_FAILURE + \",%f,,,%f,,\\n\" %\n (float(request.resources[0]['amount']),\n float(request.offer)))", "def json_error(message):\n return json_response(isError=True, message=message)", "def gen_http_error(self, status, msg):\n self.clear()\n self.set_status(status)\n self.write(json.dumps(dict(error=str(msg))))\n self.finish()", "def _reportError(self, failure):\r\n self._connection.reportError(failure.getErrorMessage())", "def bad_request(self, error):\n return jsonify({'error': 'BAD REQUEST'}), 400", "def test_add_failure(self):\n self.protocol.addFailure(\n self.test, pysubunit.RemoteError(compat._u(\"boo qux\")))\n self.assertEqual(\n self.io.getvalue(),\n compat._b(\n ('failure: %s [\\n' +\n _remote_exception_str + ': boo qux\\n]\\n')\n % self.test.id()))", "def invalid_response():\n return Response(\n '{\"error\": \"Invalid request\"}',\n status=400,\n mimetype='application/json'\n )", "def _response_failure(self, failure, msgID):\r\n if not self._status:\r\n # Can not help it if the response takes some time and in the mean\r\n # time the interface is disabled; therefore, don't raise an error\r\n # instead just skip sending the response\r\n return\r\n\r\n # TODO: Return something useful to the cloud here!\r\n print('Service call failed.')", "def internal_server_error(error):\n return flask.jsonify({\"error\": \"Internal Server Error\"}), 500", "def resource_bad_request(error_msg):\n return jsonify(error=str(error_msg))", "def AsJson(self):\n\n return json.dumps(self._errors)", "def on_failure(self, exc: BaseException) -> 
None:", "def assertJSONError(self, response, exception):\n # first of all, try to find the exception\n full_exception_name = exception + 'Exception'\n exception_cls = getattr(exceptions, full_exception_name)\n self.assertStatus(response, exception_cls.status)\n\n expected = {'code': exception,\n 'message': exception_cls.message}\n self.assertJSON(response, expected)\n return", "def write_error(error):\n data = {\n 'result':'error',\n 'error':unicode(error),\n }\n key = _get_key()\n key.set_contents_from_string(json.dumps(data))", "def _err_response(msg):\r\n return HttpResponse(json.dumps({'success': False, 'error': msg}),\r\n mimetype=\"application/json\")", "def error_response(error_text):\n return Response(json.dumps({'error' : error_text}), status=404, mimetype='application/json')", "def record_failure(self, now=None) -> None:\n logging.info('Recording failure at %r', now or int(time.time()))\n self.failure_timestamp = now or int(time.time())\n self.put()", "def write_response(self):\n response = {\n \"data\": self.data,\n \"errors\": self.errors,\n }\n self.write(json.dumps(response))", "def failure(self):\n self.logger.debug(\"Logging failure for %s\", self.key)\n self.failures = self.driver.failure(self.key)", "def return_json_error(msg, status_code):\n return Response(response=json.dumps({'message': str(msg)}), status=status_code, mimetype=\"application/json\")", "def test_failed_job(self):\n failed_job = json.loads(BASE_JSON % (FAILURE, 1433166610, 1, 1433166609))[0]\n self.assertEquals(self.query_api.get_job_status(failed_job), FAILURE)", "def internal_server_error(error_msg):\n return jsonify(error=str(error_msg))", "def test_error_no_json(self, app, data_queues, metricsmock):\n res = self._call(app, \"\\xae\", method=\"post\", status=400)\n detail = \"JSONDecodeError('Expecting value: line 1 column 1 (char 0)')\"\n self.check_response(data_queues, res, \"parse_error\", details={\"decode\": detail})\n metricsmock.assert_incr_once(\n self.metric_type + \".request\", tags=[self.metric_path, \"key:test\"]\n )", "def indicate_failure(self):\n pass", "def internal_error_400(error):\n return jsonify({'error':\n \"Die Anfrage wurde syntaktisch falsch erstellt.\"}), 400", "def json_response(f):\n \n def wrapped(*args, **kwargs):\n result = f(*args, **kwargs)\n \n response = HttpResponse(json.dumps(result))\n \n if type(result) == dict and \"error\" in result:\n response.status_code = 500\n \n \n return response", "def bad_request(error):\n return jsonify({\n 'success': False,\n 'error': STATUS_BAD_REQUEST,\n 'message': ERROR_MESSAGES[STATUS_BAD_REQUEST]\n }), STATUS_BAD_REQUEST", "def failure(self) -> 'outputs.EndConditionResponse':\n return pulumi.get(self, \"failure\")", "def failure(self, result):\r\n raise NotImplementedError", "def internal_server_error(error):\n return jsonify({\n 'success': False,\n 'error': STATUS_INTERNAL_SERVER_ERROR,\n 'message': ERROR_MESSAGES[STATUS_INTERNAL_SERVER_ERROR]\n }), STATUS_INTERNAL_SERVER_ERROR", "def create_error_response(data: Dict[str, str], status_code: int) -> Response:\n resp = jsonify(data)\n resp.status_code = status_code\n return resp", "def add_fail(self, data, message):\n rv = self.post(self.add_url, data)\n assert in_response(rv, 'Add {}'.format(self.nice_name))\n assert in_response(rv, message)\n return rv", "def un_processable_422(error):\n return jsonify({\n 'success': False,\n 'message': 'request cannot be processed',\n 'error': 422\n }), 422", "def to_response_data(self) -> typing.Any:\n v = self.value or {}\n error_code = 
v.get(\"code\", \"GenericLobotomyError\")\n error_message = v.get(\"message\", \"There was an error.\")\n return {\"Error\": {\"Code\": error_code, \"Message\": error_message}}", "def handle_exception(e):\r\n # start with the correct headers and status code from the error\r\n response = e.get_response()\r\n # replace the body with JSON\r\n response.data = json.dumps({\r\n \"code\": e.code,\r\n \"name\": e.name,\r\n \"description\": e.description,\r\n })\r\n response.content_type = \"application/json\"\r\n return response", "def test_error_post(self):\n Parameters = Parameters()\n response = self.client.open(\n '/error',\n method='POST',\n data=json.dumps(Parameters),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def markFailure(self, error):\n try:\n erval = error.value.status\n except AttributeError:\n erval = False\n self.add(erval)\n return error", "def server_error(error=None):\n return jsonify({\n 'Error': 'Check if the request causes a server error'\n }), 500", "def unprocessable_entity(error):\n return jsonify({\n 'success': False,\n 'error': STATUS_UNPROCESSABLE_ENTITY,\n 'message': ERROR_MESSAGES[STATUS_UNPROCESSABLE_ENTITY]\n }), STATUS_UNPROCESSABLE_ENTITY", "def unprocessable_entity(error): # pylint: disable=unused-argument\n response = jsonify(\n {\n \"success\": False,\n \"error_code\": 422,\n \"message\": \"Unprocessable Entity\",\n }\n )\n return response, 422", "def handle_exception(e):\n # start with the correct headers and status code from the error\n response = e.get_response()\n # replace the body with JSON\n response.data = json.dumps({\n \"code\": e.code,\n \"name\": e.name,\n \"description\": e.description,\n })\n response.content_type = \"application/json\"\n return response", "def handle_exception(error):\n return make_response(jsonify({'message': error.description}), 400)", "def response_json_error_info(func):\n\n def wrapper(req):\n try:\n return func(req)\n except Exception as ex:\n return get_json_response({\n \"status\": \"error\",\n \"error_info\": str(ex),\n \"trace_back\": traceback.format_exc()\n })\n\n return wrapper", "def _rest_error(self, status_code, error_code, message):\n return {\"status_code\": status_code, \"error_code\": error_code, \"message\": message}", "def _err_response(self, msg):\r\n return {'success': False, 'error': msg}", "def create_failure(test, time, failure):\n info = _TestInfo(test, time)\n info._failure = failure\n return info", "def error_handler(self, failure):\n log.error(failure)", "def _on_tracking_failure(self, response, data):\n try:\n response = json.loads(response)\n except:\n # the response should be in JSON, but in case it can't be parsed just try another attempt\n logging.debug(\"cannot parse tracker response, should be valid JSON\")\n return response\n\n # remove the successfully tracked hits from payload\n tracked = response['tracked']\n data['requests'] = data['requests'][tracked:]\n\n return response['message']", "def error_json(self, number=None, payload=None):\n try:\n spayload = json.dumps(payload)\n # spayload = payload.replace('\\\"','').replace('\\'','')\n except Exception:\n spayload = '\"\"'\n\n vals = (error_codes[number], str(number), spayload)\n self.debug(\"ERROR %s - %s - payload: %s\", *vals)\n\n return json.loads('{ \"Error\":\"%s\", \"Err\":\"%s\", \"Payload\":%s }' % vals)", "def test_invalid_json(self):\r\n data = {\"Testing invalid\"}\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'),\r\n 
data=data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB: testing',\r\n HTTP_DATE='testdate'\r\n )\r\n self.assertIn('Invalid JSON', response.content)\r\n self.assertEqual(response.status_code, 400)", "def iftttError(code, error):\n return {\n \"statusCode\": code,\n \"body\": json.dumps({\n \"errors\": [\n {\n \"message\":error\n }\n ],\n }),\n }", "def feedback_failed_response(self, classname, method, commit_id, commit_log):\n logging.debug(\"feedback_failed_response(%s, %s, %s, %s)\", classname, method, commit_id, commit_log)\n\n logging.info(\"Testcase %s %s failed %s commit is blamed\", classname, method, commit_id)\n logging.info(\"Commit:\\n\\n%s\", commit_log)\n\n with open(self.html_file, \"a\") as result_file:\n result_file.write(\"<br/><b>%s - %s is broken by %s</b><br/><pre>%s</pre>\\n\" % (classname, method, commit_id, commit_log))\n with open(self.txt_file, \"a\") as result_file:\n result_file.write(\"%s - %s is broken by %s\\n%s\\n\" % (classname, method, commit_id, commit_log))\n with open(self.summary_file, \"a\") as result_file:\n result_file.write(\"<br/>Broken %s#%s by %s\\n\" % (classname.split(\".\")[-1], method, commit_id))", "def mark_failed(self):\n self.status = self.FAILED\n self.traceback = self._format_traceback()\n self.save(update_fields={'status', 'traceback', 'updated_at'})", "def handle_exception(e):\r\n # start with the correct headers and status code from the error\r\n response = e.get_response()\r\n # replace the body with JSON\r\n response.data = json.dumps({\r\n \"code\": e.code,\r\n \"name\": e.name,\r\n \"description\": e.description,\r\n })\r\n response.content_type = \"application/json\"\r\n return response", "def test_270(self):\n e = exceptions.ServiceFailure(\n '123.456.789', description='test description',\n traceInformation='test traceInformation'\n )\n se = e.serialize()\n self.assertEqual(\n se, u'<?xml version=\"1.0\" encoding=\"utf-8\"?>'\n u'<error detailCode=\"123.456.789\" errorCode=\"500\" name=\"ServiceFailure\">'\n u'<description>test description</description>'\n u'<traceInformation>test traceInformation</traceInformation></error>'\n )", "def addFailure(self, test, err):\n self.failures.append((proto_test(test), proto_error(err)))", "def test_failed_job(self):\n\n failed_job = json.loads(TREEHERDER_JOB % (\"testfailed\", \"completed\"))\n self.assertEquals(self.query_api.get_job_status(failed_job), FAILURE)", "def test_bad_param(self):\n url = '/%s/job-types/validation/' % self.api\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['name'] = None\n json_data = {\n 'manifest': manifest,\n 'configuration': self.configuration\n }\n\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertFalse(results['is_valid'])\n self.assertEqual(len(results['errors']), 1)\n self.assertEqual(results['errors'][0]['name'], 'JSON_VALIDATION_ERROR')", "def error_return(content, status):\n content = '{' + '\"status\":{},\"message\":\"{}\"'.format(status, content) + '}'\n return Response(content, status=status, mimetype='application/json')", "def internal_error(error):\n return jsonify(error='configuration could not be generated')" ]
[ "0.6702617", "0.66657233", "0.640679", "0.6245065", "0.6203946", "0.6131184", "0.6113463", "0.61089534", "0.6089362", "0.6044126", "0.5987193", "0.5950327", "0.5941476", "0.592693", "0.5907787", "0.586536", "0.5850635", "0.5850635", "0.5835833", "0.58124745", "0.58044654", "0.57942754", "0.5777458", "0.5768118", "0.5756885", "0.57527953", "0.57517254", "0.5749282", "0.57290274", "0.5725369", "0.5725221", "0.5683908", "0.5678758", "0.56506777", "0.564803", "0.5636583", "0.56300473", "0.5630028", "0.5620662", "0.56146866", "0.5604771", "0.5599093", "0.55928224", "0.55843526", "0.5563792", "0.5552997", "0.5552267", "0.5548042", "0.5541276", "0.552103", "0.5515397", "0.5507448", "0.5484235", "0.54839957", "0.54825664", "0.5475836", "0.54590434", "0.54522973", "0.5443946", "0.54339695", "0.54324585", "0.5418313", "0.54148823", "0.54123837", "0.5403883", "0.5396433", "0.53910244", "0.5389184", "0.5384334", "0.5371535", "0.5359333", "0.5357231", "0.5353246", "0.53452283", "0.53366446", "0.53350997", "0.53177553", "0.5313526", "0.5313226", "0.5312352", "0.5302397", "0.5299567", "0.5297499", "0.5296685", "0.5296162", "0.5290085", "0.52869636", "0.5286772", "0.5286437", "0.5274326", "0.52737683", "0.5271583", "0.52685577", "0.5267431", "0.52588797", "0.5256547", "0.5254472", "0.5247244", "0.52460945", "0.524437" ]
0.6412482
2
Decorator for render_ methods. Serializes the return value or exception to JSON and then writes it to the request object.
def jsonResult(f): def _inner(self, request): d = maybeDeferred(f, self, request) d.addCallback(_writeJSONResponse, request) d.addErrback(_writeJSONErrorResponse, request) return NOT_DONE_YET return _inner
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jsonify(f):\n @wraps(f)\n def decorated_function(*args, **kwargs):\n result = f(*args, **kwargs)\n data = json.dumps(result, indent=None if request.is_xhr else 2)\n return app.response_class(data, mimetype='application/json')\n return decorated_function", "def json(f):\n if dsettings.DEBUG:\n ct = 'text/plain'\n j = lambda d: simplejson.dumps(d, indent = 2)\n else:\n ct = 'application/json'\n j = simplejson.dumps\n def wrapper(func, *args, **kw):\n try:\n result = func(*args, **kw)\n except Exception, e:\n result = j(str(e))\n status = 500\n else:\n if isinstance(result, http.HttpResponse):\n return result\n else:\n result = j(result)\n status = 200\n return http.HttpResponse(content = result, content_type = ct, status = status)\n return decorator(wrapper, f)", "def json_response(func):\n\t@wraps(func)\n\tdef decorated_view(*args, **kwargs):\n\t\tdata = func(*args, **kwargs)\n\t\tdata = json.dumps(data)\n\t\tresponse = make_response(data)\n\t\tresponse.headers['Content-Type'] = 'application/json'\n\t\treturn response\n\treturn decorated_view", "def json_response(func):\n def decorator(request, *args, **kwargs):\n objects = func(request, *args, **kwargs)\n if isinstance(objects, HttpResponse):\n return objects\n try:\n data = simplejson.dumps(objects)\n if 'callback' in request.GET:\n data = '%s(%s);' % (request.GET['callback'], data)\n except:\n data = simplejson.dumps(str(objects))\n if 'just_the_json_plz' in kwargs:\n return data\n if 'just_the_data_plz' in kwargs:\n return objects\n if 'callback' in request.GET or 'callback' in request.POST:\n #jsonp\n return HttpResponse(data, \"text/javascript\")\n else:\n #json\n return HttpResponse(data, \"application/json\")\n return decorator", "def json_response(func):\n def decorator(request, *args, **kwargs):\n objects = func(request, *args, **kwargs)\n if isinstance(objects, HttpResponse):\n return objects\n try:\n data = json.dumps(objects, default=json_serialize)\n if 'callback' in request.REQUEST:\n # a jsonp response!\n data = '%s(%s);' % (request.REQUEST['callback'], data)\n return HttpResponse(data, \"text/javascript\")\n except Exception as e:\n print (e)\n data = json.dumps(str(objects))\n return HttpResponse(data, \"application/json\")\n return decorator", "def jsonify(func):\n\n @functools.wraps(func)\n def convert(*args, **kwargs):\n\n success = True\n code = 200 # default status code - success!\n\n try:\n result = func(*args, **kwargs)\n\n if isinstance(result, BaseResponse):\n return result\n\n except exc.HTTPException as ex:\n # i'd like to be able to just re-raise e here, but the body of the\n # response is e.get_body() instead of e.description - so we have to\n # just set up the response ourselves\n result = { 'message' : ex.description }\n code = ex.code\n\n except Exception as ex:\n result = { 'message' : 'Internal Server Error', 'system_message' : ex.message }\n code = 500\n\n # build a response object, and change the content type header to json\n response = make_response(json.dumps(result))\n response.headers['Content-Type'] = 'application/json'\n response.status_code = code\n\n return response\n\n # return the function that is taking the place of (or masquerading as) our decorated function\n return convert", "def response_json(func):\n def wrapper(request):\n try:\n return get_json_response(func(request))\n except Exception as ex:\n return get_json_response({\n \"status\": \"error\",\n \"error_info\": str(ex),\n \"trace_back\": traceback.format_exc()\n })\n\n return wrapper", "def jsonify(function):\n 
@wraps(function)\n def inner(*args, **kwargs):\n \"\"\"\n This docstring will be overridden by @wraps decorator.\n \"\"\"\n return Response(\n dumps(function(*args, **kwargs)),\n mimetype='application/json'\n )\n return inner", "def decorated_function(request, *args, **kwargs):\n user_for_login(request)\n response['data'] = f(*args, **kwargs)\n response = json.dumps(response)\n return response", "def json_response(func):\n\n def decorator(request, *args, **kwargs):\n objects = func(request, *args, **kwargs)\n if isinstance(objects, HttpResponse):\n return objects\n try:\n data = simplejson.dumps(objects)\n if 'callback' in request.REQUEST:\n # a jsonp response!\n data = '%s(%s);' % (request.REQUEST['callback'], data)\n return HttpResponse(data, \"text/javascript\")\n except:\n data = simplejson.dumps(str(objects))\n return HttpResponse(data, \"application/json\")\n\n return decorator", "def json_response(func):\n def decorator(request, *args, **kwargs):\n objects = func(request, *args, **kwargs)\n if isinstance(objects, HttpResponse):\n return objects\n try:\n data = simplejson.dumps(objects)\n if 'callback' in request.REQUEST:\n # a jsonp response!\n data = '%s(%s);' % (request.REQUEST['callback'], data)\n return HttpResponse(data, \"text/javascript\")\n except:\n data = simplejson.dumps(str(objects))\n return HttpResponse(data, \"application/json\")\n return decorator", "def json_response(func):\n def decorator(request, *args, **kwargs):\n objects = func(request, *args, **kwargs)\n if isinstance(objects, HttpResponse):\n return objects\n\n data = json.dumps(objects)\n if 'callback' in request:\n # a jsonp response!\n data = '%s(%s);' % (request['callback'], data)\n return HttpResponse(data, \"text/javascript\")\n\n return HttpResponse(data, \"application/json\")\n return decorator", "def json_response(func):\n def wrapper(*args, **kwargs):\n try:\n ret_val = func(*args, **kwargs)\n if isinstance(ret_val, dict):\n result = {\"code\": 0, \"msg\": \"\", \"data\": ret_val}\n return JsonResponse(result)\n else:\n result = {\"code\": -20002, \"msg\": u\"视图函数返回值类型必须是字典\"}\n return JsonResponse(result)\n\n except Exception as err:\n logger.exception(\"func name: %s, error: %s\" % (func.__name__, err))\n result = {\"code\": -20001, \"msg\": str(err)}\n return JsonResponse(result)\n return wrapper", "def json_decorator(f):\n def decorator(*args, **kwargs):\n return jsonify(f(*args, **kwargs))\n return decorator", "def json_response(f):\n \n def wrapped(*args, **kwargs):\n result = f(*args, **kwargs)\n \n response = HttpResponse(json.dumps(result))\n \n if type(result) == dict and \"error\" in result:\n response.status_code = 500\n \n \n return response", "def render_to(template):\n\tdef renderer(func):\n\t\t@wraps(func)\n\t\tdef wrapper(request, *args, **kw):\n\t\t\tfrom django.core.exceptions import PermissionDenied\n\t\t\tfrom django.contrib.auth.decorators import login_required\n\t\t\toutput = func(request, *args, **kw)\n\t\t\tcontext = RequestContext(request)\n\t\t\tcontext['keywords'] = kw\n\t\t\tcontext['args'] = args\n\t\t\tif isinstance(output, (list, tuple)):\n\t\t\t\treturn render_to_response(output[1], output[0], context)\n\t\t\telif isinstance(output, dict):\n\t\t\t\treturn render_to_response(template, output, context)\n\t\t\treturn output\n\t\treturn wrapper\n\treturn renderer", "def render_to(template_name):\n def renderer(func):\n def wrapper(request, *args, **kw):\n output = func(request, *args, **kw)\n if not isinstance(output, dict):\n return output\n return 
render_to_response(template_name, output,\n context_instance=RequestContext(request))\n return wrapper\n return renderer", "def render_to(template):\n def renderer(func):\n def wrapper(request, *args, **kw):\n output = func(request, *args, **kw)\n if isinstance(output, (list, tuple)):\n return render(request, output[1], output[0])\n elif isinstance(output, dict):\n return render(request, template, output)\n return output\n return wrapper\n return renderer", "def returns_json(f):\n @wraps(f)\n def decorated_function(*args, **kwargs):\n try:\n r = f(*args, **kwargs)\n except HTTPException as e:\n # monkey-patch the headers / body to be json\n headers = e.get_headers()\n for header in headers:\n if 'Content-Type' in header:\n headers.remove(header)\n headers.append(('Content-Type', 'application/json'))\n e.get_headers = lambda x: headers\n e.get_body = lambda x: json.dumps({\"message\": e.description})\n raise e\n if isinstance(r, tuple):\n return Response(r[0], status=r[1], content_type='application/json')\n else:\n return Response(r, content_type='application/json')\n return decorated_function", "def rendered(func):\n @wraps(func)\n def render_function(request, *args, **kwargs):\n response = func(request, *args, **kwargs)\n if isinstance(response, HttpResponse) or isinstance(response,\n HttpResponseRedirect):\n return response\n template_name, items = response\n return render_to_response(template_name, items,\n context_instance=RequestContext(request))\n return render_function", "def render_response(self, context):\n\n # if object is a string just return as is\n if isinstance(context, basestring):\n self.response.write(context)\n # else attempt to serialise and return\n else:\n context = json.dumps(context)\n self.response.write(context)\n # set the right content-type header\n self.response.headers['Content-Type'] = 'application/json'", "def response_json(func):\n\n @wraps(func)\n def set_response(*args, **kwargs):\n res = func(*args, **kwargs)\n if type(res) is not dict:\n return res\n else:\n return Response(json.dumps(res), content_type=\"application/json; charset=utf-8\")\n return set_response", "def render_exception_json(exception_data):\n return json.dumps(exception_data, default=_json_serializer)", "def render_to_response(self, context, **response_kwargs):\n return JsonResponse(context)", "def wrapper(*args, **kwargs):\n results = view_method(*args, **kwargs)\n if results[0]:\n # json api\n is_success, error_msg, content = bool(\n results[1]), results[2], results[3]\n return jsonify({\n \"is_success\": is_success,\n \"error_msg\": error_msg,\n \"content\": content\n })\n else:\n # render html\n template_name = results[1]\n context = results[2]\n # template render\n projects = current_app.config.db.get_all_projects()\n context[\"projects\"] = projects\n return render_template(template_name, **context)", "def render_to(template):\n def renderer(func):\n def wrapper(request, *args, **kw):\n output = func(request, *args, **kw)\n if isinstance(output, (list, tuple)):\n output, tpl = output\n else:\n tpl = template\n ct = 'text/html'\n if tpl.endswith('xml'):\n ct = 'text/xml' if dsettings.DEBUG else 'application/xml'\n if isinstance(output, dict):\n if request.is_ajax() and settings.TEMPLATE_FOR_AJAX_REQUEST:\n tpl = ('%s_body%s' % os.path.splitext(tpl), tpl)\n return render_to_response(tpl, output, RequestContext(request), mimetype=ct)\n else:\n return output\n return wrapper\n return renderer", "def render_to(template_path):\n\n def decorator(func):\n def wrapper(request, *args, 
**kwargs):\n output = func(request, *args, **kwargs)\n if not isinstance(output, dict):\n return output\n ctx = RequestContext(request)\n return render_to_response(template_path, output,\n context_instance=ctx)\n return wrapper\n return decorator", "def response_json(func):\n\n def wrapper(req):\n try:\n\n return get_json_response(func(req))\n except Exception as ex:\n return get_json_response({\n \"status\": \"error\",\n \"error_info\": str(ex),\n \"trace_back\": traceback.format_exc()\n })\n\n return wrapper", "def render_json(self, obj):\n self.response.content_type = \"application/json\"\n self.response.out.write(json.encode(obj))", "def json_service(f):\n\n @wraps(f)\n def decorated_function(*args, **kwargs):\n try:\n results = f(*args, **kwargs)\n if results is None:\n results = {}\n if not isinstance(results, dict):\n results = {'data': results}\n if 'success' not in results:\n results['success'] = True\n return jsonify(results)\n except Exception as e:\n print \"error in: \", f.__name__\n print traceback.print_exc()\n return jsonify({'success': False, 'error': str(e)})\n\n return decorated_function", "def render_json(object):\r\n return HttpResponse(jsonify(object), content_type='application/json')", "def render_to_json_response(self, data: Optional[Dict] = {}, meta: Optional[Dict] = {},\n error: Optional[str] = '', status=HTTPStatus.OK, **response_kwargs):\n response_data = {\"body\": data, \"meta\": meta, \"error\": error}\n return JsonResponse(response_data, status=status, **response_kwargs)", "def inner(*args, **kwargs):\n return Response(\n dumps(function(*args, **kwargs)),\n mimetype='application/json'\n )", "def render_to_json_response(self, context, **response_kwargs):\n response_kwargs.update(dict(json_dumps_params=dict(ensure_ascii=False)))\n return JsonResponse(self.safe_json(context), **response_kwargs)", "def render( request, etype, value, tb ):", "def dispatch(self, request, *args, **kwargs):\n # Wrap the dispatch method, so that we autoencode JSON\n response = super(JSONRestView, self).dispatch(request, *args, **kwargs)\n # If this is not an HTTPResponseBase object (Base class for responses) \n if not isinstance(response, HttpResponseBase):\n response = json_response(response)\n\n return response", "def render_to(template=None, mimetype=\"text/html\"):\n def renderer(function):\n @wraps(function)\n def wrapper(request, *args, **kwargs):\n output = function(request, *args, **kwargs)\n if not isinstance(output, dict):\n return output\n tmpl = output.pop('TEMPLATE', template)\n return render_to_response(tmpl, output, \\\n context_instance=RequestContext(request), mimetype=mimetype)\n return wrapper\n return renderer", "def render(data, template=None, content_type=b'application/json', i18n=None, **kw):\n\t\n\treturn content_type, dumps(data, **kw)", "def render(self, *a, **kw):\r\n try:\r\n res = Wrapped.render(self, *a, **kw)\r\n if is_api():\r\n res = json_respond(res)\r\n elif self.space_compress:\r\n res = spaceCompress(res)\r\n c.response.content = res\r\n except NoTemplateFound, e:\r\n # re-raise the error -- development environment\r\n if g.debug:\r\n s = sys.exc_info()\r\n raise s[1], None, s[2]\r\n # die gracefully -- production environment\r\n else:\r\n abort(404, \"not found\")\r\n return c.response", "def view(cls):\n @wraps(cls)\n def wrapper(request, **kwargs):\n if hasattr(cls, 'as_view'):\n return cls.as_view()(request, **kwargs)\n obj = cls(request, **kwargs)\n handler = getattr(obj, request.method.lower(), None)\n if handler is None:\n return 
HttpResponseNotAllowed('%s not allowed' % request.method)\n res = obj.setup(obj.c) or handler(obj.c) or obj.render(obj.c)\n if isinstance(res, (dict, list)):\n return JsonResponse(res, safe=False)\n return res\n return wrapper", "def render(self, r):\n raise NotImplementedError", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(\n self.get_data(context),\n **response_kwargs\n )", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(self.get_data(context))", "def response_json_error_info(func):\n def wrapper(request):\n try:\n return func(request)\n except Exception as ex:\n return get_json_response({\n \"status\": \"error\",\n \"error_info\": str(ex),\n \"trace_back\": traceback.format_exc()\n })\n\n return wrapper", "def render_to_json_response(self, context, **response_kwargs):\n return HttpResponse(\n self.convert_context_to_json(context),\n content_type='application/json',\n **response_kwargs\n )", "def render_to_json_response(self, context, **response_kwargs):\n return HttpResponse(\n self.convert_context_to_json(context),\n content_type='application/json',\n **response_kwargs\n )", "def render(self, data, accepted_media_type=None, renderer_context=None):\n\n if '(e.g:bbox=xmin,ymin,xmax,ymax)' in str(data):\n rendered = {'error': str(data)}\n return json.dumps(rendered)\n if data is None:\n return ''\n\n if 'error' in data:\n rendered = data\n elif isinstance(data, dict):\n rendered = self.render_single(data)\n else:\n rendered = self.render_many(data)\n\n return json.dumps(rendered, separators=self.separators)", "def as_json(func):\n @functools.wraps(func)\n def inner(*args, **kwargs):\n json_response = _as_json(func(*args, **kwargs))\n if isinstance(json_response, tuple):\n response, code = json_response\n if isinstance(response, GenericYetiError):\n return jsonify({response.type: response.message}), code\n return jsonify(response), code\n return jsonify(json_response)\n return inner", "def render_to_response(self, context):\n\t\treturn self.get_json_response(self.convert_context_to_json(context))", "def wrapper(self, *args, **kwd):\n try:\n retval = function(self, *args, **kwd)\n except (ValueError, AttributeError), log:\n LOG('SlapTool', INFO, 'Converting ValueError to NotFound, real error:',\n error=True)\n raise NotFound(log)\n except SoftwareInstanceNotReady, log:\n self.REQUEST.response.setStatus(408)\n self.REQUEST.response.setHeader('Cache-Control', 'private')\n return self.REQUEST.response\n except ValidationFailed:\n LOG('SlapTool', INFO, 'Converting ValidationFailed to ValidationFailed,'\\\n ' real error:',\n error=True)\n raise ValidationFailed\n except Unauthorized:\n LOG('SlapTool', INFO, 'Converting Unauthorized to Unauthorized,'\\\n ' real error:',\n error=True)\n raise Unauthorized\n\n self.REQUEST.response.setHeader('Content-Type', 'text/xml; charset=utf-8')\n return '%s' % retval", "def render_to_response(self, context):\n return 
self.get_json_response(self.convert_context_to_json(context))", "def wrapper(*args, **kwargs):\n response = {\n \"meta\": {\n \"status\": kwargs.pop(\"status\", True),\n \"verbose\": kwargs.pop(\"verbose\", \"OK\")\n },\n \"content\": None\n }\n if not response[\"meta\"][\"status\"]:\n cherrypy.response.headers['Content-Type'] = 'application/json'\n cherrypy.response.status = 400\n return json.dumps(response)\n return method(*args, **kwargs)", "def render_to_json_response(self, context, **response_kwargs):\n return JsonResponse(self.get_data(**context), **response_kwargs)", "def render( *args, **kwargs ):", "def render(self, *args, **kwargs):\r\n raise NotImplementedError", "def render_to(template=None):\r\n def decorator(func):\r\n template_default = template or template_guess(func)\r\n\r\n @wraps(func)\r\n def wrapper(request, *args, **kwargs):\r\n output = func(request, *args, **kwargs)\r\n if not isinstance(output, dict):\r\n return output\r\n\r\n output.setdefault('TEMPLATE', template_default)\r\n response = render_dict(request, output)\r\n if 'COOKIES' in output:\r\n cookies = (output['COOKIES'],) if isinstance(output['COOKIES'], dict) else output['COOKIES']\r\n for i in cookies:\r\n response.set_cookie(**i)\r\n return response\r\n\r\n return wrapper\r\n return decorator", "def render(self, field, key, value, REQUEST, render_prefix=None):\n return self._render(field, key, value, REQUEST, render_prefix=render_prefix)", "def render(data_dict, *args, **kwargs):", "def render(self, data, *args, **kwargs):\n pass # pragma: nocover", "def jsonable_error(status=500, message=\"The Studio servers encountered an error\"):\r\n def outer(func):\r\n @functools.wraps(func)\r\n def inner(request, *args, **kwargs):\r\n if request.is_ajax():\r\n content = json.dumps({\"error\": message})\r\n return HttpResponse(content, content_type=\"application/json\",\r\n status=status)\r\n else:\r\n return func(request, *args, **kwargs)\r\n return inner\r\n return outer", "def req_as_decorator(req_output, *args, **kwargs):\r\n return req_output(dummy_func)(*args, **kwargs)", "def json_response(self, request, *args, **kwargs):\n\n return HttpResponse(self.construct_json(),\n content_type='application/json',\n mimetype='application/json', status=self.status)", "def json_response(*args, **kwargs):\n data = stringify(*args, **kwargs)\n return Response(data, mimetype='application/json')", "def jsonify(func, *args, **kwargs): \n adict = func(*args, **kwargs)\n if not isinstance(adict, dict):\n return adict\n \n \n #: getting updates from session and database\n \n updates = list(session['callback_updates']) \n updates.extend(models.CallbackUpdate.dump())\n \n if updates:\n if not adict.get('type') == 'composite':\n adict = beans._wrap('composite', [adict]) \n \n adict['result'].extend(updates)\n \n json = simplejson.dumps(adict)\n response = make_response(json) \n response.headers['Content-Type'] = 'application/json'\n session['callback_updates'] = []\n db.session.commit() \n return response", "def render(request, *args, **kw):", "def _render_result(self, errno, errmsg, data=None):\n self.set_header(\"Content-Type\", \"application/json; charset=utf-8\")\n if self._finished:\n return\n self.write(tornado.escape.json_encode({\n \"errno\": errno,\n \"errmsg\": errmsg,\n \"logid\": self.logid,\n \"data\": data,\n }))", "def render(self, data, accepted_media_type=None, renderer_context=None):\n if data is None:\n return bytes()\n\n return orjson.dumps(\n data, \n default=self.encoder_class().encode,\n 
option=orjson.OPT_SERIALIZE_UUID | \\\n orjson.OPT_SERIALIZE_NUMPY | \\\n orjson.OPT_OMIT_MICROSECONDS | \\\n orjson.OPT_NON_STR_KEYS,\n )", "def cached_api(*args, **kwargs):\n def decorator(func):\n kwargs['request_gatekeeper'] = lambda request: not getattr(cached_view, 'never_cache', False)\n kwargs['response_gatekeeper'] = _response_gatekeeper\n\n def response_wrapper(ret):\n ret = loads(ret)\n ret['success'] = True\n ret = client_dumps(ret)\n return HttpResponse(ret, 'application/json')\n\n cache_func = cached_view(*args,\n cached_response_wrapper=response_wrapper,\n serializer=client_dumps,\n **kwargs)(func)\n cache_func.arg_spec = ArgSpec(func)\n\n return cache_func\n return decorator", "def render(self, request, context, template_name):\n status_code = context.pop('status_code', httplib.OK)\n additional_headers = context.pop('additional_headers', {})\n\n self.set_renderers(request)\n\n for renderer in request.renderers:\n response = renderer(self, request, context, template_name)\n if response is NotImplemented:\n continue\n response.status_code = status_code\n response.renderer = renderer\n break\n else:\n tried_mimetypes = list(itertools.chain(*[r.mimetypes for r in request.renderers]))\n response = self.http_not_acceptable(request, tried_mimetypes)\n response.renderer = None\n for key, value in additional_headers.iteritems():\n response[key] = value\n\n # We're doing content-negotiation, so tell the user-agent that the\n # response will vary depending on the accept header.\n patch_vary_headers(response, ('Accept',))\n return response", "def func_wrapper(event, context):\n req = Request(event, context)\n\n try:\n resp = func(req)\n\n if not isinstance(resp, Response):\n message = (\n 'Invalid return value from handler. '\n 'It should be either Response or Exception'\n )\n raise TypeError(message)\n except ServerlessError as e:\n status_code = e.status_code\n message = e.message if e.message else e.__class__.__name__\n\n resp = to_error_response(message, e.errors, status_code)\n except Exception as e: # pylint: disable=W0703\n logger.exception(e)\n status_code = 500\n message = 'InternalServerError'\n errors = tuple()\n\n resp = to_error_response(message, errors, status_code)\n return resp.to_lambda_output()", "def output(self, resource):\n @wraps(resource)\n def wrapper(*args, **kwargs):\n rv = resource(*args, **kwargs)\n rv = self.responder(rv)\n return rv\n\n return wrapper", "def render_to_response(self, context, **response_kwargs):\n json = self.convert_context_to_json(context)\n # If callback is specified, serve as JSONP\n callback = self.request.GET.get('callback', None)\n if callback:\n response_kwargs['content_type'] = 'application/javascript'\n json = \"%s(%s);\" % (callback, json)\n return self.response_class(json, **response_kwargs)", "def decorator(func):\n def wrapper(resource, request, ** kwargs):\n \"\"\" wraps the method with common api response's routines, like\n checking if it's authenticated or packing the response in an api\n friendly way\n\n \"\"\"\n # ckech if everything is ok, before proceding\n resource.method_check(request, allowed=expected_methods)\n resource.is_authenticated(request)\n resource.throttle_check(request)\n\n # call the decorated method\n result = func(resource, request, **kwargs)\n\n # if a single response is expected\n if single:\n if returns_extra_data:\n objt = result[0]\n else:\n objt = result\n bundle = resource.build_bundle(obj=objt, request=request)\n to_be_serialized = resource.full_dehydrate(bundle)\n if returns_extra_data:\n 
to_be_serialized.data.update(result[1])\n else: # if we are expecting an array of objects\n # we need to paginante\n paginator = resource._meta.paginator_class(\n request.GET,\n result,\n resource_uri=resource.get_resource_uri(),\n limit=resource._meta.limit,\n max_limit=resource._meta.max_limit,\n collection_name=resource._meta.collection_name)\n\n to_be_serialized = paginator.page()\n\n bundles = [resource.build_bundle(obj=obj, request=request)\n for obj in to_be_serialized['objects']]\n\n to_be_serialized['objects'] = [resource.full_dehydrate(bnd)\n for bnd in bundles]\n\n resource.log_throttled_access(request)\n return resource.create_response(request, to_be_serialized)\n return wrapper", "def render_view(self, field, value, REQUEST, render_prefix=None):\n return self._render(field, None, value, REQUEST, render_prefix=render_prefix)", "def response_transform_decorator(original_func):\n def response_transformer_wrapper(*args, **kwargs):\n \"\"\"\n Log errors and apply transformation in response_handler_func\n \"\"\"\n try:\n response = original_func(*args, **kwargs)\n response.raise_for_status()\n\n except requests.exceptions.HTTPError:\n help_string = ('Please consult the Coursera Data '\n 'Exports Guide for further assistance: '\n 'https://partner.coursera.help/hc/en-us/articles/360021121132.') # noqa\n\n if (response.status_code == 403):\n help_string = ('Please authorize this application '\n 'by running:\\n'\n '\\t$ courseraoauth2client config authorize --app manage_research_exports\\n' # noqa\n 'See https://github.com/coursera/courseraoauth2client ' # noqa\n 'for more information on authorization.\\n'\n 'For further assistance, consult the '\n 'Coursera Data Exports Guide '\n 'https://partner.coursera.help/hc/en-us/articles/360021121132.') # noqa\n\n logging.error(\n 'Request to {url} with body:\\n\\t{body}\\nreceived response'\n ':\\n\\t{text}\\n'\n '{help_string}\\n'\n .format(url=response.url,\n text=response.text,\n body=(response.request and response.request.body),\n help_string=help_string))\n raise\n\n return response_transformer(response)\n return response_transformer_wrapper", "def __call__(self, rv):\n if isinstance(rv, ResponseBase):\n return rv\n data, status, headers = unpack(rv)\n resp = flask.make_response(self._encoder(data, **self.json_settings),\n status, {'Content-Type': self.content_type})\n resp.headers.extend(headers)\n return resp", "def render_diagnostics(request, diagnostics_dict, status=200):\n return HttpResponse(json.dumps(diagnostics_dict), status=status)", "def catch_exception(func):\n def wrapper(*args, **kwargs):\n try:\n ret_val = func(*args, **kwargs)\n return ret_val\n except Exception as err:\n logger.exception(\"func name: %s, error: %s\" % (func.__name__, err))\n result = {\"code\": -20001, \"msg\": str(err)}\n return JsonResponse(result)\n return wrapper", "def wrapper_view_error(\n view: Any = None, class_exception: Any = None, status: int = None\n) -> Any:\n\n def _decorate(function):\n @functools.wraps(function)\n def wrapped_function(*args, **kwargs):\n try:\n return function(*args, **kwargs)\n except class_exception as obj_exception:\n return Response(data={\"error\": obj_exception.message}, status=status)\n\n return wrapped_function\n\n if view:\n return _decorate(view)\n return _decorate", "def handle_errors(func):\n def wrapper(*args, **kwargs):\n try:\n response = func(*args, **kwargs)\n except Exception as e:\n response = jsonify({\"success\": False, \"message\": str(e)})\n return response\n wrapper.func_name = func.func_name\n 
return wrapper", "def render(self, data, accepted_media_type=None, renderer_context=None):\n if data is None:\n return bytes()\n renderer_context = renderer_context or {}\n indent = self.get_indent(accepted_media_type, renderer_context)\n\n if indent is None:\n separators = SHORT_SEPARATORS if self.compact else LONG_SEPARATORS\n else:\n separators = INDENT_SEPARATORS\n\n ret = ujson.dumps(data, ensure_ascii=self.ensure_ascii,\n double_precision=self.double_precision, escape_forward_slashes=self.escape_forward_slashes)\n # On python 2.x json.dumps() returns bytestrings if ensure_ascii=True,\n # but if ensure_ascii=False, the return type is underspecified,\n # and may (or may not) be unicode.\n # On python 3.x json.dumps() returns unicode strings.\n if isinstance(ret, six.text_type):\n # We always fully escape \\u2028 and \\u2029 to ensure we output JSON\n # that is a strict javascript subset. If bytes were returned\n # by json.dumps() then we don't have these characters in any case.\n # See: http://timelessrepo.com/json-isnt-a-javascript-subset\n ret = ret.replace('\\u2028', '\\\\u2028').replace('\\u2029', '\\\\u2029')\n return bytes(ret.encode('utf-8'))\n return ret", "def render(self, data, accepted_media_type=None, renderer_context=None):\n if data is None:\n return b''\n\n renderer_context = renderer_context or {}\n indent = self.get_indent(accepted_media_type, renderer_context)\n\n if indent is None:\n separators = SHORT_SEPARATORS if self.compact else LONG_SEPARATORS\n else:\n separators = INDENT_SEPARATORS\n\n ret = json.dumps(\n data, cls=self.encoder_class,\n indent=indent, ensure_ascii=self.ensure_ascii,\n allow_nan=not self.strict, separators=separators\n )\n\n # We always fully escape \\u2028 and \\u2029 to ensure we output JSON\n # that is a strict javascript subset.\n # See: http://timelessrepo.com/json-isnt-a-javascript-subset\n ret = ret.replace('\\u2028', '\\\\u2028').replace('\\u2029', '\\\\u2029')\n return ret.encode()", "def _json_default_encoder(func):\n\n @wraps(func)\n def inner(self, o):\n try:\n return o._redpipe_struct_as_dict # noqa\n except AttributeError:\n pass\n return func(self, o)\n\n return inner", "def render_to_response(self, context, **kwargs):\n kwargs = {}\n additional_headers = {}\n #create response headers\n if 'header' in context:\n for key in context['header']:\n if key == 'Content-Type':\n kwargs['content_type'] = context['header'][key]\n elif key.lower() == 'status':\n kwargs['status'] = context['header'][key]\n else:\n additional_headers[key] = context['header'][key]\n del context['header']\n \n #return json if not header\n if not 'content_type' in kwargs:\n kwargs['content_type'] = 'application/json'\n \n if 'pointer' in context: #return file\n context['pointer'].seek(0)\n kwargs['content'] = context['pointer'].read()\n context['volume'].close(context['pointer'], context['info']['hash'])\n elif 'raw' in context and context['raw'] and 'error' in context and context['error']: #raw error, return only the error list\n kwargs['content'] = context['error']\n elif kwargs['content_type'] == 'application/json': #return json\n kwargs['content'] = json.dumps(context)\n else: #return context as is!\n kwargs['content'] = context\n \n response = HttpResponse(**kwargs)\n for key, value in additional_headers.items():\n response[key] = value\n\n return response", "def render(self, **kwargs) -> str:\n return self.renderable(**kwargs).render()", "def response_json_error_info(func):\n\n def wrapper(req):\n try:\n return func(req)\n except Exception as ex:\n return 
get_json_response({\n \"status\": \"error\",\n \"error_info\": str(ex),\n \"trace_back\": traceback.format_exc()\n })\n\n return wrapper", "def json_response(func):\n async def wrapped(*args, **kwargs):\n content, status = await func(*args, **kwargs)\n return web.json_response(data=content, status=status)\n return wrapped", "def render_response(template, *args, **kwargs):\n\treturn render_template(template, *args, user=current_user(), **kwargs)", "def error_handler(func):\n def handle_caller():\n try:\n response, headers = func()\n if not isinstance(response, dict):\n raise ValueError(\n \"Call function %s (method %s) returned non-dict\"\n % (func.methods_to_viewfunc.get(request.method),\n request.method)\n )\n if 'success' not in response:\n response['success'] = True\n resp = jsonify(response)\n resp.headers.extend(headers)\n return resp\n except APICodingError:\n APP.logger.exception(\"API Coding error occured\")\n return jsonify({\n 'success': False,\n 'error': 'Internal coding error',\n }), 500\n except APIError as err:\n APP.logger.warning(\"API error occured, code: %s, msg: %s\",\n err.code,\n err.internal or err.message)\n response = {'success': False,\n 'error': err.message}\n return jsonify(response), err.code, err.headers\n except Exception:\n APP.logger.exception(\"Unexpected error during request processing\")\n return jsonify({\n 'success': False,\n 'error': 'Internal server error',\n }), 500\n return handle_caller", "def ajax_request(func):\n def wrapper(request, *args, **kwargs):\n if request.method == 'POST':\n response = func(request, *args, **kwargs)\n else:\n response = {'error': {'type': 405,\n 'message': 'Accepts only POST request'}}\n if isinstance(response, dict):\n resp = JsonResponse(response)\n if 'error' in response:\n resp.status_code = response['error'].get('type', 500)\n return resp\n return response\n wrapper.__name__ = func.__name__\n wrapper.__module__ = func.__module__\n wrapper.__doc__ = func.__doc__\n return wrapper", "def render(self):\n raise RenderNotImplemented('Render function is not implemented.')", "def ajax_call(json_form=True):\n\n def dec(func):\n\n @wraps(func)\n def deco(*a, **b):\n try:\n result = func(*a, **b)\n if json_form:\n return json.dumps(result)\n return result\n\n except Exception, excep:\n if isinstance(excep, cherrypy.HTTPRedirect):\n raise\n logger = get_logger(\"tvb.interface.web.controllers.base_controller\")\n logger.error(\"Encountered exception when calling asynchronously :\" + str(func))\n logger.exception(excep)\n raise\n\n return deco\n return dec", "def json_response(obj):\n return HttpResponse(json.dumps(obj), content_type=\"application/json\")", "def jsonify(obj):\n raise NotImplementedError", "def json(data, *args, **kwargs):\n return HttpResponseBehaviour(JsonResponse, data, *args, **kwargs)", "def _render(cls, request, code, ctype, msg):\r\n request.setResponseCode(code)\r\n request.setHeader('content-type', ctype)\r\n request.write(msg)\r\n request.finish()", "def passthrough(f):\r\n def wrapper(response):\r\n f()\r\n return response\r\n\r\n return wrapper" ]
[ "0.70818603", "0.70353097", "0.70183825", "0.69342184", "0.68645805", "0.67931926", "0.67370886", "0.6686487", "0.6680778", "0.66791755", "0.6660768", "0.6641343", "0.6585666", "0.658487", "0.65263444", "0.6510545", "0.64600986", "0.64438164", "0.63704526", "0.6299694", "0.6270659", "0.6261757", "0.6256613", "0.62542385", "0.6244726", "0.6240712", "0.6208953", "0.61913466", "0.6151469", "0.6147376", "0.612958", "0.612564", "0.61065334", "0.60980374", "0.60763466", "0.607167", "0.60576266", "0.60552585", "0.6046988", "0.602667", "0.6008193", "0.6002419", "0.6002419", "0.6002419", "0.6002419", "0.6002419", "0.5998523", "0.5990425", "0.5972395", "0.5972395", "0.5950141", "0.5934318", "0.59311515", "0.590731", "0.5867755", "0.58673334", "0.5841541", "0.58233976", "0.5804998", "0.5738163", "0.5706287", "0.5696795", "0.56888944", "0.56687796", "0.5665021", "0.56610394", "0.5649286", "0.5642116", "0.56410056", "0.5626388", "0.56257695", "0.5624225", "0.5594135", "0.55858916", "0.55770767", "0.55729795", "0.5564827", "0.5552689", "0.55406564", "0.55369085", "0.5535671", "0.5529676", "0.55233604", "0.5516039", "0.55031145", "0.5487283", "0.54838204", "0.54836893", "0.5468542", "0.5462731", "0.54627234", "0.5459182", "0.54509646", "0.5446055", "0.5435646", "0.5430218", "0.5425617", "0.5424568", "0.5422912", "0.5421439", "0.5413947" ]
0.0
-1
Verify PayPal IPN data.
def verify(self, request):
    paypalURL = 'https://www.sandbox.paypal.com/cgi-bin/webscr'
    if not self.SANDBOX:
        paypalURL = 'https://www.paypal.com/cgi-bin/webscr'

    def _cb(response):
        if response == 'INVALID':
            raise PaypalError(
                'IPN data invalid. data: %s', (data,))
        elif response == 'VERIFIED':
            return True
        else:
            raise PaypalError('Unrecognized verification response: %s',
                              (response,))

    data = request.content.read()
    params = '?cmd=_notify-validate&' + data

    d = getPage(paypalURL+params, method='POST')
    d.addCallback(_cb)
    return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def verify_ipn(data):\n data = dict(data)\n data['cmd'] = '_notify-validate'\n resp = requests.post(app.config['PAYPAL']['endpoint'], data=data)\n if resp.text == 'VERIFIED':\n return True\n return False", "def validate_with_paypal(request, validate_type):\n if validate_type == 'PDT':\n # we are on return url\n # need to verify if payment is completed\n # MERCHANT_TXN_KEY is your PDT identity token\n params = {\n 'cmd': '_notify-synch',\n 'tx': request.GET.get('tx', ''),\n 'at': settings.MERCHANT_TXN_KEY\n }\n data = urllib.urlencode(params)\n\n # Sample response:\n # SUCCESS\n # first_name=Jane+Doe\n # last_name=Smith\n # payment_status=Completed payer_email=janedoesmith%40hotmail.com\n # payment_gross=3.99\n # mc_currency=USD custom=For+the+purchase+of+the+rare+book+Green+Eggs+%26+Ham\n\n # If the response is FAIL, PayPal recommends making sure that:\n # The Transaction token is not bad.\n # The ID token is not bad.\n # The tokens have not expired.\n\n else: # IPN\n data = 'cmd=_notify-validate&%s' % request.POST.urlencode()\n\n # The response is one single-word: VERIFIED or INVALID\n\n headers = {\"Content-type\": \"application/x-www-form-urlencoded\",\n 'encoding': 'utf-8',\n \"Accept\": \"text/plain\"}\n request = urllib2.Request(settings.PAYPAL_POST_URL,\n data,\n headers)\n response = urllib2.urlopen(request)\n data = response.read()\n\n if validate_type == 'PDT':\n return parse_pdt_validation(data)\n else:\n return data.strip('\\n').lower() == 'verified', None", "def ipn(request, item_check_callable=None):\n flag = None\n ins_obj = None\n form = ClickBankINSForm(request.POST)\n if form.is_valid():\n try:\n ins_obj = form.save(commit=False)\n except Exception, e:\n flag = \"Exception while processing. (%s)\" % e\n else:\n flag = \"Invalid form. (%s)\" % form.errors\n\n if ins_obj is None:\n ins_obj = ClickBankINS()\n\n ins_obj.initialize(request)\n\n if flag is not None:\n ins_obj.set_flag(flag)\n else:\n if not ins_obj.verify_hash():\n return HttpResponse(\"INVALID HASH\")\n\n ins_obj.save()\n return HttpResponse(\"OKAY\")", "def nexmo_verify(request):\n number = request.validated[\"querystring\"][\"number\"]\n\n sender_id = nexmo_conf(request, \"sender_id\")\n params = {\n \"api_key\": nexmo_conf(request, \"api_key\"),\n \"api_secret\": nexmo_conf(request, \"api_secret\"),\n \"sender_id\": sender_id,\n \"code_length\": nexmo_conf(request, \"code_length\"),\n \"pin_expiry\": nexmo_conf(request, \"state_ttl_seconds\"),\n \"number\": number,\n \"brand\": nexmo_conf(request, \"brand\"),\n }\n\n verify_url = \"{}/verify/json\".format(\n nexmo_conf(request, \"api_endpoint\").rstrip(\"/\")\n )\n\n try:\n resp = requests.get(verify_url, params=params)\n except requests.exceptions.ConnectionError:\n logger.exception(\n \"A connection error occured when starting the nexmo auth process\"\n )\n error_msg = \"The Nexmo API is not ready, please retry later.\"\n return http_error(\n httpexceptions.HTTPServiceUnavailable(),\n errno=ERRORS.BACKEND,\n message=error_msg,\n )\n\n try:\n resp.raise_for_status()\n except requests.exceptions.HTTPError:\n logger.exception(\"An error occured when starting the auth process\")\n error_msg = \"The Nexmo API is not ready, please retry later.\"\n return http_error(\n httpexceptions.HTTPServiceUnavailable(),\n errno=ERRORS.BACKEND,\n message=error_msg,\n )\n\n data = resp.json()\n\n if data[\"status\"] == \"10\":\n description = (\n f\"An authentication request is already in progress for this number. 
\"\n f\"{data['error_text']}\"\n )\n error_details = {\n \"name\": \"number\",\n \"location\": \"querystring\",\n \"description\": description,\n }\n raise_invalid(request, **error_details)\n elif data[\"status\"] != \"0\":\n if data[\"status\"] in [\"6\", \"16\", \"19\"]: # pragma: no cover\n logger.info(\"Nexmo Verify Request failed: {}\".format(data))\n else:\n logger.error(\"Nexmo Verify Request failed: {}\".format(data))\n description = \"Something went wrong when trying to authenticate this number.\"\n error_details = {\n \"name\": \"number\",\n \"location\": \"querystring\",\n \"description\": description,\n }\n raise_invalid(request, **error_details)\n\n state = persist_state(request, {\"request_id\": data[\"request_id\"], \"number\": number})\n\n return {\"state\": state, \"sender_id\": sender_id}", "def has_paypal(self):\n from django.core.validators import validate_email\n try:\n validate_email(self.paypal_email)\n return True\n except ValidationError:\n return False", "def is_unverified(self):\n return self.get_status() == self.STATUS_UNVERIFIED", "def is_unverified(self):\n return self.get_status() == self.STATUS_UNVERIFIED", "def test_check_nip(client):\n is_assigned, request_id = client.check_nip(\n \"8655104670\", \"41146786026458860703735932\"\n )\n\n assert is_assigned", "def verify():", "def verify(self, response):", "def verify_payload():\n return True", "def verify():\n if flask.request.method == 'POST':\n req = flask.request.get_json(force=True)\n phone = req.get('phone')\n code = req['code']\n return check_verification(phone, code)", "def payment_verification(payload):\n response = requests.post(url, data=payload)\n return response.json()", "def update_paypal(sender, **kwargs):\n ipn_obj = sender\n try:\n payment = json.loads(ipn_obj.custom)\n\n # try to get payment. 
if not exist, exception will be catched\n p = Payment.objects.filter(id=payment.get('id'), token=payment.get('token')).get()\n\n # update payment\n p.method = constants.PAYPAL\n p.ipn = ipn_obj\n p.save()\n\n # if payment is completed, so valid\n if ipn_obj.payment_status == ST_PP_COMPLETED:\n # check correct price , currency and mail\n if int(ipn_obj.mc_gross) == int(p.price.price) and \\\n ipn_obj.mc_currency == 'EUR' and \\\n ipn_obj.business == settings.PAYPAL_RECEIVER_EMAIL:\n # all is OK, update state\n p.state = True\n p.save()\n sendmail_payment_success(p)\n else:\n # TODO: send alert / mail\n return\n except Payment.DoesNotExist:\n # TODO: send alert / mail\n pass\n except:\n # TODO: send alert / mail\n pass", "def verify_receipt(receipt_data, user=None):\n #data = json.dumps({'receipt-data': '{' + receipt_data + '}'})\n data = '{{\\n \"receipt-data\" : \"{}\" \\n}}'.format(receipt_data)\n\n def verify(url):\n tries = 3\n for try_ in range(1, tries + 1):\n try:\n req = urllib2.Request(url, data)\n resp = urllib2.urlopen(req, timeout=18) # app timeout is supposed to be 60\n return json.loads(resp.read())\n except (urllib2.URLError, socket_error) as e:\n if try_ == tries:\n raise e\n\n cleaned_data = verify(settings.IAP_VERIFICATION_URL)\n\n # See: http://developer.apple.com/library/ios/#technotes/tn2259/_index.html\n if cleaned_data['status'] == 21007:\n cleaned_data = verify(settings.IAP_VERIFICATION_SANDBOX_URL)\n\n if cleaned_data['status'] != 0:\n extra = {'status': cleaned_data['status']}\n if user is not None and user.is_authenticated():\n extra['username'] = user.username\n extra['response_from_apple'] = json.dumps(cleaned_data)\n client.captureMessage('IAP receipt validation failed', extra=extra)\n raise ValidationError(\"Your purchase went through, but there was an error processing it. 
Please contact support: support@example.com\")\n\n return cleaned_data['receipt']", "def test_successful_verification(self):\n for i in (-2, -1, 0, 1, 2):\n\n description = \"TOTP not verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertTrue(confirmed, description)\n\n self.relate.confirm = False", "def __verify(self):\r\n code = self.request.get('code')\r\n email = None\r\n error = False\r\n # resend if code is not given or in case of some error\r\n if code is not None and code != '':\r\n email = User.verify(code, self.request.remote_addr)\r\n if email is None:\r\n error = True\r\n\r\n if email is None:\r\n template_values = {\r\n 'user_email': self.user_email,\r\n 'error': error\r\n }\r\n template = self.jinja2_env.get_template('verification.html')\r\n self.response.out.write(template.render(template_values))\r\n\r\n # message\r\n template_values = {\r\n 'user_email': self.user_email,\r\n 'message': self.gettext('THANK_YOU')\r\n }\r\n template = self.jinja2_env.get_template('staticmessage.html')\r\n self.response.out.write(template.render(template_values))", "def verify_payment(self, order_id, ref_id):\n try:\n client = Client(self.service_address)\n res = client.service.bpVerifyRequest(terminalId=self.terminalId,\n userName=self.userName,\n userPassword=self.userPassword,\n orderId=order_id,\n saleOrderId=order_id,\n saleReferenceId=ref_id)\n return True, res\n except WebFault as f:\n return False, f.fault.faultstring\n except Exception as e:\n return False, e.message", "def paypal_notification(request, payment_mode='paypal'):\r\n try:\r\n data = request.POST\r\n _log.debug(\"PayPal IPN data: %s\", repr(data))\r\n\r\n if not paypal.verify_ipn_request(request):\r\n return HttpResponse()\r\n\r\n if data.get('payment_status', None) != \"Completed\":\r\n # Do not insert payments whose status is not \"Completed\".\r\n _log.debug(\"Ignored IPN data for incomplete payment.\")\r\n return HttpResponse()\r\n\r\n currency = data.get('mc_currency', settings.CURRENCY_DEFAULT)\r\n if currency.upper() not in settings.CURRENCIES_SUPPORTED:\r\n # We do not support anything other than USD.\r\n _log.debug(\"Ignored IPN data for unsupported currency %s\", currency)\r\n return HttpResponse()\r\n\r\n pending_contribution_id, username = data['custom'].split('~') # pending_contrib_id~buyer's_username\r\n is_anon = username == 'anonymous'\r\n transaction_id = data['txn_id']\r\n qty = data['quantity']\r\n artist_email = data['receiver_email']\r\n campaign_id = data['item_number']\r\n amount = data['mc_gross']\r\n is_test = data.get('test_ipn', 0) == 1\r\n\r\n contribs = Contribution.objects.filter(transaction_id=transaction_id, payment_mode=payment_mode).count()\r\n if not contribs:\r\n # This transaction hasn't already been processed.\r\n # Process it and update the ``memo`` field if it has been provided by the buyer.\r\n if is_anon:\r\n _log.debug(\"Processing anonymous contribution\")\r\n contributor = User.objects.get(username='anonymous')\r\n campaign = Campaign.objects.get(pk=campaign_id)\r\n contrib = campaign.contribution_set.create(\r\n contributor=contributor,\r\n amount=amount,\r\n qty=qty,\r\n payment_mode=payment_mode,\r\n transaction_id=transaction_id,\r\n memo=data.get('memo', '')\r\n )\r\n _log.info(\"PayPal (tx: %s) anonymous contribution recorded: %s\", transaction_id, contrib)\r\n else:\r\n pending_contrib = PendingContribution.objects.get(pk=pending_contribution_id,\r\n 
contributor__username=username,\r\n campaign=campaign_id,\r\n amount=amount,\r\n qty=qty,\r\n payment_mode=payment_mode)\r\n if pending_contrib:\r\n contrib = pending_contrib.process_payment_notification(transaction_id, data.get('memo', ''))\r\n _log.info(\"PayPal transaction %s resolved. Contribution recorded: %s\", transaction_id, contrib)\r\n else:\r\n _log.error(\"PayPal transaction %s could not be resolved.\", transaction_id)\r\n except:\r\n _log.exception(''.join(format_exception(*exc_info())))\r\n return HttpResponse()", "def verify_email(uid, token):\n return True", "def test_verify_email(live_server):\n user = get_user_model().objects.create_user(username=\"test\")\n email = models.EmailAddress.objects.create(\n address=\"test@example.com\", user=user\n )\n verification = models.EmailVerification.objects.create(email=email)\n\n data = {\"token\": verification.token}\n url = f\"{live_server}/rest/email-verifications/\"\n response = requests.post(url, data)\n\n assert response.status_code == 201\n assert response.json() == {}", "def verified(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"verified\")", "def verify(self, timeout=15):\n processed_host = (self.host.replace('sftp://', '')\n .replace('ftp://', '')\n #.replace('www.', '')\n .replace('https://', '')\n .replace('http://', '')\n .strip())\n protocol = self.protocol\n if protocol in ('ftp', 'ftps'):\n f = self._verify_ftp\n elif protocol == 'sftp':\n f = self._verify_sftp\n else:\n f = self._verify_spurious\n\n self.verified, self.verification_message = f(processed_host, timeout)\n self.last_verified = timezone.now()\n self.save(update_fields=['verified', 'verification_message',\n 'last_verified'])", "def verify_postcode_api(self):\n\n assert type(self.postcodes) == str, \"To use this method, the postcode cannot be an iterable.\"\n request_path = requests.get(self.path + self.postcodes, verify=False)\n response_code = str(request_path)\n\n if response_code == '<Response [200]>':\n verification_status = 'Verified'\n elif response_code == '<Response [404]>':\n verification_status = 'Invalid Postcode'\n elif response_code == '<Response [400]':\n verification_status = 'No Postcode Submitted'\n elif response_code == '<Response [500]':\n verification_status = 'Server error'\n else:\n verification_status = 'Invalid Postcode'\n return verification_status", "def verify_token(vial_http: urllib3.connectionpool.ConnectionPool) -> bool:\n verify_resp = vial_http.request(\"GET\", \"/api/verifyToken\")\n return verify_resp.status == 200", "def do_verify(self, args):\n\n pn = \\\n self._get_choice_(\"pn\", self.promissory_notes, \"Which promissory note needs to be verified?\")\n\n try:\n verify_promissory_note(pn)\n except Exception as e:\n self._print_exception_(e)\n return\n\n print(\"Promissory note is correct.\\n\")", "def verify(self):\n data = [\"rfc\", \"tel\", \"email\", \"name\", \"use\"]\n state = False\n for item in data:\n if getattr(self, item + \"Input\").text() != \"\":\n state = True\n else:\n return False\n return state", "def verify(self):\n if self.geturl():\n return True\n return False", "def send_verification(self):\n pass", "def test_process_postpay_not_accepted(self):\r\n student1 = UserFactory()\r\n student1.save()\r\n\r\n order1 = Order.get_cart_for_user(student1)\r\n params = {\r\n 'card_accountNumber': '1234',\r\n 'card_cardType': '001',\r\n 'billTo_firstName': student1.first_name,\r\n 'orderNumber': str(order1.id),\r\n 'orderCurrency': 'usd',\r\n 'decision': 'REJECT',\r\n 'ccAuthReply_amount': 
'0.00',\r\n 'reasonCode': '207'\r\n }\r\n result = process_postpay_callback(params)\r\n self.assertFalse(result['success'])\r\n self.assertEqual(result['order'], order1)\r\n self.assertEqual(order1.status, 'cart')\r\n self.assertIn(REASONCODE_MAP['207'], result['error_html'])", "def sident_verify(self, connection):\n sident_verify_msg = {'type':'sident_verify',\n 'timestamp':calendar.timegm(time.gmtime())}\n self._send_queue.put((sident_verify_msg, connection))\n return True", "async def verify(token: TextData, background_tasks: BackgroundTasks):\n token_data = token.data\n mail, subject, body = await AccountProcessor.confirm_email(token_data)\n background_tasks.add_task(Utility.validate_and_send_mail, email=mail, subject=subject, body=body)\n return {\"message\": \"Account Verified!\"}", "def verify_email_address(\n email,\n from_host='i3visio.com',\n from_email='verify@i3visio.com'\n ):\n e = VerifyEmail()\n\n try:\n status = e.verify(email, from_host, from_email)\n if status == e.EMAIL_FOUND:\n return 1\n except Exception:\n return -1\n return 0", "def isin_valid(record):\n country_of_issuance = record['country_of_issuance']\n cusip = str(record['cusip'])\n expected_isin = country_of_issuance + cusip + '4'\n assert record['isin'] == expected_isin", "def verifies_ok(email, val, verification):\r\n if verification.get(\"VerifyMethod\") != \"FoldItVerify\":\r\n log.debug(\"VerificationMethod in %r isn't FoldItVerify\", verification)\r\n return False\r\n hash_str = verification.get(\"Verify\")\r\n\r\n return verify_code(email, val) == hash_str", "def test_validate_ip(self, ip, version, expected_result):\n # Call method under test\n test_result = validate_ip(ip, version)\n\n # Assert\n self.assertEqual(expected_result, test_result)", "def check(secret,\n response,\n remote_ip=None,\n check_url=DEFAULT_RECAPTCHA_CHECK_URL):\n return check_detailed(secret,\n response,\n remote_ip,\n check_url)['success']", "def has_verified_email(self):\n url = (\"https://api.imgur.com/3/account/{0}/\"\n \"verifyemail\".format(self.name))\n return self._imgur._send_request(url, needs_auth=True)", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def verify(self):\n pass", "def verify(self):\n pass", "def check_new_payment_authcode(self, request: HttpRequest):\n return self.check_authcode_params(\n request,\n (\n \"RETURN_CODE\",\n \"ORDER_NUMBER\",\n \"SETTLED\",\n \"CONTACT_ID\",\n \"INCIDENT_ID\",\n ),\n )", "def test_process_postpay_accepted(self):\r\n student1 = UserFactory()\r\n student1.save()\r\n\r\n order1 = Order.get_cart_for_user(student1)\r\n params = {\r\n 'card_accountNumber': '1234',\r\n 'card_cardType': '001',\r\n 'billTo_firstName': student1.first_name,\r\n 'orderNumber': str(order1.id),\r\n 'orderCurrency': 'usd',\r\n 'decision': 'ACCEPT',\r\n 'ccAuthReply_amount': '0.00'\r\n }\r\n result = process_postpay_callback(params)\r\n self.assertTrue(result['success'])\r\n self.assertEqual(result['order'], order1)\r\n order1 = Order.objects.get(id=order1.id) # reload from DB to capture side-effect of process_postpay_callback\r\n self.assertEqual(order1.status, 'purchased')\r\n self.assertFalse(result['error_html'])", "def verify_email(self, request, *args, **kwargs):\n verified_key_text = getattr(settings, 
\"VERIFIED_KEY_TEXT\", None)\n\n if not verified_key_text:\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n redirect_url = request.query_params.get(\"redirect_url\")\n verification_key = request.query_params.get(\"verification_key\")\n response_message = _(\"Missing or invalid verification key\")\n if verification_key:\n registration_profile = None\n try:\n registration_profile = RegistrationProfile.objects.select_related(\n \"user\", \"user__profile\"\n ).get(activation_key=verification_key)\n except RegistrationProfile.DoesNotExist:\n with use_master:\n try:\n registration_profile = (\n RegistrationProfile.objects.select_related(\n \"user\", \"user__profile\"\n ).get(activation_key=verification_key)\n )\n except RegistrationProfile.DoesNotExist:\n pass\n\n if registration_profile:\n registration_profile.activation_key = verified_key_text\n registration_profile.save()\n\n username = registration_profile.user.username\n set_is_email_verified(registration_profile.user.profile, True)\n # Clear profiles cache\n safe_delete(f\"{USER_PROFILE_PREFIX}{username}\")\n\n response_data = {\"username\": username, \"is_email_verified\": True}\n\n if redirect_url:\n query_params_string = urlencode(response_data)\n redirect_url = f\"{redirect_url}?{query_params_string}\"\n\n return HttpResponseRedirect(redirect_url)\n\n return Response(response_data)\n\n return HttpResponseBadRequest(response_message)", "def test_verified_consumer(self):\n self.prep_consumer()\n self.consumer.is_email_verified = True\n self.consumer.save()\n UnqualifiedConsumerEmailTask().run(test_mode=self.consumer)\n self.common_asserts()\n self.assertTrue('Use this link to confirm your email address.' not in\n mail.outbox[0].body)\n self.assertTrue('Confirm your email address with a single click.' not in\n mail.outbox[0].alternatives[0][0])\n self.assertEqual(mail.outbox[0].cc, [])\n self.assertTrue('Provide your cell phone number' \n in mail.outbox[0].alternatives[0][0])\n self.assertTrue('Provide your cell phone number. 
Follow this link:' \n in mail.outbox[0].body)", "def verify(self):\r\n pass", "def verify(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"verify\")", "def test_authorize_pending_payment(self):\n pass", "def nexmo_check(request):\n state = request.validated[\"querystring\"][\"state\"]\n code = request.validated[\"querystring\"][\"code\"]\n\n # Require on-going session\n state_info = request.registry.cache.get(state)\n\n if not state_info:\n error_msg = \"The Nexmo session was not found, please re-authenticate.\"\n return http_error(\n httpexceptions.HTTPRequestTimeout(),\n errno=ERRORS.MISSING_AUTH_TOKEN,\n message=error_msg,\n )\n else:\n state_info = json.loads(state_info)\n\n params = {\n \"api_key\": nexmo_conf(request, \"api_key\"),\n \"api_secret\": nexmo_conf(request, \"api_secret\"),\n \"request_id\": state_info[\"request_id\"],\n \"code\": code,\n }\n\n check_url = \"{}/verify/check/json\".format(\n nexmo_conf(request, \"api_endpoint\").rstrip(\"/\")\n )\n\n try:\n resp = requests.get(check_url, params=params)\n except requests.exceptions.ConnectionError:\n logger.exception(\n \"A connection error occured when trying to validate the auth code\"\n )\n error_msg = \"The Nexmo API is not ready, please retry later.\"\n return http_error(\n httpexceptions.HTTPServiceUnavailable(),\n errno=ERRORS.BACKEND,\n message=error_msg,\n )\n\n try:\n resp.raise_for_status()\n except requests.exceptions.HTTPError:\n logger.exception(\"An error occured when trying to validate the auth code\")\n error_msg = \"The Nexmo API is not ready, please retry later.\"\n return http_error(\n httpexceptions.HTTPServiceUnavailable(),\n errno=ERRORS.BACKEND,\n message=error_msg,\n )\n\n data = resp.json()\n\n if data[\"status\"] != \"0\":\n logger.info(\"Nexmo Code Validation Failed: {}\".format(data))\n error_details = {\n \"name\": \"code\",\n \"location\": \"querystring\",\n \"description\": \"Nexmo code validation failed.\",\n }\n raise_invalid(request, **error_details)\n\n # Make sure we cannot try twice with the same state\n request.registry.cache.delete(state)\n\n exp = datetime.utcnow() + timedelta(\n seconds=int(nexmo_conf(request, \"cache_ttl_seconds\"))\n )\n\n # Build JWT Access Token\n access_token = jwt.encode(\n {\"number\": state_info[\"number\"], \"exp\": exp},\n nexmo_conf(request, \"jwt_secret\"),\n algorithm=\"HS256\",\n ).decode(\"utf-8\")\n\n return {\"access_token\": access_token}", "def verify(self):\n self.verify_checksums()\n self.verify_apk_signature()\n self.verify_apk_signature_fprs()", "def verify_pending_survey_token(token):\n logger.info(\"Attempting to verify share/transfer survey token with party service\", token=token)\n\n url = f\"{app.config['PARTY_URL']}/party-api/v1/pending-survey/verification/{token}\"\n response = requests.get(url, auth=app.config[\"BASIC_AUTH\"])\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError:\n logger.error(\"Failed to verify share/transfer survey token\", token=token)\n raise ApiError(logger, response)\n\n logger.info(\"Successfully verified token\", token=token)\n return response", "def test_pass_result(self):\r\n data = {\r\n \"EdX-ID\": self.receipt_id,\r\n \"Result\": \"PASS\",\r\n \"Reason\": \"\",\r\n \"MessageType\": \"You have been verified.\"\r\n }\r\n json_data = json.dumps(data)\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'), data=json_data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',\r\n 
HTTP_DATE='testdate'\r\n )\r\n attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)\r\n self.assertEqual(attempt.status, u'approved')\r\n self.assertEquals(response.content, 'OK!')", "def payment_check(payload):\n response = requests.post(url, data=payload)\n return response.json()", "def verify_email(request, uidb64, token):\n user = request.user\n try:\n uid = force_text(urlsafe_base64_decode(uidb64))\n associated_email = AssociatedEmail.objects.get(pk=uid)\n except (TypeError, ValueError, OverflowError, AssociatedEmail.DoesNotExist):\n associated_email = None\n\n if associated_email is not None and associated_email.user == user:\n # Test that the token is correct\n if associated_email.check_token(token):\n associated_email.verification_date = timezone.now()\n associated_email.is_verified = True\n associated_email.save()\n if not user.is_credentialed:\n check_legacy_credentials(user, associated_email.email)\n logger.info('User {0} verified another email {1}'.format(user.id, associated_email))\n messages.success(request, 'The email address {} has been verified.'.format(\n associated_email))\n return redirect('edit_emails')\n\n logger.warning('Invalid Verification Link')\n return render(request, 'user/verify_email.html',\n {'title':'Invalid Verification Link', 'isvalid':False})", "def check(request, response_key='response'):\n response = (request.POST.get(response_key, None)\n or request.GET.get(response_key, None))\n remote_ip = get_ip(request)\n return base_check(settings.RECAPTCHA_SHARED_SECRET,\n response,\n remote_ip)", "def send_verification_email(self):\n url = (\"https://api.imgur.com/3/account/{0}\"\n \"/verifyemail\".format(self.name))\n self._imgur._send_request(url, needs_auth=True, method='POST')", "def verify_remote_site_has_policy(self, tenant_name, l3out_name, instp_name):\n site2 = Session(SITE2_URL, SITE2_LOGIN, SITE2_PASSWORD)\n resp = site2.login()\n self.assertTrue(resp.ok)\n\n query = ('/api/mo/uni/tn-%s/out-%s/instP-%s.json' % (tenant_name, l3out_name, instp_name))\n resp = site2.get(query)\n self.assertTrue(resp.ok)\n\n found = False\n for item in resp.json()['imdata']:\n if 'l3extInstP' in item:\n found = True\n break\n if not found:\n return False\n return True", "def send_verification(self):\n secret_key = app.config['CONTACT_VERIFY_SECRET']\n base_url = app.config['URLS']['BASE_URL']\n verify_url = contact_verify_url(self.contact.id, base_url, secret_key)\n variables = {\n \"username\": self.contact.user.username,\n \"verify_link\": verify_url\n }\n send_template_email(recipients=[self.identifier],\n subject=\"Verify your Rmnd.in Contact\",\n from_address=\"accounts@rmnd.in\",\n variables=variables,\n template=\"email/verify_contact_email\")", "def test_validate_ip_ok():\n ip = '1.1.1.1'\n assert howisresolved.validate_ip(ip) is None", "def verify(self, code) -> bool:\n totp = self.__initialize_totp()\n return totp.verify(code)", "def verify_email(request):\n user = User.objects.get(username=request.user)\n if request.method == 'POST':\n otp = request.data.get('otp')\n if not otp:\n return Response({'message':\"We cannot find your otp\"}, status=status.HTTP_400_BAD_REQUEST)\n\n #Get token\n qs = ResetRequests.objects.filter(user=user, token=otp, use_case = 'account confirmation')\n if not qs.exists():\n return Response({'message':'Wrong Token.'}, status=status.HTTP_400_BAD_REQUEST)\n\n #Grab the last token\n token_request = qs.last()\n timer = token_request.created_at\n\n #Check token expiry\n if timezone.now() > timer + 
timezone.timedelta(minutes=10):\n return Response({'message':'Token Expired. Request another please.'}, status=status.HTTP_400_BAD_REQUEST)\n\n #Check whether token has been used.\n if token_request.consumed:\n return Response({\"message\":\"Pin has been used already\"}, status=status.HTTP_400_BAD_REQUEST)\n\n if int(otp) == int(token_request.token):\n #Set user as verified\n user.email_verified = True\n user.save()\n #Set token as consumed\n token_request.consumed = True\n token_request.save()\n\n #Send Confirmation Mail\n email_subject = \"SpendWise - Account Verified.\"\n email_msg = \"Your account has been verified. Welcome to the SpendWise Ecosystem\"\n try:\n sendEmail(user, email_subject, \"Account Verified\", information=email_msg)\n return Response({'message':'User account successfully verified.'}, status=status.HTTP_200_OK)\n except:\n return Response({'message':'We could not send a confirmation email'}, status=status.HTTP_200_OK)\n\n\n if request.method == 'GET':\n to = User.objects.get(username=request.user).email\n pin = random.randint(0, 1000000)\n #presumes this link is only reachable cos the user already has an email.\n to = user.email\n try:\n subject = \"Account Confirmation.\"\n message = f\"Your Account Confirmation code is {pin}\\n\\nExpires in 10 minutes.\"\n sendEmail(user, subject, \"Account Confirmation\", information=message, otp=pin)\n\n #Write to user's record\n ResetRequests.objects.create(\n user = user,\n token = pin,\n use_case = 'account confirmation'\n )\n #Add password reset request date here\n return Response({'message':'Token sent to registered email.',\n 'email' : to},\n status=status.HTTP_200_OK)\n except Exception as e:\n return Response({'message':'We could not send an email', 'error':e},\n status=status.HTTP_400_BAD_REQUEST)\n\n #Do the actual verification\n #Verified is alrady possibly True via sms. 
What happens now?", "def verify(self):", "async def verify(self, data, signature):\n\t\tsignature_struct = NTLMSSP_MESSAGE_SIGNATURE.from_bytes(signature)\n\t\tcalc_sig = self.MAC(self.crypthandle_server.encrypt, self.SignKey_server, signature_struct.SeqNum, data)\n\t\t#print('server signature : %s' % signature)\n\t\t#print('calculates signature: %s' % calc_sig)\n\t\treturn signature == calc_sig", "def test_signup_verification(self):\n resp = self.client.post(self.signup_url, self.test_credential)\n\n self.assertEqual(len(mail.outbox), 1)\n email_message = str(mail.outbox[0].message())\n # Verification model instance should be created.\n sv = SignupVerification.objects.get(user__username=self.test_credential['username'])\n self.assertTrue(sv.key)\n self.assertIn(sv.key, email_message)", "def test_check_email(self):\n url = reverse('check_email')\n data = {\"emails\": [\"shashank.shekhar@vgmail.in\"]}\n response_data = {\"results\": [{\"email\": \"shashank.shekhar@vgmail.in\", \"blocked\": True}], \"success\": True}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data, response_data)", "def test_default_unsuccessful_verify_request(self, cred):\n # make the initial request\n resp = requests.get(verify_url.format('xml', cred[0], cred[1],\n 'TestApp', test_number))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'text/plain'\n assert resp.text.startswith('<?xml version=\"1.0\" encoding=\"UTF-8\"?>')\n tree = ElementTree.fromstring(resp.text)\n assert tree[0].tag == 'request_id' and len(tree[0].text) <= 32\n assert tree[1].tag == 'status' and tree[1].text == '0'\n # now enter invalid verify code 3 times to terminate verification process\n # first invalid code check\n request_id = tree[0].text\n resp = requests.get(check_url.format('xml', cred[0], cred[1],\n request_id, '00000'))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'text/plain'\n assert resp.text.startswith('<?xml version=\"1.0\" encoding=\"UTF-8\"?>')\n tree = ElementTree.fromstring(resp.text)\n assert tree[0].tag == 'request_id' and tree[0].text == request_id\n assert tree[1].tag == 'status' and tree[1].text == '16'\n assert tree[2].tag == 'error_text' and tree[2].text == code_does_not_match_msg\n # second invalid check\n resp = requests.get(check_url.format('xml', cred[0], cred[1],\n request_id, '00000'))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'text/plain'\n assert resp.text.startswith('<?xml version=\"1.0\" encoding=\"UTF-8\"?>')\n tree = ElementTree.fromstring(resp.text)\n assert tree[0].tag == 'request_id' and tree[0].text == request_id\n assert tree[1].tag == 'status' and tree[1].text == '16'\n assert tree[2].tag == 'error_text' and tree[2].text == code_does_not_match_msg\n # third invalid check\n resp = requests.get(check_url.format('xml', cred[0], cred[1],\n request_id, '00000'))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'text/plain'\n assert resp.text.startswith('<?xml version=\"1.0\" encoding=\"UTF-8\"?>')\n tree = ElementTree.fromstring(resp.text)\n # assert 'request_id' not in [child.tag for child in tree]\n assert tree[1].tag == 'status' and tree[1].text == '17'\n assert tree[2].tag == 'error_text' and tree[2].text == workflow_terminated_msg", "def verify_trust(TrustId=None):\n pass", "def verify_mail(self):\n raise NotImplementedError", "def _verify(self):\n pass", "def verify(verification_code):\n 
verification.verify(verification_code)", "def verify_vn_in_api_server(self):\n self.api_verification_flag = True\n self.api_s_vn_obj = self.api_s_inspect.get_cs_vn(\n domain=self.domain_name, project=self.project_name,\n vn=self.vn_name, refresh=True)\n if not self.api_s_vn_obj:\n self.logger.debug(\"VN %s is not found in API-Server\" %\n (self.vn_name))\n self.api_verification_flag = self.api_verification_flag and False\n return False\n if self.api_s_vn_obj['virtual-network']['uuid'] != self.uuid:\n self.logger.warn(\n \"VN Object ID %s in API-Server is not what was created\" % (self.uuid))\n self.api_verification_flag = self.api_verification_flag and False\n return False\n\n subnets = list()\n for ipam in self.api_s_vn_obj['virtual-network']['network_ipam_refs']:\n subnets.extend(ipam['attr']['ipam_subnets'])\n for vn_subnet in self.vn_subnets:\n subnet_found = False\n vn_subnet_cidr = str(IPNetwork(vn_subnet['cidr']).ip)\n for subnet in subnets:\n if subnet['subnet']['ip_prefix'] == vn_subnet_cidr:\n subnet_found = True\n if not subnet_found:\n self.logger.warn(\n \"VN Subnet IP %s not found in API-Server for VN %s\" %\n (vn_subnet_cidr, self.vn_name))\n self.api_verification_flag = self.api_verification_flag and False\n return False\n # end for\n self.api_s_route_targets = self.api_s_inspect.get_cs_route_targets(\n vn_id=self.uuid)\n if not self.api_s_route_targets:\n errmsg = \"Route targets not yet found in API-Server for VN %s\" % self.vn_name\n self.logger.error(errmsg)\n self.api_verification_flag = self.api_verification_flag and False\n return False\n self.rt_names = self.api_s_inspect.get_cs_rt_names(\n self.api_s_route_targets)\n\n if not self.rt_names:\n self.logger.debug(\n 'RT names not yet present for VN %s', self.vn_name)\n return False\n\n if self.rt_number:\n if not any(item.endswith(self.rt_number) for item in self.rt_names):\n self.logger.debug('RT %s is not found in API Server RT list %s ' %(\n self.rt_number, self.rt_names))\n self.api_verification_flag = self.api_verification_flag and False\n return False\n\n self.api_s_routing_instance = self.api_s_inspect.get_cs_routing_instances(\n vn_id=self.uuid)\n if not self.api_s_routing_instance:\n msg = \"Routing Instances not found in API-Server for VN %s\" % self.vn_name\n self.logger.warn(msg)\n self.api_verification_flag = self.api_verification_flag and False\n return False\n self.ri_ref = self.api_s_routing_instance['routing_instances'][0]['routing-instance']\n if not self.verify_network_id():\n return False\n self.api_verification_flag = self.api_verification_flag and True\n self.logger.info(\"Verifications in API Server for VN %s passed\" %\n (self.vn_name))\n return True", "def request_verification_bypass(request, env, email):\n if request.method == 'POST':\n oauth_client = OAUTHCLIENT(env)\n token = oauth_client.get_token()\n content = {'message': email + \" has been requested for By-pass to \" + env}\n\n if 'access_token' in token:\n if env == 'qa32':\n host = 'http://qajb101.p2pcredit.local/users/email/'\n elif env == 'stg':\n host = 'http://stage-api-proxy-A.vip.c1.stg/users/email/'\n elif env == 'qa20':\n host = 'http://np97.c1.dev/users/email/'\n\n # create header with access token\n headers = {'Authorization': token['token_type'] + ' ' + token['access_token']}\n\n # request email verification by-pass with access-token\n response = requests.get(\n host + email,\n headers=headers\n )\n\n response_json = response.json()\n\n # build response message\n if response_json['email_exists']:\n if 
response_json['activation_key'] == \"\":\n content['result'] = \"VERIFIED\"\n content['message'] = email + \" is auto-verified on \" + env\n else:\n content['result'] = \"NOT VERIFIED\"\n content['message'] = email + \" is not verified yet on \" + env + \\\n \". Please verify your email by clicking 'Verify Email' link.\"\n else:\n content['result'] = \"USER NOT FOUND\"\n content['message'] = email + \" is not found on \" + env\n\n response_status = status.HTTP_200_OK\n content['response'] = response_json\n else:\n content['result'] = str(token)\n response_status = status.HTTP_500_INTERNAL_SERVER_ERROR\n content['response'] = 'No token generated'\n\n return Response(content, status=response_status)", "def verify_key(self, providerkey = None):\n h = Https(API_DOMAIN)\n\n data = {'apikey' : self.apikey}\n\n if providerkey is not None:\n data['providerkey'] = providerkey\n\n h.request( \"GET\",\n \"/publicapi/verify\"+ urlencode(data),\n headers=self.headers)\n\n request_status = h.getresponse().status\n\n if request_status != 200:\n raise Exception(\"Invalid API Key %s\" % self.apikey)", "def verify_capture(self, capture):\n info = None\n seen = set()\n for packet in capture:\n try:\n self.logger.debug(ppp(\"Got packet:\", packet))\n ip = packet[IP]\n icmp = packet[ICMP]\n payload_info = self.payload_to_info(packet[Raw])\n packet_index = payload_info.index\n if packet_index in seen:\n raise Exception(ppp(\"Duplicate packet received\", packet))\n seen.add(packet_index)\n self.assertEqual(payload_info.dst, self.src_dst_if.sw_if_index)\n info = self._packet_infos[packet_index]\n self.assertIsNotNone(info)\n self.assertEqual(packet_index, info.index)\n saved_packet = info.data\n self.assertEqual(ip.src, saved_packet[IP].dst)\n self.assertEqual(ip.dst, saved_packet[IP].src)\n self.assertEqual(icmp.type, 0) # echo reply\n self.assertEqual(icmp.id, saved_packet[ICMP].id)\n self.assertEqual(icmp.payload, saved_packet[ICMP].payload)\n except Exception:\n self.logger.error(ppp(\"Unexpected or invalid packet:\", packet))\n raise\n for index in self._packet_infos:\n self.assertIn(\n index, seen, \"Packet with packet_index %d not received\" % index\n )", "def check_detailed(secret,\n response,\n remote_ip=None,\n check_url=DEFAULT_RECAPTCHA_CHECK_URL):\n check_data = {\n 'secret': secret,\n 'response': response}\n if remote_ip:\n check_data['remoteip'] = remote_ip\n reply = requests.post(check_url, check_data).json()\n result = {\n 'success': reply['success'],\n 'timestamp': parse_date(reply['challenge_ts']),\n 'hostname': reply['hostname'],\n }\n if 'error-codes' in reply:\n result['error'] = reply['error-codes']\n return result", "def verify_email(request):\n return HttpResponse('Not implemented yet.')", "def test_successful_email_verification(self):\n self.signup_a_user(self.user_data)\n time = datetime.now() + timedelta(hours=24)\n token = jwt.encode({\n \"email\": self.user_data['user']['email'],\n \"username\": self.user_data['user']['username'],\n \"exp\": int(time.strftime('%s'))\n }, settings.SECRET_KEY, algorithm='HS256').decode('utf-8')\n verification_url = reverse(\n 'authentication:verify_email', kwargs={'token': token})\n\n response = self.client.get(\n verification_url,\n HTTP_AUTHORIZATION=f'token {token}'\n )\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def verify_email(nickname, quiet):\n\n try:\n account = Account.query.filter_by(nickname=nickname).one()\n except NoResultFound:\n print(f\"Account {nickname} not found\")\n return\n gmail = GmSync.from_account(account, 
load_config(not quiet))\n gmail.verify()", "def render_POST(self, request):\n log.msg(\"Paypal callback:\")\n log.msg(request.args)\n\n d = self.verify(request)\n d.addCallback(lambda ign: self._process(request.args))\n d.addErrback(log.err)\n return ''", "def PostVerifyEmail(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def verify_phone_number(request, domain, couch_user_id):\n if 'phone_number' not in request.GET:\n return Http404('Must include phone number in request.')\n phone_number = urllib.unquote(request.GET['phone_number'])\n user = CouchUser.get_by_user_id(couch_user_id, domain)\n\n # send verification message\n smsverify.send_verification(domain, user, phone_number)\n\n # create pending verified entry if doesn't exist already\n user.save_verified_number(domain, phone_number, False, None)\n\n return HttpResponseRedirect(reverse(\"user_account\", args=(domain, couch_user_id )))", "def verify(request):\n return HttpResponse(status=200)", "def post(self):\n data = EmailAddressValidationSchema().load(request.json)\n email_lowercase = data[\"email\"].lower()\n verification_token = data[\"verificationToken\"]\n UserRegistrationService.validate_email_address_verification_token(email_lowercase, verification_token)", "def processTrustResult(request):\n # Get the request from the session so we can construct the\n # appropriate response.\n openid_request = getRequest(request)\n\n # The identifier that this server can vouch for\n response_identity = getViewURL(request, idPage)\n\n # If the decision was to allow the verification, respond\n # accordingly.\n allowed = 'allow' in request.POST\n\n # Generate a response with the appropriate answer.\n openid_response = openid_request.answer(\n allowed, identity=response_identity)\n\n # Send Simple Registration data in the response, if appropriate.\n if allowed:\n sreg_data = {\n 'fullname': 'Example User',\n 'nickname': 'example',\n 'dob': '1970-01-01',\n 'email': 'invalid@example.com',\n 'gender': 'F',\n 'postcode': '12345',\n 'country': 'ES',\n 'language': 'eu',\n 'timezone': 'America/New_York',\n }\n\n sreg_req = sreg.SRegRequest.fromOpenIDRequest(openid_request)\n sreg_resp = sreg.SRegResponse.extractResponse(sreg_req, sreg_data)\n openid_response.addExtension(sreg_resp)\n\n pape_response = pape.Response()\n pape_response.setAuthLevel(pape.LEVELS_NIST, 0)\n openid_response.addExtension(pape_response)\n\n return displayResponse(request, openid_response)", "def test_deny_pending_payment(self):\n pass", "def check_snyk_link(context):\n json_data = context.response.json()\n print(json_data['registration_link'])\n assert \"registration_link\" in json_data, \"No snyk link found in the result\"", "def test_vpn_id():\n for v in vpns:\n assert len(v.id) > 0", "async def verify(self,ctx,ign='',region=''):\r\n if ign =='' or region =='':\r\n await self.bot.say(\"Please type in a ign and region.\")\r\n return\r\n if not ctx.message.channel.is_private: #Makes sure channel is private\r\n await self.bot.say('Sorry. 
But this process must be done in a private message, to continue please dm the bot ```{}```'.format(ctx.message.content))\r\n return\r\n try:\r\n pattern = verify.start(ctx.message.author.id, ign,region)\r\n except Exception as e:\r\n await self.bot.say('Error: ' + str(e)+'\\n\\nJoin http://discord.me for more info.')\r\n return\r\n pattern_ = '{} Halcyon Potions, {} Weapon Infusions, and {} Crystal Infusions'.format(str(pattern.count(0)), str(pattern.count(1)), str(pattern.count(2)))\r\n await self.bot.say(\"Awesome. To complete the authorization process.\\n• Enter a **blitz** match\\n• Buy **{}** for your first {} items.\\n• **You can sell them immediately at the same price.**\\n• This must be your next match.\\n• **Once you are done please type {}check to complete authorization process.** Once this is done, your account will be linked and authenticated permanantly.\".format(pattern_,len(pattern), self.bot.command_prefix[0]))\r\n\r\n await asyncio.sleep(345)\r\n\r\n await self.bot.send_message(ctx.message.author, verify.check(ctx.message.author.id))", "def otp_validation(payment_id=None, transaction_id=None,eci_flag=None):\n\n url = purchase_endpoint + '/api/v3/purchases/otps/auths'\n content_type = 'application/json'\n token = getAccessToken()['access_token']\n authorisation = 'Bearer {}'.format(token)\n signature, nonce,time_stamp = signatureCipher(url=url)\n signature_method = 'SHA1'\n authkeyversion = '1'\n\n headers = {'Content-Type':content_type, 'Authorization':authorisation, 'Timestamp':time_stamp,'Nonce':nonce,\n 'Signature':signature, 'SignatureMethod':signature_method, 'AuthKeyVersion':authkeyversion}\n data = {'paymentId': str(payment_id),'transactionId':str(transaction_id), 'eciFlag':str(eci_flag)}\n\n make_request = requests.post(url, data=json.dumps(data), headers=headers)\n visa_purchase = make_request.json()\n return visa_purchase", "def test_default_unsuccessful_verify_request(self, cred):\n # make the initial request\n resp = requests.get(verify_url.format('json', cred[0], cred[1],\n 'TestApp', test_number))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '0'\n assert len(resp.json()['request_id']) <= 32\n # now enter invalid verify code 3 times to terminate verification process\n # first invalid code check\n request_id = resp.json()['request_id']\n resp = requests.get(check_url.format('json', cred[0], cred[1],\n request_id, '00000'))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '16'\n assert resp.json()['request_id'] == request_id\n assert resp.json()['error_text'] == code_does_not_match_msg\n # second invalid check\n resp = requests.get(check_url.format('json', cred[0], cred[1],\n request_id, '00000'))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '16'\n assert resp.json()['request_id'] == request_id\n assert resp.json()['error_text'] == code_does_not_match_msg\n # third invalid check\n resp = requests.get(check_url.format('json', cred[0], cred[1],\n request_id, '00000'))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json'\n assert resp.json()['status'] == '17'\n assert 'request_id' not in resp.json().keys()\n assert resp.json()['error_text'] == workflow_terminated_msg", "def test_verify(self):\n self.testObject.verify()", "def check_otp(email, otp_code):\n\tprint \"Inside 
check_otp\"\n\totp_key, qrcode_data = get_otp_key(email)\n\tprint \"DEBUG qrcode_data: \", qrcode_data\n\ttotp = pyotp.TOTP(otp_key)\n\n\tprint \"otp_code = \", otp_code\n\tprint \"otp_key = \", otp_key\n\tprint \"totp.now() = \", totp.now()\n\tprint \"TOTP provisioning_uri = \", totp.provisioning_uri(email) \n\n\tif totp.verify(otp_code):\n\t\tprint \"totp.verify() = True\"\n\t\treturn True\n\tprint \"totp.verify() = False\"\n\treturn False", "def test_get_pay_in_details(self):\n pass", "def pp_confirm_payment_interactive(self, approval_url):\n from selenium import webdriver\n from selenium.webdriver.common.keys import Keys\n display = Display(visible=0, size=(800,600))\n display.start()\n br = webdriver.Firefox()\n br.get(approval_url)\n e = br.find_element_by_id('loadLogin')\n e.click()\n e = br.find_element_by_id('login_email')\n e.send_keys(settings.PAYPAL_TESTUSER)\n e = br.find_element_by_id('login_password')\n e.send_keys(settings.PAYPAL_TESTUSER_PASSWORD)\n e = br.find_element_by_id('submitLogin')\n e.click()\n e = br.find_element_by_id('continue')\n e.click()\n # response url is of the form\n # http://<return_url>?paymentId=PAY-6RV70583SB702805EKEYSZ6Y&token=EC-60U79048BN7719609&PayerID=7E7MGXCWTTKK2\n # see https://developer.paypal.com/docs/integration/web/accept-paypal-payment/#get-payment-approval\n response_url = br.current_url\n parts = urlparse.urlparse(response_url)\n authorization = urlparse.parse_qs(parts.query)\n br.get_screenshot_as_file('payment.png')\n br.quit()\n display.stop()\n return authorization", "def verify_token(event):\n if event['token'] != VERIFICATION_TOKEN:\n print('Presented with invalid token - ignoring message...')\n return False\n return True", "def test_verification_with_valid_token(self) -> None:\n\n secret_key = str(self.author.secret_key)\n verification_url = reverse('author:verify', kwargs={'secret_key': str(secret_key)})\n\n # Make sure URL's don't change.\n self.assertEqual(verification_url, f'/api/authors/verify/{secret_key}/')\n\n # Make valid request and get response\n response: Response = self.client.get(verification_url)\n\n self.assertEqual(response.status_code, 302)\n\n # Now test if the method \"verify\" was called\n self.assertEqual(Author.objects.get().verified, True)\n # We don't wanna give him too many privileges\n self.assertEqual(self.author.is_staff, False)", "def test_submit_iso20022_payment_instruction(self):\n pass", "def google_verify(request):\n return {}", "def verify(assertion, audience):\n verify_url = getattr(settings, 'BROWSERID_VERIFICATION_URL',\n DEFAULT_VERIFICATION_URL)\n\n log.info(\"Verification URL: %s\" % verify_url)\n\n result = _verify_http_request(verify_url, urllib.urlencode({\n 'assertion': assertion,\n 'audience': audience\n }))\n\n if result['status'] == OKAY_RESPONSE:\n return result\n\n log.error('BrowserID verification failure. Response: %r '\n 'Audience: %r' % (result, audience))\n log.error(\"BID assert: %r\" % assertion)\n return False", "def verifysubscriptionstatusinaccounttab():\n pass" ]
[ "0.8481473", "0.6472825", "0.5787242", "0.5769118", "0.5757081", "0.5683456", "0.5683456", "0.5624004", "0.5467733", "0.54500043", "0.544273", "0.5417485", "0.54154587", "0.5396169", "0.5375768", "0.53615075", "0.5322293", "0.53124595", "0.52979654", "0.5271814", "0.5205165", "0.5192838", "0.5158461", "0.51520795", "0.514784", "0.51356816", "0.5104729", "0.51040757", "0.5098069", "0.5095029", "0.50910115", "0.5090742", "0.5083637", "0.5077369", "0.5066333", "0.50658303", "0.50618017", "0.5050655", "0.50497377", "0.5030002", "0.5030002", "0.5029919", "0.5028467", "0.502167", "0.5019032", "0.5017491", "0.5014417", "0.4997441", "0.4994951", "0.49826583", "0.49824914", "0.49720025", "0.49716058", "0.49713796", "0.49682695", "0.49517465", "0.49445298", "0.4942552", "0.4929741", "0.4929653", "0.49284583", "0.492364", "0.4920922", "0.49098635", "0.4905775", "0.4890954", "0.48844287", "0.48828474", "0.48634884", "0.485224", "0.48471686", "0.48426664", "0.48378366", "0.48347715", "0.4834653", "0.48183772", "0.48131555", "0.48047867", "0.48018607", "0.48007226", "0.47997117", "0.47913158", "0.4785877", "0.4784565", "0.4778905", "0.47774935", "0.47765926", "0.47714373", "0.47714093", "0.47684515", "0.4764601", "0.47603315", "0.47578338", "0.47571346", "0.47490606", "0.47482407", "0.47449133", "0.4737199", "0.47364956", "0.47357854" ]
0.75808173
1
Receives and verifies PayPal callbacks.
def render_POST(self, request): log.msg("Paypal callback:") log.msg(request.args) d = self.verify(request) d.addCallback(lambda ign: self._process(request.args)) d.addErrback(log.err) return ''
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_process_postpay_accepted(self):\r\n student1 = UserFactory()\r\n student1.save()\r\n\r\n order1 = Order.get_cart_for_user(student1)\r\n params = {\r\n 'card_accountNumber': '1234',\r\n 'card_cardType': '001',\r\n 'billTo_firstName': student1.first_name,\r\n 'orderNumber': str(order1.id),\r\n 'orderCurrency': 'usd',\r\n 'decision': 'ACCEPT',\r\n 'ccAuthReply_amount': '0.00'\r\n }\r\n result = process_postpay_callback(params)\r\n self.assertTrue(result['success'])\r\n self.assertEqual(result['order'], order1)\r\n order1 = Order.objects.get(id=order1.id) # reload from DB to capture side-effect of process_postpay_callback\r\n self.assertEqual(order1.status, 'purchased')\r\n self.assertFalse(result['error_html'])", "def awaiting_payment(self):", "def validate_with_paypal(request, validate_type):\n if validate_type == 'PDT':\n # we are on return url\n # need to verify if payment is completed\n # MERCHANT_TXN_KEY is your PDT identity token\n params = {\n 'cmd': '_notify-synch',\n 'tx': request.GET.get('tx', ''),\n 'at': settings.MERCHANT_TXN_KEY\n }\n data = urllib.urlencode(params)\n\n # Sample response:\n # SUCCESS\n # first_name=Jane+Doe\n # last_name=Smith\n # payment_status=Completed payer_email=janedoesmith%40hotmail.com\n # payment_gross=3.99\n # mc_currency=USD custom=For+the+purchase+of+the+rare+book+Green+Eggs+%26+Ham\n\n # If the response is FAIL, PayPal recommends making sure that:\n # The Transaction token is not bad.\n # The ID token is not bad.\n # The tokens have not expired.\n\n else: # IPN\n data = 'cmd=_notify-validate&%s' % request.POST.urlencode()\n\n # The response is one single-word: VERIFIED or INVALID\n\n headers = {\"Content-type\": \"application/x-www-form-urlencoded\",\n 'encoding': 'utf-8',\n \"Accept\": \"text/plain\"}\n request = urllib2.Request(settings.PAYPAL_POST_URL,\n data,\n headers)\n response = urllib2.urlopen(request)\n data = response.read()\n\n if validate_type == 'PDT':\n return parse_pdt_validation(data)\n else:\n return data.strip('\\n').lower() == 'verified', None", "def verify(self, request):\n paypalURL = 'https://www.sandbox.paypal.com/cgi-bin/webscr'\n if not self.SANDBOX:\n paypalURL = 'https://www.paypal.com/cgi-bin/webscr'\n\n def _cb(response):\n if response == 'INVALID':\n raise PaypalError(\n 'IPN data invalid. 
data: %s', (data,))\n\n elif response == 'VERIFIED':\n return True\n\n else:\n raise PaypalError('Unrecognized verification response: %s', (response,))\n\n data = request.content.read()\n params = '?cmd=_notify-validate&' + data\n\n d = getPage(paypalURL+params, method='POST')\n d.addCallback(_cb)\n return d", "def process_postpay_callback(params):\r\n try:\r\n verify_signatures(params)\r\n result = payment_accepted(params)\r\n if result['accepted']:\r\n # SUCCESS CASE first, rest are some sort of oddity\r\n record_purchase(params, result['order'])\r\n return {'success': True,\r\n 'order': result['order'],\r\n 'error_html': ''}\r\n else:\r\n return {'success': False,\r\n 'order': result['order'],\r\n 'error_html': get_processor_decline_html(params)}\r\n except CCProcessorException as error:\r\n return {'success': False,\r\n 'order': None, # due to exception we may not have the order\r\n 'error_html': get_processor_exception_html(error)}", "def test_refund_cert_callback_before_expiration_email(self):\r\n course = CourseFactory.create(org='refund_before_expiration', number='test', run='course', display_name='one')\r\n course_key = course.id\r\n many_days = datetime.timedelta(days=60)\r\n\r\n course_mode = CourseMode(course_id=course_key,\r\n mode_slug=\"verified\",\r\n mode_display_name=\"verified cert\",\r\n min_price=self.cost,\r\n expiration_datetime=datetime.datetime.now(pytz.utc) + many_days)\r\n course_mode.save()\r\n\r\n CourseEnrollment.enroll(self.user, course_key, 'verified')\r\n cart = Order.get_cart_for_user(user=self.user)\r\n CertificateItem.add_to_order(cart, course_key, self.cost, 'verified')\r\n cart.purchase()\r\n\r\n mail.outbox = []\r\n with patch('shoppingcart.models.log.error') as mock_error_logger:\r\n CourseEnrollment.unenroll(self.user, course_key)\r\n self.assertFalse(mock_error_logger.called)\r\n self.assertEquals(len(mail.outbox), 1)\r\n self.assertEquals('[Refund] User-Requested Refund', mail.outbox[0].subject)\r\n self.assertEquals(settings.PAYMENT_SUPPORT_EMAIL, mail.outbox[0].from_email)\r\n self.assertIn('has requested a refund on Order', mail.outbox[0].body)", "def paynova_callback(request):\n\n log.info('Callback. %s ' % request.POST)\n\n # check DIGEST\n\n if not _ehn_checksum(request.POST):\n log.error('EHN DIGEST hash is not verified. %s' % request.POST)\n return HttpResponseBadRequest()\n\n # check EVENT_TYPE\n # TODO: process other events\n\n if request.POST.get('EVENT_TYPE') != 'PAYMENT':\n log.error('Unexpected EVENT_TYPE. %s' % request.POST)\n return HttpResponseBadRequest()\n\n # get PaynovaPayment from model\n\n try:\n pp = PaynovaPayment.objects.get(order_id=request.POST.get('ORDER_ID'), session_id=request.POST.get('SESSION_ID'))\n except PaynovaPayment.DoesNotExist:\n log.error('Unknown ORDER_ID. 
%s' % request.POST)\n return HttpResponseNotFound()\n\n pp.status = request.POST.get('PAYMENT_STATUS')\n pp.status_reason = request.POST.get('PAYMENT_STATUS_REASON')\n pp.params_ehn = request.POST\n pp.save()\n\n # send signal\n\n log.debug('Process paynova_payment signal')\n\n paynova_payment.send(sender=pp, params=request.POST, status=request.POST.get('PAYMENT_STATUS'))\n\n log.debug('Signal paynova_payment processed')\n\n return HttpResponse()", "def checkPayComplete(*args, **kwargs):\n initParam = {}\n transaction = kwargs.get('transaction')\n if transaction:\n p = driver.PayPal()\n #Check whether use has paid successfully.\n result = p.check_ap_payment_status(transaction.pay_key)\n if result['status'][0] != 'COMPLETED':\n #Do something after user payed successfully.\n executeMethod = kwargs.pop('executeMethod', None)\n if executeMethod:\n initParam['transaction_id'] = transaction.id\n initParam['pay_key'] = transaction.pay_key\n initParam['buyer_account'] = result['senderEmail'][0]\n executeMethod(initParam=initParam)", "def paypal_gateway(self):\n\n print(request.form)\n\n # Gather information from callback response\n first_name = request.form.get(\"first_name\", None)\n last_name = request.form.get(\"last_name\", None)\n payer_id = request.form.get(\"payer_id\", None)\n payer_email = request.form.get(\"payer_email\", None)\n item_name = request.form.get(\"item_name\", None)\n item_number = request.form.get(\"item_number\", None)\n custom = request.form.get(\"custom\", None)\n payment_gross = request.form.get(\"payment_gross\", None)\n\n ## Generate Token and store in database\n gen_uuid = str(uuid.uuid4())\n\n try:\n t = Token()\n t.uuid = gen_uuid\n t.email = payer_email\n t.active = True\n t.package = item_name\n t.package_id = item_number\n\n db.session.add(t)\n db.session.commit()\n except:\n import traceback\n db.session.rollback()\n traceback.print_exc()\n\n ## Send email to user with unique link\n try:\n msg = Message(\n \"Guildbit - Order Confirmation\",\n sender=settings.DEFAULT_MAIL_SENDER,\n recipients=[payer_email])\n\n msg.html = render_template(\"emails/payment_thankyou.html\", package=item_name, uuid=gen_uuid)\n mail.send(msg)\n except:\n import traceback\n traceback.print_exc()\n\n return jsonify({\n \"status\": \"received\"\n })", "def test_authorize_pending_payment(self):\n pass", "def callback(self):\n\n # Gather information from callback response\n data = json.loads(request.data)\n order = data.get(\"order\", None)\n customer = data.get(\"customer\", None)\n\n email = customer[\"email\"]\n id = order[\"id\"]\n status = order[\"status\"]\n custom = order[\"custom\"]\n button = order[\"button\"]\n button_name = button[\"name\"]\n\n ## Generate Token and store in database\n gen_uuid = str(uuid.uuid4())\n\n try:\n t = Token()\n t.uuid = gen_uuid\n t.email = email\n t.active = True\n t.package = custom\n\n db.session.add(t)\n db.session.commit()\n except:\n import traceback\n db.session.rollback()\n traceback.print_exc()\n\n ## Send email to user with unique link\n try:\n msg = Message(\n \"Guildbit - Order Confirmation\",\n sender=settings.DEFAULT_MAIL_SENDER,\n recipients=[email])\n\n # msg.html = template\n msg.html = render_template(\"emails/payment_thankyou.html\", package=button_name, uuid=gen_uuid)\n mail.send(msg)\n except:\n import traceback\n traceback.print_exc()\n\n return jsonify({\n \"status\": \"received\"\n })", "def postpay_callback(request):\r\n params = request.POST.dict()\r\n result = process_postpay_callback(params)\r\n if result['success']:\r\n 
return HttpResponseRedirect(reverse('shoppingcart.views.show_receipt', args=[result['order'].id]))\r\n else:\r\n return render_to_response('shoppingcart/error.html', {'order': result['order'],\r\n 'error_html': result['error_html']})", "def test_process_postpay_not_accepted(self):\r\n student1 = UserFactory()\r\n student1.save()\r\n\r\n order1 = Order.get_cart_for_user(student1)\r\n params = {\r\n 'card_accountNumber': '1234',\r\n 'card_cardType': '001',\r\n 'billTo_firstName': student1.first_name,\r\n 'orderNumber': str(order1.id),\r\n 'orderCurrency': 'usd',\r\n 'decision': 'REJECT',\r\n 'ccAuthReply_amount': '0.00',\r\n 'reasonCode': '207'\r\n }\r\n result = process_postpay_callback(params)\r\n self.assertFalse(result['success'])\r\n self.assertEqual(result['order'], order1)\r\n self.assertEqual(order1.status, 'cart')\r\n self.assertIn(REASONCODE_MAP['207'], result['error_html'])", "async def test_receive_post_ok(self):\n await self.webhook_connection.connect()\n assert self.webhook_connection.is_connected is True\n payload = {\"hello\": \"world\"}\n call_task = self.loop.create_task(self.call_webhook(\"test_topic\", json=payload))\n envelope = await asyncio.wait_for(self.webhook_connection.receive(), timeout=10)\n\n assert envelope\n\n message = cast(HttpMessage, envelope.message)\n dialogue = self.skill_dialogues.update(message)\n assert dialogue is not None\n assert message.method.upper() == \"POST\"\n assert message.body.decode(\"utf-8\") == json.dumps(payload)\n await call_task", "def pay_and_callback(payment_request: dict):\n log.info('pay_and_callback received', payment_request=payment_request)\n payment_request = PaymentRequest(payment_request)\n with lock(redis_conn, 'payment:{}'.format(payment_request.id), blocking_timeout=120):\n try:\n payment = pay(payment_request)\n except Exception as e:\n enqueue_payment_failed_callback(payment_request, str(e))\n raise # crash the job\n else:\n enqueue_payment_callback(payment_request.callback, payment, 'send')", "def process_webhook(self):\n if self.token:\n self.verify = VerificationMethod.TOKEN\n if self.secret:\n self.verify = VerificationMethod.HMAC\n return True", "def handle_response(self, callback):\n\n self.log.info(\"Received callback for subscription %s\", self.service_id)\n self.log.info(callback)\n\n # handle callbacks\n self.handle_callbacks()", "def test_handle_notify_request_success(\n bambora_provider_base_config,\n order: Order,\n):\n order.order_number = \"abc123\"\n order.status = OrderStatus.PAID\n order.lease.status = LeaseStatus.PAID\n order.lease.save()\n order.save()\n refund = OrderRefundFactory(\n order=order, refund_id=\"1234567\", amount=order.total_price\n )\n\n rf = RequestFactory()\n request = rf.get(\"/payments/notify_refund/\", notify_success_params)\n payment_provider = create_bambora_provider(bambora_provider_base_config, request)\n\n assert refund.status == OrderRefundStatus.PENDING\n\n returned = payment_provider.handle_notify_refund_request()\n\n refund = OrderRefund.objects.get(refund_id=notify_success_params.get(\"REFUND_ID\"))\n order = refund.order\n\n assert refund.status == OrderRefundStatus.ACCEPTED\n assert order.status == OrderStatus.REFUNDED\n assert order.lease.status == LeaseStatus.TERMINATED\n\n assert isinstance(returned, HttpResponse)\n assert returned.status_code == 204", "def test_set_verify_callback_reference(self):\n\n def callback(conn, cert, errnum, depth, ok): # pragma: no cover\n return ok\n\n tracker = ref(callback)\n\n context = Context(SSLv23_METHOD)\n 
context.set_verify(VERIFY_PEER, callback)\n del callback\n\n conn = Connection(context, None)\n context.set_verify(VERIFY_NONE)\n\n collect()\n collect()\n assert tracker()\n\n conn.set_verify(VERIFY_PEER, lambda conn, cert, errnum, depth, ok: ok)\n collect()\n collect()\n callback = tracker()\n if callback is not None: # pragma: nocover\n referrers = get_referrers(callback)\n if len(referrers) > 1:\n pytest.fail(\"Some references remain: %r\" % (referrers,))", "def payPalDoCheckOut(request, *args, **kwargs):\n initParam = {}\n id = request.GET.get(\"id\")\n token = request.GET.get(\"token\")\n payerID = request.GET.get(\"PayerID\")\n initParam['id'] = id\n initParam['token'] = token\n if token and payerID and id:\n #Check and get Service detail information\n checkMethod = kwargs.pop('checkMethod', None)\n if checkMethod:\n gateway = request.session.get('gateway', None)\n if gateway:\n del request.session['gateway']\n initParam['gateway'] = gateway\n serviceDetail = checkMethod(request, initParam=initParam)\n if serviceDetail:\n amount = serviceDetail.actual_amount\n currency = serviceDetail.app.currency.currency\n result, response = utils.process_payment_request(amount, currency, token, payerID)\n if result:\n #Do something after payment success.\n executeMethod = kwargs.pop('executeMethod', None)\n if executeMethod:\n initParam['serviceDetail_id'] = serviceDetail.id\n if executeMethod(request, initParam=initParam):\n success_page = request.session.get('success_page', None)\n back_page = request.session.get('back_page', None)\n if back_page:\n del request.session['back_page']\n if success_page:\n del request.session['success_page']\n initParam['success_page'] = success_page\n initParam['success_page_msg'] = request.session['success_page_msg']\n #For the value in paypal_success.html\n initParam['app'] = serviceDetail.app\n initParam['type'] = 'Payment'\n initParam['price'] = serviceDetail.actual_amount\n initParam['msg'] = _('Thank you for your payment, and your app will be listed according to the effective period your choosed.')\n log.info(_('Seller %(param1)s has paid service fee with service detail id %(param2)s.')\n % {'param1': request.user.username, 'param2': serviceDetail.id})\n return render_to_response(\"payment/paypal_success.html\", initParam, context_instance=RequestContext(request))\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, Execute method %(param3)s failed.')\n % {'param1': token, 'param2': payerID, 'param3': executeMethod.__name__})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, ExecuteMethod does not exist.')\n % {'param1': token, 'param2': payerID})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, %(param3)s : %(param4)s.')\n % {'param1': token, 'param2': payerID, 'param3': response.error, 'param4': response.error_msg})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, User: %(param3)s, Execute method %(param4)s failed.')\n % {'param1': token, 'param2': payerID, 'param3': request.user.username, 'param4': checkMethod.__name__})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, Gateway no exists in request.session.')\n % {'param1': token, 'param2': payerID})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, CheckMethod does not exist.')\n % {'param1': token, 'param2': payerID})\n else:\n log.error(_('Token or PayerID no exists.'))\n\n if request.session.get('gateway', None):\n del request.session['gateway']\n success_page = request.session.get('success_page', None)\n back_page = 
request.session.get('back_page', None)\n if success_page:\n del request.session['success_page']\n if back_page:\n del request.session['back_page']\n error_msg = driver.GENERIC_PAYPAL_ERROR\n page_msg = request.session['back_page_msg']\n return render_to_response('payment/paypal_cancel.html',\n {'error_msg': error_msg, 'back_page': back_page, 'back_page_msg': page_msg}, context_instance=RequestContext(request))\n else:\n error_msg = _('%(param1)s Please payment again.') % {'param1': driver.GENERIC_PAYPAL_ERROR}\n return render_to_response('payment/paypal_error.html',\n {\"error_msg\": error_msg}, context_instance=RequestContext(request))", "async def test_payment_pending_emails(app, session, stan_server, event_loop, client_id, events_stan, future):\n # Call back for the subscription\n from account_mailer.worker import cb_subscription_handler\n\n # vars\n user = factory_user_model_with_contact()\n org = factory_org_model()\n factory_membership_model(user.id, org.id)\n id = org.id\n\n events_subject = 'test_subject'\n events_queue = 'test_queue'\n events_durable_name = 'test_durable'\n with patch.object(notification_service, 'send_email', return_value=None) as mock_send:\n # register the handler to test it\n await subscribe_to_queue(events_stan,\n events_subject,\n events_queue,\n events_durable_name,\n cb_subscription_handler)\n\n # add an event to queue\n mail_details = {\n 'accountId': id,\n 'cfsAccountId': '12345678',\n 'transactionAmount': 20.00\n }\n await helper_add_event_to_queue(events_stan, events_subject, org_id=id,\n msg_type=MessageType.PAYMENT_PENDING.value,\n mail_details=mail_details)\n\n mock_send.assert_called\n assert mock_send.call_args.args[0].get('recipients') == 'foo@bar.com'\n assert mock_send.call_args.args[0].get('content').get(\n 'subject') == SubjectType.PAYMENT_PENDING.value\n assert mock_send.call_args.args[0].get('attachments') is None\n assert mock_send.call_args.args[0].get('content').get('body') is not None\n\n await helper_add_event_to_queue(events_stan, events_subject, org_id=id,\n msg_type=MessageType.PAYMENT_PENDING.value,\n mail_details=mail_details)", "def payReturn(request, *args, **kwargs):\n initParam = {}\n pay_key = request.session.get('pay_key', None)\n gateway = request.session.get('gateway', None)\n if pay_key and gateway:\n del request.session['pay_key']\n del request.session['gateway']\n #Check and get Transaction information\n checkMethod = kwargs.pop('checkMethod', None)\n if checkMethod:\n initParam['pay_key'] = pay_key\n initParam['gateway'] = gateway\n transaction = checkMethod(request, initParam=initParam)\n if transaction:\n p = driver.PayPal()\n #Check whether use has paid successfully.\n result = p.check_ap_payment_status(transaction.pay_key)\n if result['status'][0] == 'COMPLETED':\n #Do something after user payed successfully.\n executeMethod = kwargs.pop('executeMethod', None)\n if executeMethod:\n initParam['transaction_id'] = transaction.id\n initParam['buyer_account'] = result['senderEmail'][0]\n if executeMethod(initParam=initParam):\n success_page = request.session.get('success_page', None)\n back_page = request.session.get('back_page', None)\n if back_page:\n del request.session['back_page']\n if success_page:\n del request.session['success_page']\n initParam['success_page'] = success_page\n initParam['success_page_msg'] = request.session['success_page_msg']\n #For the value in paypal_success.html\n initParam['app'] = transaction.app\n initParam['price'] = transaction.price\n initParam['type'] = 'Transaction'\n 
initParam['msg'] = _('You have successfully paid the money. We have already sent an email to the app seller. In the meanwhile you can send private message to seller as well.')\n log.info(_('User %(param1)s has paid with transaction id %(param2)s.')\n % {'param1': request.user.username, 'param2': transaction.id})\n return render_to_response(\"payment/paypal_success.html\", initParam, context_instance=RequestContext(request))\n else:\n log.error(_('User %(param1)s has paid with transaction id %(param2)s, but execute method %(param3)s failed.')\n % {'param1': request.user.username, 'param2': transaction.id, 'param3': executeMethod.__name__})\n else:\n log.error(_('User %(param1)s has paid with transaction id %(param2)s, but ExecuteMethod does not exist.')\n % {'param1': request.user.username, 'param2': transaction.id})\n else:\n log.error(_('User %(param1)s has no paid with transaction id %(param2)s.')\n % {'param1': request.user.username, 'param2': transaction.id})\n else:\n log.error(_('PayKey %(param1)s, Gateway: %(param2)s, User: %(param3)s, Execute method %(param4)s failed.')\n % {'param1': pay_key, 'param2': gateway, 'param3': request.user.username, 'param4': checkMethod.__name__})\n else:\n log.error(_('PayKey %(param1)s, Gateway: %(param2)s, CheckMethod does not exist.')\n % {'param1': pay_key, 'param2': gateway})\n else:\n log.error(_('Pay. PayKey or Gateway no exists.'))\n\n success_page = request.session.get('success_page', None)\n back_page = request.session.get('back_page', None)\n if success_page:\n del request.session['success_page']\n if back_page:\n del request.session['back_page']\n error_msg = driver.GENERIC_PAYPAL_ERROR\n page_msg = request.session['back_page_msg']\n return render_to_response('payment/paypal_cancel.html',\n {'error_msg': error_msg, 'back_page': back_page, 'back_page_msg': page_msg}, context_instance=RequestContext(request))\n else:\n error_msg = _('%(param1)s Please transaction again.') % {'param1': driver.GENERIC_PAYPAL_ERROR}\n return render_to_response('payment/paypal_error.html',\n {\"error_msg\": error_msg}, context_instance=RequestContext(request))", "def process_payment():\n\n url = 'https://api.worldpay.com/v1/orders'\n headers = {'Authorization': environ.get('WORLDPAY_API_KEY'),\n 'Content-type': 'application/json'}\n body = {\n \"paymentMethod\": {\n \"type\": \"Card\",\n \"name\": session['caller_name'],\n \"expiryMonth\": session['expiry'][:2],\n \"expiryYear\": f\"20{session['expiry'][2:]}\",\n \"cardNumber\": session['card_number'],\n \"cvc\": session['cvv'],\n \"issueNumber\": \"1\"\n },\n \"orderType\": \"ECOM\",\n \"orderDescription\": session['call_sid'],\n \"amount\": session['payment_amount'],\n \"currencyCode\": \"GBP\"}\n r = requests.post(url, headers=headers, data=json.dumps(body))\n requests.post(environ.get('END_OF_INTERACTION_URL'), r.text)\n response = VoiceResponse()\n response.say(\"Payment processed, goodbye\")\n # If your flow started in Twilio Studio, redirect back to it to complete the call\n # response.redirect(\n # 'https://webhooks.twilio.com/v1/Accounts/ACfd0573f9f976b99746c693XXXXXXXXXX/Flows/FWbfdeda0a21644267231d3dXXXXXXXXXX?FlowEvent=return')\n return str(response)", "def paypal_notification(request, payment_mode='paypal'):\r\n try:\r\n data = request.POST\r\n _log.debug(\"PayPal IPN data: %s\", repr(data))\r\n\r\n if not paypal.verify_ipn_request(request):\r\n return HttpResponse()\r\n\r\n if data.get('payment_status', None) != \"Completed\":\r\n # Do not insert payments whose status is not \"Completed\".\r\n 
_log.debug(\"Ignored IPN data for incomplete payment.\")\r\n return HttpResponse()\r\n\r\n currency = data.get('mc_currency', settings.CURRENCY_DEFAULT)\r\n if currency.upper() not in settings.CURRENCIES_SUPPORTED:\r\n # We do not support anything other than USD.\r\n _log.debug(\"Ignored IPN data for unsupported currency %s\", currency)\r\n return HttpResponse()\r\n\r\n pending_contribution_id, username = data['custom'].split('~') # pending_contrib_id~buyer's_username\r\n is_anon = username == 'anonymous'\r\n transaction_id = data['txn_id']\r\n qty = data['quantity']\r\n artist_email = data['receiver_email']\r\n campaign_id = data['item_number']\r\n amount = data['mc_gross']\r\n is_test = data.get('test_ipn', 0) == 1\r\n\r\n contribs = Contribution.objects.filter(transaction_id=transaction_id, payment_mode=payment_mode).count()\r\n if not contribs:\r\n # This transaction hasn't already been processed.\r\n # Process it and update the ``memo`` field if it has been provided by the buyer.\r\n if is_anon:\r\n _log.debug(\"Processing anonymous contribution\")\r\n contributor = User.objects.get(username='anonymous')\r\n campaign = Campaign.objects.get(pk=campaign_id)\r\n contrib = campaign.contribution_set.create(\r\n contributor=contributor,\r\n amount=amount,\r\n qty=qty,\r\n payment_mode=payment_mode,\r\n transaction_id=transaction_id,\r\n memo=data.get('memo', '')\r\n )\r\n _log.info(\"PayPal (tx: %s) anonymous contribution recorded: %s\", transaction_id, contrib)\r\n else:\r\n pending_contrib = PendingContribution.objects.get(pk=pending_contribution_id,\r\n contributor__username=username,\r\n campaign=campaign_id,\r\n amount=amount,\r\n qty=qty,\r\n payment_mode=payment_mode)\r\n if pending_contrib:\r\n contrib = pending_contrib.process_payment_notification(transaction_id, data.get('memo', ''))\r\n _log.info(\"PayPal transaction %s resolved. 
Contribution recorded: %s\", transaction_id, contrib)\r\n else:\r\n _log.error(\"PayPal transaction %s could not be resolved.\", transaction_id)\r\n except:\r\n _log.exception(''.join(format_exception(*exc_info())))\r\n return HttpResponse()", "def post(self, request):\r\n if self._is_signature_valid(request.POST):\r\n return self._payment_page_response(request.POST, '/shoppingcart/postpay_callback/')\r\n\r\n else:\r\n return render_to_response('shoppingcart/test/fake_payment_error.html')", "def post(self):\n policies = json.loads(self.request.get('policies'))\n request = json.loads(self.request.get('request_json'))\n response = json.loads(self.request.get('response_json'))\n\n maybe_notify_backend('LEASED', response['hostname'], policies)\n maybe_notify_lessee(request, response)", "def test_set_verify_callback_reference(self):\n serverContext = Context(TLSv1_2_METHOD)\n serverContext.use_privatekey(\n load_privatekey(FILETYPE_PEM, root_key_pem)\n )\n serverContext.use_certificate(\n load_certificate(FILETYPE_PEM, root_cert_pem)\n )\n\n clientContext = Context(TLSv1_2_METHOD)\n\n clients = []\n\n for i in range(5):\n\n def verify_callback(*args):\n return True\n\n serverSocket, clientSocket = socket_pair()\n client = Connection(clientContext, clientSocket)\n\n clients.append((serverSocket, client))\n\n clientContext.set_verify(VERIFY_PEER, verify_callback)\n\n gc.collect()\n\n # Make them talk to each other.\n for serverSocket, client in clients:\n server = Connection(serverContext, serverSocket)\n server.set_accept_state()\n client.set_connect_state()\n\n for _ in range(5):\n for s in [client, server]:\n try:\n s.do_handshake()\n except WantReadError:\n pass", "def subscribe_verify(self,\n raw_response: Any,\n sub_mode: str = 'SAMPLE',\n *args,\n **kwargs):\n pass", "def test_validate_callback(self):\n with patch('requests.get') as mock:\n mock.return_value.status_code = 200\n try:\n ProxyGrantingTicket.objects.validate_callback('https://www.example.com', 'https://www.example.com/',\n self.pgtid, self.pgtiou)\n except InvalidProxyCallback:\n self.fail(\"Exception raised validating proxy callback URL\")", "def pp_confirm_payment_interactive(self, approval_url):\n from selenium import webdriver\n from selenium.webdriver.common.keys import Keys\n display = Display(visible=0, size=(800,600))\n display.start()\n br = webdriver.Firefox()\n br.get(approval_url)\n e = br.find_element_by_id('loadLogin')\n e.click()\n e = br.find_element_by_id('login_email')\n e.send_keys(settings.PAYPAL_TESTUSER)\n e = br.find_element_by_id('login_password')\n e.send_keys(settings.PAYPAL_TESTUSER_PASSWORD)\n e = br.find_element_by_id('submitLogin')\n e.click()\n e = br.find_element_by_id('continue')\n e.click()\n # response url is of the form\n # http://<return_url>?paymentId=PAY-6RV70583SB702805EKEYSZ6Y&token=EC-60U79048BN7719609&PayerID=7E7MGXCWTTKK2\n # see https://developer.paypal.com/docs/integration/web/accept-paypal-payment/#get-payment-approval\n response_url = br.current_url\n parts = urlparse.urlparse(response_url)\n authorization = urlparse.parse_qs(parts.query)\n br.get_screenshot_as_file('payment.png')\n br.quit()\n display.stop()\n return authorization", "def send_verification(self):\n pass", "def PostResendVerifyEmail(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def results_callback(request):\r\n body = request.body\r\n\r\n try:\r\n body_dict = json.loads(body)\r\n 
except ValueError:\r\n log.exception(\"Invalid JSON received from Software Secure:\\n\\n{}\\n\".format(body))\r\n return HttpResponseBadRequest(\"Invalid JSON. Received:\\n\\n{}\".format(body))\r\n\r\n if not isinstance(body_dict, dict):\r\n log.error(\"Reply from Software Secure is not a dict:\\n\\n{}\\n\".format(body))\r\n return HttpResponseBadRequest(\"JSON should be dict. Received:\\n\\n{}\".format(body))\r\n\r\n headers = {\r\n \"Authorization\": request.META.get(\"HTTP_AUTHORIZATION\", \"\"),\r\n \"Date\": request.META.get(\"HTTP_DATE\", \"\")\r\n }\r\n\r\n sig_valid = ssencrypt.has_valid_signature(\r\n \"POST\",\r\n headers,\r\n body_dict,\r\n settings.VERIFY_STUDENT[\"SOFTWARE_SECURE\"][\"API_ACCESS_KEY\"],\r\n settings.VERIFY_STUDENT[\"SOFTWARE_SECURE\"][\"API_SECRET_KEY\"]\r\n )\r\n\r\n _response, access_key_and_sig = headers[\"Authorization\"].split(\" \")\r\n access_key = access_key_and_sig.split(\":\")[0]\r\n\r\n # This is what we should be doing...\r\n #if not sig_valid:\r\n # return HttpResponseBadRequest(\"Signature is invalid\")\r\n\r\n # This is what we're doing until we can figure out why we disagree on sigs\r\n if access_key != settings.VERIFY_STUDENT[\"SOFTWARE_SECURE\"][\"API_ACCESS_KEY\"]:\r\n return HttpResponseBadRequest(\"Access key invalid\")\r\n\r\n receipt_id = body_dict.get(\"EdX-ID\")\r\n result = body_dict.get(\"Result\")\r\n reason = body_dict.get(\"Reason\", \"\")\r\n error_code = body_dict.get(\"MessageType\", \"\")\r\n\r\n try:\r\n attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)\r\n except SoftwareSecurePhotoVerification.DoesNotExist:\r\n log.error(\"Software Secure posted back for receipt_id {}, but not found\".format(receipt_id))\r\n return HttpResponseBadRequest(\"edX ID {} not found\".format(receipt_id))\r\n\r\n if result == \"PASS\":\r\n log.debug(\"Approving verification for {}\".format(receipt_id))\r\n attempt.approve()\r\n elif result == \"FAIL\":\r\n log.debug(\"Denying verification for {}\".format(receipt_id))\r\n attempt.deny(json.dumps(reason), error_code=error_code)\r\n elif result == \"SYSTEM FAIL\":\r\n log.debug(\"System failure for {} -- resetting to must_retry\".format(receipt_id))\r\n attempt.system_error(json.dumps(reason), error_code=error_code)\r\n log.error(\"Software Secure callback attempt for %s failed: %s\", receipt_id, reason)\r\n else:\r\n log.error(\"Software Secure returned unknown result {}\".format(result))\r\n return HttpResponseBadRequest(\r\n \"Result {} not understood. Known results: PASS, FAIL, SYSTEM FAIL\".format(result)\r\n )\r\n\r\n # If this is a reverification, log an event\r\n if attempt.window:\r\n course_id = attempt.window.course_id\r\n course = course_from_id(course_id)\r\n course_enrollment = CourseEnrollment.get_or_create_enrollment(attempt.user, course_id)\r\n course_enrollment.emit_event(EVENT_NAME_USER_REVERIFICATION_REVIEWED_BY_SOFTWARESECURE)\r\n\r\n return HttpResponse(\"OK!\")", "def callback():\n signature = request.headers['X-Line-Signature']\n body = request.get_data(as_text=True)\n logger.info('Request body: %s', body)\n try:\n handler.handle(body, signature)\n except InvalidSignatureError:\n logger.exception(\n 'Invalid signature. 
Please check your channel access token/channel secret.')\n abort(400)\n\n return 'OK'", "def payment_verification(payload):\n response = requests.post(url, data=payload)\n return response.json()", "def test_check_refund_authcode_success(bambora_payment_provider):\n rf = RequestFactory()\n request = rf.get(\"/payments/notify_refund/\", notify_success_params)\n assert bambora_payment_provider.check_new_refund_authcode(request)", "def payPalReturn(request, *args, **kwargs):\n initParam = {}\n token = request.GET.get('token')\n payerID = request.GET.get('PayerID')\n initParam['token'] = token\n initParam['payerid'] = payerID\n if token and payerID:\n p = driver.PayPal()\n EC_RETURNURL = '/'.join([common.getHttpHeader(request), 'payment/paypal_return'])\n EC_CANCELURL = '/'.join([common.getHttpHeader(request), 'payment/paypal_cancel'])\n res_dict = p.GetExpressCheckoutDetailsInfo(EC_RETURNURL, EC_CANCELURL, token)\n state = p._get_value_from_qs(res_dict, 'ACK')\n if state in [\"Success\", \"SuccessWithWarning\"]:\n #Show the list of service detail to user.\n executeMethod = kwargs.pop('executeMethod', None)\n if executeMethod:\n gateway = request.session.get('gateway', None)\n if gateway:\n initParam['gateway'] = gateway\n serviceDetail, serviceItems, discount_rate = executeMethod(request, initParam=initParam)\n if serviceDetail and serviceItems:\n initParam['serviceDetail'] = serviceDetail\n initParam['serviceItems'] = serviceItems\n initParam['discount_rate'] = discount_rate\n return render_to_response('payment/paypal_return.html', initParam, context_instance=RequestContext(request))\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, Execute method %(param3)s failed.')\n % {'param1': token, 'param2': payerID, 'param3': executeMethod.__name__})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s. 
Gateway no exists in request.session.')\n % {'param1': token, 'param2': payerID})\n else:\n log.error(_('Token %(param1)s, PayerID: %(param2)s, ExecuteMethod does not exist.')\n % {'param1': token, 'param2': payerID})\n else:\n error = p._get_value_from_qs(res_dict, 'L_SHORTMESSAGE0')\n log.error(_('Token %(param1)s, PayerID: %(param2)s, %(param3)s.')\n % {'param1': token, 'param2': payerID, 'param3': error})\n else:\n log.error(_('Token or PayerID no exists.'))\n\n if request.session.get('gateway', None):\n del request.session['gateway']\n success_page = request.session.get('success_page', None)\n back_page = request.session.get('back_page', None)\n if success_page:\n del request.session['success_page']\n if back_page:\n del request.session['back_page']\n error_msg = driver.GENERIC_PAYPAL_ERROR\n page_msg = request.session['back_page_msg']\n return render_to_response('payment/paypal_cancel.html',\n {'error_msg': error_msg, 'back_page': back_page, 'back_page_msg': page_msg}, context_instance=RequestContext(request))\n else:\n error_msg = _('%(param1)s Please payment again.') % {'param1': driver.GENERIC_PAYPAL_ERROR}\n return render_to_response('payment/paypal_error.html',\n {\"error_msg\": error_msg}, context_instance=RequestContext(request))", "def test_success(self, order_placed):\n session, basket_id, order_number = self.prepare_basket()\n data = cs_factories.build_accepted_reply_data(order_number)\n data = cs_factories.sign_reply_data(data)\n url = reverse('cybersource-reply')\n self.assertEqual(order_placed.call_count, 0)\n resp = self.client.post(url, data)\n\n self.assertRedirects(resp, reverse('checkout:thank-you'))\n\n self.assertEqual(len(mail.outbox), 1, 'Should send email')\n self.assertEqual(order_placed.call_count, 1, 'Should trigger order_placed signal')\n\n order = order_placed.call_args[1]['order']\n self.assertEqual(order.status, 'Authorized', 'Should set order status')\n self.assertEqual(order.basket.id, basket_id, 'Should use basket from session')\n self.assertEqual(order.number, order_number, 'Should use order number from CS request')\n\n session = self.client.session\n self.assertEquals(session[CHECKOUT_ORDER_ID], order.id, 'Should save order_id in session')\n\n self.assertEqual(order.sources.count(), 1, 'Should save pPaymentSource')\n source = order.sources.first()\n self.assertEqual(source.currency, 'USD')\n self.assertEqual(source.amount_allocated, D('99.99'))\n self.assertEqual(source.amount_refunded, D('0.00'))\n self.assertEqual(source.amount_debited, D('0.00'))\n\n self.assertEqual(source.transactions.count(), 1, 'Should save Transaction')\n transaction = source.transactions.first()\n self.assertEqual(transaction.log.data, data)\n self.assertEqual(transaction.token.log, transaction.log)\n self.assertEqual(transaction.token.masked_card_number, 'xxxxxxxxxxxx1111')\n self.assertEqual(transaction.token.card_type, '001')\n self.assertEqual(transaction.txn_type, 'Authorise')\n self.assertEqual(transaction.amount, D('99.99'))\n self.assertEqual(transaction.reference, data['transaction_id'])\n self.assertEqual(transaction.status, 'ACCEPT')\n self.assertEqual(transaction.request_token, data['request_token'])\n\n self.assertEqual(order.payment_events.count(), 1, 'Should save PaymentEvent')\n event = order.payment_events.first()\n self.assertEqual(event.amount, D('99.99'))\n self.assertEqual(event.reference, data['transaction_id'])\n self.assertEqual(event.event_type.name, 'Authorise')\n\n self.assertEqual(event.line_quantities.count(), 1, 'Should save 
PaymentEventQuantity')\n lq = event.line_quantities.first()\n self.assertEqual(lq.line, order.lines.first())\n self.assertEqual(lq.quantity, 1)", "def test_handle_notify_request_payment_failed(bambora_provider_base_config, order):\n order.order_number = \"abc123\"\n order.status = OrderStatus.PAID\n order.save()\n refund = OrderRefundFactory(\n order=order, refund_id=\"1234567\", amount=order.total_price\n )\n\n params = {\n \"AUTHCODE\": \"8CF2D0EA9947D09B707E3C2953EF3014F1AD12D2BB0DCDBAC3ABD4601B50462B\",\n \"RETURN_CODE\": \"1\",\n \"REFUND_ID\": \"1234567\",\n }\n\n rf = RequestFactory()\n request = rf.get(\"/payments/notify_refund/\", params)\n payment_provider = create_bambora_provider(bambora_provider_base_config, request)\n\n assert refund.status == OrderRefundStatus.PENDING\n lease_status = refund.order.lease.status\n\n returned = payment_provider.handle_notify_refund_request()\n\n refund = OrderRefund.objects.get(refund_id=params.get(\"REFUND_ID\"))\n order = refund.order\n\n assert refund.status == OrderRefundStatus.REJECTED\n # The order status shouldn't change\n assert order.status == OrderStatus.PAID\n assert order.lease.status == lease_status\n\n assert isinstance(returned, HttpResponse)\n assert returned.status_code == 204", "def test_api_user_resend_confirmation_post(self):\n pass", "def verify(self, response):", "def webhook_payment_successful(self, event):\n\n intent = event.data.object\n p_id = intent.id\n pack = intent.metadata.pack\n save_detail = intent.metadata.save_detail\n\n billing_details = intent.charges.data[0].billing_details\n shipping_details = intent.shipping\n grand_cost = round(intent.charges.data[0].amount / 100, 2)\n\n for field, value in shipping_details.address.items():\n if value == \"\":\n shipping_details.address[field] = None\n\n profile = None\n username = intent.metadata.username\n if username != 'AnonymousUser':\n profile = UserProfile.objects.get(user__username=username)\n if save_detail:\n profile.default_phone_number = shipping_details.phone,\n profile.default_home_Address = shipping_details.address.line1,\n profile.default_home_Address_continued = \\\n shipping_details.address.line2,\n profile.default_postcode = \\\n shipping_details.address.postal_code,\n profile.default_county = \\\n shipping_details.address.city,\n profile.default_country = \\\n shipping_details.address.country,\n profile.save()\n\n order_present = False\n seek = 1\n while seek <= 6:\n try:\n order = Order.objects.get(\n Name__iexact=shipping_details.name,\n user_account=profile,\n email__iexact=billing_details.email,\n phone_number__iexact=shipping_details.phone,\n home_Address__iexact=shipping_details.address.line1,\n home_Address_continued__iexact =(\n shipping_details.address.line2\n ),\n postcode__iexact=shipping_details.address.postal_code,\n county__iexact=shipping_details.address.city,\n country__iexact=shipping_details.address.country,\n grand_cost=grand_cost,\n original_pack=pack,\n stripe_p_id=p_id,\n )\n order_present = True\n break\n except Order.DoesNotExist:\n seek += 1\n time.sleep(1)\n if order_present:\n self._send_email_details(order)\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | Good news. 
\\\n This is now in the database',\n status=200)\n else:\n order = None\n try:\n order = Order.objects.create(\n Name=shipping_details.name,\n email=billing_details.email,\n phone_number=shipping_details.phone,\n home_Address=shipping_details.address.line1,\n home_Address_continued=shipping_details.address.line2,\n postcode=shipping_details.address.postal_code,\n county=shipping_details.address.city,\n country=shipping_details.address.country,\n original_pack=pack,\n stripe_p_id=p_id,\n )\n for item_id, item_data in json.load(pack).items():\n product = Product.objects.get(id=item_id)\n if isinstance(item_data, int):\n order_line_item = OrderLineItem(\n order=order,\n product=product,\n quantity=item_data,\n )\n order_line_item.save()\n else:\n for size, quantity in item_data['items_by_size'].items():\n order_line_item = OrderLineItem(\n order=order,\n product=product,\n quantity=quantity,\n product_size=size,\n )\n order_line_item.save()\n except Exception as e:\n if order:\n order.delete()\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | \\\n There is an error: {e}',\n status=500)\n self._send_email_details(order)\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | \\\n Goodnews: webhook order created',\n status=200)", "def _check_grade_response(self, callback_url, expected_header, expected_body):\r\n # Check the response posted back to us\r\n # This is the default response\r\n expected_callback_dict = {\r\n 'xqueue_header': expected_header,\r\n 'xqueue_body': expected_body,\r\n }\r\n\r\n # Check that the POST request was made with the correct params\r\n self.post.assert_called_with(callback_url, data=expected_callback_dict)", "def verify_payment(self, order_id, ref_id):\n try:\n client = Client(self.service_address)\n res = client.service.bpVerifyRequest(terminalId=self.terminalId,\n userName=self.userName,\n userPassword=self.userPassword,\n orderId=order_id,\n saleOrderId=order_id,\n saleReferenceId=ref_id)\n return True, res\n except WebFault as f:\n return False, f.fault.faultstring\n except Exception as e:\n return False, e.message", "def handle_webhook(self, body, headers):\n self._transport.verify_jwt_response(headers, None, body, self._subject)\n if \"service_user_hash\" in body:\n body = self._validate_response(loads(body), AuthorizeSSEValidator)\n return SessionEndRequest(body['service_user_hash'], self._transport.parse_api_time(body['api_time']))\n else:\n body = loads(self._transport.decrypt_response(body))\n return AuthorizationResponse(body, self._transport.loaded_issuer_private_keys)", "def do_process(self, request):\n oidconsumer = self.oidconsumer\n\n # retrieve the token from the environment (in this case, the URL)\n token = request['query'].get('token', '')\n\n # Ask the library to check the response that the server sent\n # us. Status is a code indicating the response type. info is\n # either None or a string containing more information about\n # the return type.\n status, info = oidconsumer.completeAuth(token, request['query'])\n\n css_class = 'error'\n openid_url = None\n if status == consumer.FAILURE and info:\n # In the case of failure, if info is non-None, it is the\n # URL that we were verifying. We include it in the error\n # message to help the user figure out what happened.\n openid_url = info\n fmt = \"Verification of %s failed.\"\n message = fmt % (cgi.escape(openid_url),)\n elif status == consumer.SUCCESS:\n # Success means that the transaction completed without\n # error. 
If info is None, it means that the user cancelled\n # the verification.\n css_class = 'alert'\n if info:\n # This is a successful verification attempt. If this\n # was a real application, we would do our login,\n # comment posting, etc. here.\n openid_url = info\n if self.url_to_username:\n username = self.url_to_username(request['environ'], openid_url)\n else:\n username = openid_url\n if 'paste.auth_tkt.set_user' in request['environ']:\n request['environ']['paste.auth_tkt.set_user'](username)\n if not self.login_redirect:\n fmt = (\"If you had supplied a login redirect path, you would have \"\n \"been redirected there. \"\n \"You have successfully verified %s as your identity.\")\n message = fmt % (cgi.escape(openid_url),)\n else:\n # @@: This stuff doesn't make sense to me; why not a remote redirect?\n request['environ']['paste.auth.open_id'] = openid_url\n request['environ']['PATH_INFO'] = self.login_redirect\n return self.app(request['environ'], request['start'])\n #exc = httpexceptions.HTTPTemporaryRedirect(self.login_redirect)\n #return exc.wsgi_application(request['environ'], request['start'])\n else:\n # cancelled\n message = 'Verification cancelled'\n else:\n # Either we don't understand the code or there is no\n # openid_url included with the error. Give a generic\n # failure message. The library should supply debug\n # information in a log.\n message = 'Verification failed.'\n\n return self.render(request, message, css_class, openid_url)", "def notify_payment_success(self, **kwargs):\n return self.notify(\"notify_payment_success\", **kwargs)", "def _onSuccess(self, controller):\r\n if controller.order.paid_in_full:\r\n controller.cart.empty()\r\n for item in controller.order.orderitem_set.all():\r\n if item.product.is_subscription:\r\n item.completed = True\r\n item.save()\r\n try:\r\n curr_status = controller.order.orderstatus_set.latest() \r\n except OrderStatus.DoesNotExist:\r\n curr_status = None\r\n \r\n if (curr_status is None) or (curr_status.notes and curr_status.status == \"New\"):\r\n controller.order.add_status(status='New', notes = \"Order successfully submitted\")\r\n else:\r\n # otherwise just update and save\r\n if not curr_status.notes:\r\n curr_status.notes = _(\"Order successfully submitted\")\r\n curr_status.save() \r\n\r\n #Redirect to the success page\r\n url = controller.lookup_url('satchmo_checkout-success')\r\n return HttpResponseRedirect(url) \r\n\r\n else:\r\n log.debug('Order #%i not paid in full, sending to pay rest of balance', controller.order.id)\r\n #url = controller.order.get_balance_remaining_url()\r\n url = reverse('satchmo_balance_remaining')\r\n return HttpResponseRedirect(url)", "def test_customer_notified(self, mocked_notify_client):\n order = OrderPaidFactory()\n\n notify.quote_accepted(order)\n\n assert mocked_notify_client.send_email_notification.called\n call_args = mocked_notify_client.send_email_notification.call_args_list[0][1]\n assert call_args['email_address'] == order.get_current_contact_email()\n assert call_args['template_id'] == Template.quote_accepted_for_customer.value\n assert call_args['personalisation']['recipient name'] == order.contact.name\n assert call_args['personalisation']['embedded link'] == order.get_public_facing_url()", "def event_payu_com_dpn(self, **post):\n cr, uid, context = request.cr, request.uid, request.context\n payment_acquire = request.env['payment.acquirer'].sudo().search([('provider', '=', 'payu')])\n transactionDetails = {}\n transactionDetails['store'] = {}\n 
transactionDetails['store']['soapUsername'] = payment_acquire.payu_api_username\n transactionDetails['store']['soapPassword'] = payment_acquire.payu_api_password\n transactionDetails['store']['safekey'] = payment_acquire.payu_seller_account\n transactionDetails['store']['environment'] = payment_acquire.environment\n transactionDetails['additionalInformation'] = {}\n transactionDetails['additionalInformation']['payUReference'] = post['PayUReference']\n try:\n result = PayuController.payuMeaGetTransactionApiCall('', transactionDetails)\n payment_transation_id = request.env['payment.transaction'].sudo().search(\n [('reference', '=', result['merchantReference'])])\n payu_response = {}\n if result:\n payu_response['TRANSACTION_STATUS'] = result['transactionState']\n # payu_response['SUCCESSFUL'] = result['successful']\n payu_response['AMOUNT'] = payment_transation_id.amount * 100 if payment_transation_id else 0.00\n payu_response['CURRENCYCODE'] = result['basket']['currencyCode']\n payu_response['PAYUREFERENCE'] = result['payUReference']\n payu_response['REFERENCE'] = result['merchantReference']\n payu_response['RESULTMESSAGE'] = result['resultMessage']\n response_state = request.env['payment.transaction'].sudo().form_feedback(payu_response, 'payu')\n # response_state = PaymentTransactionCus.form_feedback('', payu_response, 'payu')\n # if response_state:\n # return werkzeug.utils.redirect('/shop/payment/validate')\n # else:\n # return werkzeug.utils.redirect('/shop/unsuccessful')\n\n sale_order_id = request.env['sale.order'].sudo().search([('name', '=', result['merchantReference'])])\n sale_order_data = sale_order_id\n request.session['sale_last_order_id'] = sale_order_id.id\n\n tx_id = request.env['payment.transaction'].sudo().search([('reference', '=', result['merchantReference'])])\n tx = tx_id\n if not sale_order_id or (sale_order_id.amount_total and not tx):\n return request.redirect('/shop')\n if (not sale_order_id.amount_total and not tx) or tx.state in ['pending']:\n if sale_order_id.state in ['draft', 'sent']:\n if (not sale_order_id.amount_total and not tx):\n sale_order_id.action_button_confirm()\n email_act = sale_order_id.action_quotation_send()\n elif tx and tx.state == 'cancel':\n sale_order_id.action_cancel()\n elif tx and (tx.state == 'draft' or tx.state == 'sent' or tx.state == 'done'):\n # if result and payu_response['successful'] and payu_response['TRANSACTION_STATUS'] in ['SUCCESSFUL', 'PARTIAL_PAYMENT', 'OVER_PAYMENT']:\n if result and payu_response['TRANSACTION_STATUS'] in ['SUCCESSFUL', 'PARTIAL_PAYMENT', 'OVER_PAYMENT']:\n transaction = tx.sudo().write(\n {'state': 'done', 'date_validate': datetime.now(),\n 'acquirer_reference': result['payUReference']})\n email_act = sale_order_id.action_quotation_send()\n action_confirm_res = sale_order_id.action_confirm()\n sale_order = sale_order_id.read([])\n # if sale_order_id.state == 'sale':\n # journal_ids = request.env['account.journal'].sudo().search([('name', '=', 'FNB 62085815143')], limit=1)\n # journal = journal_ids.read([])\n currency = request.env['res.currency'].sudo().search([('name', '=', 'ZAR')], limit=1)\n method = request.env['account.payment.method'].sudo().search([('name', '=', 'Manual')], limit=1)\n journal_id = request.env['account.journal'].sudo().search(\n [('name', '=', 'FNB - Cheque Account 6208585815143')], limit=1, order=\"id desc\")\n if journal_id:\n account_payment = {\n 'partner_id': sale_order[0]['partner_id'][0],\n 'partner_type': 'customer',\n 'journal_id': journal_id.id,\n # 
'invoice_ids':[(4,inv_obj.id,0)],\n 'amount': sale_order[0]['amount_total'],\n 'communication': sale_order_id.name,\n 'currency_id': currency.id,\n 'payment_type': 'inbound',\n 'payment_method_id': method.id,\n 'payment_transaction_id': tx.id,\n }\n acc_payment = request.env['account.payment'].sudo().create(account_payment)\n acc_payment.sudo().post()\n sale_order_id = request.session.get('sale_last_order_id')\n print(\"\\n\\n\\n\\n\\n\\n=======================sale order sale order======\", sale_order_id)\n sale_order_data = request.env['sale.order'].sudo().browse(sale_order_id)\n # if sale_order_data.project_project_id:\n # request.session['last_project_id'] = sale_order_data.project_project_id.id\n if response_state:\n sale_order_data.message_post(subject=\"T&C's Privacy Policy\",\n body=\"%s accepted T&C's and Privacy Policy.\" % sale_order_data.partner_id.name)\n return werkzeug.utils.redirect('/pay/thankyou')\n # return werkzeug.utils.redirect('/shop/confirmation')\n else:\n return werkzeug.utils.redirect('/event/unsuccessful')\n except Exception as e:\n return werkzeug.utils.redirect('/event/unsuccessful')", "def PostVerifyEmail(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def test_incoming_payment(node_factory):\n l1, l2 = node_factory.line_graph(2, opts=pluginopt, wait_for_announce=True)\n inv = l2.rpc.invoice(42, 'lbl', 'desc')['bolt11']\n l1.rpc.pay(inv)\n\n plugins = [p['name'] for p in l2.rpc.listconfigs()['plugins']]\n assert 'paytest.py' in plugins\n\n plugins = [p['name'] for p in l1.rpc.listconfigs()['plugins']]\n assert 'paytest.py' in plugins", "def callback(ch, method, properties, body):\n print(\"Callback body = '%s'\" % body)\n country_json = dict()\n body_json = json.loads(body)\n country_json[body_json['state']] = list()\n print(\" [x] Received %s\" % body_json)\n with connect_to_db(body_json['db_path']) as db_conn:\n cur = db_conn.cursor()\n cmd = create_country_purchases(cur, body_json['state'])\n\n cmd = create_country_total_purchases(cur, cmd, body_json['state'])\n\n create_country_json(cur, cmd, country_json, body_json['state'])\n\n xml_state = create_xml_file(cur, body_json, country_json)\n\n create_tables(cur, country_json, xml_state, body_json)\n\n create_json_table(cur, country_json, body_json)", "def _verify(self):\n\n def loading():\n for _ in range(3):\n print(Colors.yellow(\".\"), end=\"\")\n sys.stdout.flush()\n sleep(0.5)\n\n sys.stdout.write(Colors.yellow(\"verifying credentials\"))\n thread = Thread(target=loading()) # lol\n thread.daemon = True # kill this thread if program exits\n thread.start()\n\n api = self._authorize()\n try:\n me = api.me().screen_name\n except TweepError as e:\n raise ValueError(\"API might be disabled or you have invalid keys:\"\n f\"\\n\\t{self._extract_tweepy_error(e)}\")\n\n thread.join() # lol\n print(Colors.white(\" verified\\n\") +\n Colors.cyan(\"starting up bot \") + Colors.white(f\"@{me}!\\n\"))\n return api, me # api, the bot's handle", "def do_subscription_approval(sender, **kwargs):\r\n req_payment = sender.get_product_class().get_requires_payment_details()\r\n if not req_payment or has_valid_billing_details(sender.billing_account):\r\n status = 'approved'\r\n else:\r\n status = 'declined'\r\n sender.set_current_approval_status(status)\r\n return status", "def test_post_pull_request_review_bad_sign(\n self,\n validate_config,\n handler_class,\n conn,\n verify_signature\n ):\n\n 
conn.request.data.return_value = ''\n conn.request.headers.get.return_value = 'sha1=signature'\n verify_signature.side_effect = SignatureError(\"Error validating signature\")\n\n response = endpoints.post_pull_request_review({})\n\n handler = handler_class.return_value\n handler.get_config.return_value = \"config-data\"\n\n handler.get_statuses.assert_not_called()\n handler.is_authorized.assert_not_called()\n handler.post_status.assert_not_called()\n handler.get_config.assert_not_called()\n validate_config.assert_not_called()\n self.assertEqual(\n response,\n (\n {\n 'status': 'Signature Validation Error',\n 'message': 'Error validating signature'\n },\n 400\n )\n )", "def update_paypal(sender, **kwargs):\n ipn_obj = sender\n try:\n payment = json.loads(ipn_obj.custom)\n\n # try to get payment. if not exist, exception will be catched\n p = Payment.objects.filter(id=payment.get('id'), token=payment.get('token')).get()\n\n # update payment\n p.method = constants.PAYPAL\n p.ipn = ipn_obj\n p.save()\n\n # if payment is completed, so valid\n if ipn_obj.payment_status == ST_PP_COMPLETED:\n # check correct price , currency and mail\n if int(ipn_obj.mc_gross) == int(p.price.price) and \\\n ipn_obj.mc_currency == 'EUR' and \\\n ipn_obj.business == settings.PAYPAL_RECEIVER_EMAIL:\n # all is OK, update state\n p.state = True\n p.save()\n sendmail_payment_success(p)\n else:\n # TODO: send alert / mail\n return\n except Payment.DoesNotExist:\n # TODO: send alert / mail\n pass\n except:\n # TODO: send alert / mail\n pass", "def isw_callback(md=None, pa_res=None):\n url = cardinal_endpoint + '/collections/api/v1/pay/cardinalCallBack'\n\n headers = {'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n 'Accept-Encoding':'gzip, deflate, br',\n 'Accept-Language':'fr-FR,fr;q=0.9,en-GB;q=0.8,en;q=0.7,fr-CA;q=0.6,en-US;q=0.5',\n 'Content-Length':'4170',\n 'Content-Type':'application/x-www-form-urlencoded',\n 'User-Agent':'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36'}\n\n data = {'MD': md,'PaRes':pa_res}\n\n make_request = requests.post(url, data=data, headers=headers)\n visa_purchase = make_request.status_code\n print(visa_purchase)\n if visa_purchase == 200:\n return True\n else:\n return False", "async def fancysay(self, ctx):", "def payment_success(request):\r\n\tsecret_key = settings.SELLER_KEY\r\n\tpid = request.GET['pid']\r\n\tref = request.GET['ref']\r\n\tresult = request.GET['result']\r\n\t# Retrieve the cheksum value and validate it\r\n\tchecksumstr = \"pid={}&ref={}&result={}&token={}\".format(pid, ref, result, secret_key)\r\n\tm = md5(checksumstr.encode(\"ascii\"))\r\n\tchecksum = m.hexdigest()\r\n\tmalformed = False\r\n\tprint(\"calculated: \" + checksum)\r\n\tprint(\"received: \" + request.GET['checksum'] )\r\n\tif (checksum == request.GET['checksum'] ):\r\n\t\ttransaction = Transaction.objects.get(pk=pid)\r\n\t\ttransaction.state = Transaction.CONFIRMED\r\n\t\ttransaction.reference = ref\r\n\t\tgame = Game.objects.get(id = transaction.game.id)\r\n\t\ttransaction.save()\r\n\t\tinc_purchase = game.purchase_number + 1\r\n\t\tgame.purchase_number = inc_purchase\r\n\t\tgame.save()\r\n\t\tprint(\"about to call success\")\r\n\t\treturn render(request, 'success.html', {'game': game, 'MEDIA_URL': settings.MEDIA_URL, 'malformed': malformed})\r\n\telse:\r\n\t\ttransaction = Transaction.objects.get(pk=pid)\r\n\t\ttransaction.delete()\r\n\t\tmalformed = True\r\n\t\treturn render(request, 
'success.html', {\"malformed\": malformed})", "def onCheckout(self, controller):\n \n if askokcancel(\"Proceed\", \"Pay the order?\"):\n c = controller.customer\n package = {'customer_id':c.id, 'order_price':c.my_order.GetTotalPrice}\n msg = controller.transmit(package)\n \n if msg['order_received']:\n c.CheckOut(c.my_order.GetTotalPrice)\n c.Clear()\n controller.show_frame(PageThree)", "def test_verify_balance_behaviour(self, cred):\n # check the initial balance\n resp = requests.get(balance_url.format(cred[0], cred[1]))\n assert resp.status_code == 200\n assert resp.headers['Content-Type'] == 'application/json;charset=UTF-8'\n start_balance = resp.json()['value']\n # now init the verification process\n resp = requests.get(verify_url.format('json', cred[0], cred[1],\n 'TestApp', test_number))\n assert resp.status_code == 200\n assert resp.json()['status'] == '0'\n request_id = resp.json()['request_id']\n resp = requests.get(balance_url.format(cred[0], cred[1]))\n assert resp.status_code == 200\n assert start_balance == resp.json()['value']\n # terminate verification process\n assert 'Workflow terminated' in \\\n terminate_workflow(cred[0], cred[1], request_id).json()['error_text']\n resp = requests.get(balance_url.format(cred[0], cred[1]))\n assert resp.status_code == 200\n assert start_balance == resp.json()['value']", "def test_verified_consumer(self):\n self.prep_consumer()\n self.consumer.is_email_verified = True\n self.consumer.save()\n UnqualifiedConsumerEmailTask().run(test_mode=self.consumer)\n self.common_asserts()\n self.assertTrue('Use this link to confirm your email address.' not in\n mail.outbox[0].body)\n self.assertTrue('Confirm your email address with a single click.' not in\n mail.outbox[0].alternatives[0][0])\n self.assertEqual(mail.outbox[0].cc, [])\n self.assertTrue('Provide your cell phone number' \n in mail.outbox[0].alternatives[0][0])\n self.assertTrue('Provide your cell phone number. 
Follow this link:' \n in mail.outbox[0].body)", "def test_cancel_pending_payment(self):\n pass", "def handle_notify_request(self):\n request = self.request\n logger.debug(\"Handling Bambora notify request, params: {}.\".format(request.GET))\n\n order_number, _timestamp = request.GET.get(\"ORDER_NUMBER\", \"-\").split(\"-\")\n try:\n order = Order.objects.get(order_number=order_number)\n except Order.DoesNotExist:\n # Target order might be deleted after posting but before the notify arrives\n logger.warning(\"Notify: Order does not exist.\")\n return HttpResponse(status=204)\n\n order.invalidate_tokens()\n\n if not self.check_new_payment_authcode(request):\n return HttpResponse(status=204)\n\n return_code = request.GET[\"RETURN_CODE\"]\n if return_code == \"0\":\n logger.debug(\"Notify: Payment completed successfully.\")\n try:\n order.set_status(\n OrderStatus.PAID,\n \"Code 0 (payment succeeded) in Bambora Payform notify request.\",\n )\n except OrderStatusTransitionError as oste:\n logger.warning(oste)\n elif return_code == \"1\":\n # Don't cancel the order\n logger.debug(\"Notify: Payment failed.\")\n else:\n logger.debug('Notify: Incorrect RETURN_CODE \"{}\".'.format(return_code))\n\n return HttpResponse(status=204)", "def test_deny_pending_payment(self):\n pass", "def test_post_pull_request_review_unapproved(\n self,\n validate_config,\n handler_class,\n conn,\n verify_signature\n ):\n conn.request.data.return_value = ''\n conn.request.headers.get.return_value = 'sha1=signature'\n verify_signature.return_value = None\n\n handler = handler_class.return_value\n handler.get_config.return_value = {\n \"context1\": [\n \"whitelist1\"\n ],\n \"context2\": [\n \"whitelist2\"\n ]\n }\n\n validate_config.return_value = None\n\n data = {\n \"repository\": {\n \"name\": \"repo-name\",\n \"full_name\": \"repo-full-name\",\n \"owner\": {\n \"login\": \"repo-owner\"\n }\n },\n \"review\": {\n \"state\": \"changes-requested\",\n \"commit_id\": \"review-commit-id\",\n \"user\": {\n \"login\": \"review-user-login\"\n }\n }\n }\n\n response = endpoints.post_pull_request_review(data)\n\n handler.get_statuses.assert_not_called()\n handler.is_authorized.assert_not_called()\n handler.post_status.assert_not_called()\n self.assertEqual(response, ({'status': 'OK', 'message': 'Review state is not approved'}, 200))", "def verify_request(self, scopes):\n uri, http_method, body, headers = extract_params(False)\n return self.server.verify_request(uri, http_method, body, headers, scopes)", "def receive(channel):\n\n def callback(ch, method, properties, body):\n\n event = json.loads(body)\n event_info = event['event_info']\n event_type = event['type']\n success = True\n logger.info(f\"Received event {event}\")\n\n try:\n # Events coming from account microservice\n\n if event_type == USER_CREATED_EVENT:\n\n add_and_publish_event(\n GlobalPreferencesCreatedEvent(event['uuid'], event_info['id'], dict(\n vehicles=['bus', 'subway', 'train', 'tram', 'car', 'walking', 'bike', 'taxi',\n 'enjoy', 'mobike'],\n personal_vehicles=[])),\n PREFERENCES_CREATED)\n\n elif event_type == USER_DELETED_EVENT:\n\n add_and_publish_event(GlobalPreferencesDeletedEvent(event['uuid'], event_info['id']), PREFERENCES_DELETED)\n\n # Events generated in this microservice\n\n elif event_type == PREFERENCES_CREATED_EVENT:\n add_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == PREFERENCES_MODIFIED_EVENT:\n modify_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == PREFERENCES_DELETED_EVENT:\n 
delete_global_preferences(GlobalPreferences(**event_info))\n\n elif event_type == CALENDAR_CREATED_EVENT:\n add_calendar(Calendar(**event_info))\n\n elif event_type == CALENDAR_MODIFIED_EVENT:\n modify_calendar(Calendar(**event_info))\n\n elif event_type == CALENDAR_DELETED_EVENT:\n delete_calendar(Calendar(**event_info))\n\n except SQLAlchemyError as e:\n\n # to deal with at least once delivery of rabbitmq and the create methods which are not idempotent\n if (event_type == USER_CREATED_EVENT or event_type == PREFERENCES_CREATED_EVENT or event_type == CALENDAR_CREATED_EVENT) \\\n and method.redelivered and isinstance(e, IntegrityError):\n logger.info(f'Not processed redelivered event {event}')\n\n else:\n logger.info(f\"Couldn't process event {event}\")\n success = False\n\n finally:\n if success: # ack only if the event has been processed\n ch.basic_ack(delivery_tag=method.delivery_tag)\n logger.info(f\"Processed and acked event {event}\")\n\n # channel.basic_qos(prefetch_count=1)\n channel.basic_consume(callback,\n queue=CALENDAR_QUEUE)\n\n logger.info(\"Started listening to events\")\n channel.start_consuming()", "def _on_response(self, response_type, p_api1, p_api2, double1, double2, ptr1, size1, ptr2, size2, ptr3, size3):\n if self.debug:\n print \"Response: \", ord(response_type)\n if response_type == OnConnectionStatus.value:\n self._on_connect_status(p_api2, chr(int(double1)), ptr1, size1)\n elif self._callbacks:\n for callback in self._callbacks:\n if response_type == OnRtnDepthMarketData.value:\n if self._is_market:\n callback.on_market_rtn_depth_market_data_n(p_api2, ptr1)\n elif response_type == OnRspQryInstrument.value:\n obj = cast(ptr1, POINTER(InstrumentField)).contents\n callback.on_trading_rsp_qry_instrument(p_api2, obj, bool(double1))\n elif response_type == OnRspQryTradingAccount.value:\n obj = cast(ptr1, POINTER(AccountField)).contents\n callback.on_trading_rsp_qry_trading_account(p_api2, obj, bool(double1))\n elif response_type == OnRspQryInvestorPosition.value:\n obj = cast(ptr1, POINTER(PositionField)).contents\n callback.on_trading_rsp_qry_investor_position(p_api2, obj, bool(double1))\n elif response_type == OnRspQrySettlementInfo.value:\n obj = cast(ptr1, POINTER(SettlementInfoField)).contents\n callback.on_trading_rsp_qry_settlement_info(p_api2, obj, bool(double1))\n elif response_type == OnRtnOrder.value:\n obj = cast(ptr1, POINTER(OrderField)).contents\n callback.on_trading_rtn_order(p_api2, obj)\n elif response_type == OnRtnTrade.value:\n obj = cast(ptr1, POINTER(TradeField)).contents\n callback.on_trading_rtn_trade(p_api2, obj)\n elif response_type == OnRtnQuote.value:\n obj = cast(ptr1, POINTER(QuoteField)).contents\n callback.on_trading_rtn_quote(p_api2, obj)\n elif response_type == OnRtnQuoteRequest.value:\n obj = cast(ptr1, POINTER(QuoteRequestField)).contents\n callback.on_trading_rtn_quote_request(p_api2, obj)\n elif response_type == OnRspQryHistoricalTicks.value:\n obj = cast(ptr1, POINTER(TickField)).contents\n obj2 = cast(ptr2, POINTER(HistoricalDataRequestField)).contents\n callback.on_trading_rsp_qry_historical_ticks(p_api2, obj, obj2, bool(double1))\n elif response_type == OnRspQryHistoricalBars.value:\n obj = cast(ptr1, POINTER(BarField)).contents\n obj2 = cast(ptr2, POINTER(HistoricalDataRequestField)).contents\n callback.on_trading_rsp_qry_historical_bars(p_api2, obj, obj2, bool(double1))\n elif response_type == OnRspQryInvestor.value:\n obj = cast(ptr1, POINTER(InvestorField)).contents\n callback.on_trading_rsp_qry_investor(p_api2, obj)\n elif 
response_type == OnFilterSubscribe.value:\n instrument = c_char_p(ptr1).value\n callback.on_trading_filter_subscribe(p_api2, ExchangeType(double1), size1, size2, size3, instrument)\n elif response_type == OnRtnError.value:\n obj = cast(ptr1, POINTER(ErrorField)).contents\n if self._is_market:\n callback.on_market_rsp_error(p_api2, obj, bool(double1))\n else:\n callback.on_trading_rsp_error(p_api2, obj, bool(double1))", "def verify_ipn(data):\n data = dict(data)\n data['cmd'] = '_notify-validate'\n resp = requests.post(app.config['PAYPAL']['endpoint'], data=data)\n if resp.text == 'VERIFIED':\n return True\n return False", "def __verify(self):\r\n code = self.request.get('code')\r\n email = None\r\n error = False\r\n # resend if code is not given or in case of some error\r\n if code is not None and code != '':\r\n email = User.verify(code, self.request.remote_addr)\r\n if email is None:\r\n error = True\r\n\r\n if email is None:\r\n template_values = {\r\n 'user_email': self.user_email,\r\n 'error': error\r\n }\r\n template = self.jinja2_env.get_template('verification.html')\r\n self.response.out.write(template.render(template_values))\r\n\r\n # message\r\n template_values = {\r\n 'user_email': self.user_email,\r\n 'message': self.gettext('THANK_YOU')\r\n }\r\n template = self.jinja2_env.get_template('staticmessage.html')\r\n self.response.out.write(template.render(template_values))", "async def test_online_banking_emails(app, session, stan_server, event_loop, client_id, events_stan, future):\n # Call back for the subscription\n from account_mailer.worker import cb_subscription_handler\n\n # vars\n user = factory_user_model_with_contact()\n org = factory_org_model()\n factory_membership_model(user.id, org.id)\n id = org.id\n\n events_subject = 'test_subject'\n events_queue = 'test_queue'\n events_durable_name = 'test_durable'\n with patch.object(notification_service, 'send_email', return_value=None) as mock_send:\n # register the handler to test it\n await subscribe_to_queue(events_stan,\n events_subject,\n events_queue,\n events_durable_name,\n cb_subscription_handler)\n\n # add an event to queue\n mail_details = {\n 'amount': '100.00',\n 'creditAmount': '10.00',\n 'accountId': id\n }\n await helper_add_event_to_queue(events_stan, events_subject, org_id=id,\n msg_type=MessageType.ONLINE_BANKING_UNDER_PAYMENT.value,\n mail_details=mail_details)\n\n mock_send.assert_called\n assert mock_send.call_args.args[0].get('recipients') == 'foo@bar.com'\n assert mock_send.call_args.args[0].get('content').get(\n 'subject') == SubjectType.ONLINE_BANKING_PAYMENT_SUBJECT.value\n assert mock_send.call_args.args[0].get('attachments') is None\n assert mock_send.call_args.args[0].get('content').get('body') is not None\n\n await helper_add_event_to_queue(events_stan, events_subject, org_id=id,\n msg_type=MessageType.ONLINE_BANKING_OVER_PAYMENT.value,\n mail_details=mail_details)\n\n mock_send.assert_called\n assert mock_send.call_args.args[0].get('recipients') == 'foo@bar.com'\n assert mock_send.call_args.args[0].get('content').get(\n 'subject') == SubjectType.ONLINE_BANKING_PAYMENT_SUBJECT.value\n assert mock_send.call_args.args[0].get('attachments') is None\n assert mock_send.call_args.args[0].get('content').get('body') is not None\n\n await helper_add_event_to_queue(events_stan, events_subject, org_id=id,\n msg_type=MessageType.ONLINE_BANKING_PAYMENT.value,\n mail_details=mail_details)\n\n mock_send.assert_called\n assert mock_send.call_args.args[0].get('recipients') == 'foo@bar.com'\n assert 
mock_send.call_args.args[0].get('content').get(\n 'subject') == SubjectType.ONLINE_BANKING_PAYMENT_SUBJECT.value\n assert mock_send.call_args.args[0].get('attachments') is None\n assert mock_send.call_args.args[0].get('content').get('body') is not None", "def callback__callback(req, test_env=test_env):\n _path, _qs = req.url.split(\"?\")\n\n testapp = test_env[\"testapp_app\"]\n res = testapp.get(\n \"/application/flow-register/authorized-callback?%s\" % _qs,\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '303 See Other'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)", "def test_process_postpay_exception(self):\r\n baseline = {\r\n 'orderNumber': '1',\r\n 'orderCurrency': 'usd',\r\n 'decision': 'ACCEPT',\r\n }\r\n # tests for missing key\r\n for key in baseline:\r\n params = baseline.copy()\r\n del params[key]\r\n result = process_postpay_callback(params)\r\n self.assertFalse(result['success'])\r\n self.assertIsNone(result['order'])\r\n self.assertIn('error_msg', result['error_html'])", "def download_callback_verifier(num_bytes, total_bytes):\n if num_bytes > 0:\n msg = \"Verified Read access, with partial download {}/{} MiB\".format(num_bytes / 1.e6, total_bytes / 1.e6)\n raise VerificationComplete(msg)", "def on_signature_response(self, messages):\n for message in messages:\n # get cache object linked to this request and stop timeout from occurring\n cache = self._request_cache.pop(message.payload.identifier, SignatureRequestCache)\n\n old_submsg = cache.request.payload.message\n new_submsg = message.payload.message\n if __debug__: dprint(\"response \", new_submsg)\n\n old_body = old_submsg.packet[:len(old_submsg.packet) - sum([member.signature_length for member in old_submsg.authentication.members])]\n new_body = new_submsg.packet[:len(new_submsg.packet) - sum([member.signature_length for member in new_submsg.authentication.members])]\n\n result = cache.response_func(cache, new_submsg, old_body != new_body, *cache.response_args)\n assert isinstance(result, bool), \"RESPONSE_FUNC must return a boolean value! 
True to accept the proposed message, False to reject %s %s\"%(type(cache), str(cache.response_func)) \n if result:\n # add our own signatures and we can handle the message\n for signature, member in new_submsg.authentication.signed_members:\n if not signature and member.private_key:\n new_submsg.authentication.set_signature(member, member.sign(new_body))\n\n assert new_submsg.authentication.is_signed\n self.store_update_forward([new_submsg], True, True, True)", "def verify(self):\n ACTIVATION_PERIOD = datetime.timedelta(days=14)\n if not self.org_verified:\n self.org_verified = True\n if not self.is_active:\n if not self.activation_code:\n self.activation_code = random_url_safe_code()\n self.activate_by = datetime.datetime.utcnow() + ACTIVATION_PERIOD\n import messaging # avoid circular import\n messaging.send_activation_emails(self)\n self.save()", "def run(send_to_bank=False):\n\n # Signed request\n sk = read_signing_key_file(os.path.join(SIGNING_KEY_DIR, 'cv_nid'))\n signed_request = generate_signed_request(\n data={\n 'end': '2020-07-09T22:10:25Z',\n 'start': '2020-08-09T22:10:25Z'\n },\n nid_signing_key=sk\n )\n\n if send_to_bank:\n send_request_to_bank(signed_request)\n\n write_json(\n os.path.join(SIGNED_REQUESTS_DIR, 'signed-validator-confirmation-services-request.json'),\n signed_request\n )", "def collect_payment():\n\n # 1. Get required arguments\n args = Eg014Controller.get_args()\n try:\n # 2. Call the worker method\n results = Eg014Controller.worker(args)\n except ApiException as err:\n return process_error(err)\n\n # 3. Render success response\n return render_template(\n \"example_done.html\",\n title=\"Envelope sent\",\n h1=\"Envelope sent\",\n message=f\"\"\"The envelope has been created and sent!<br/> Envelope ID {results[\"envelope_id\"]}.\"\"\"\n )", "def completed(payment_id):\n epay = PaymentProcessor.epay\n EpayPayment = apps.get_model('epay', 'EpayPayment')\n with transaction.atomic():\n epay_payment = EpayPayment.objects.select_related('payment').get(payment_id=payment_id)\n payment = epay_payment.payment\n epay.capture(\n payment_id, payment.amount, epay_payment.approval_code,\n epay_payment.reference, currency=payment.currency)\n\n # epay_payment.change_status(\"caputred\")\n epay_payment.update_from_kkb()\n return epay_payment", "def test_callback_calls_celery_task(self, rf):\n product = product_factory()\n request = rf.post('/')\n\n url = request.build_absolute_uri(product.get_absolute_url())\n\n with patch('remindme.tasks.send_notification_email.delay') as task:\n product_in_stock_callback(\n self.__class__, product=product, request=request\n )\n task.assert_called_with(product.pk, product.title, url)", "def test_notification_batch(self):\n req = '''[{\"jsonrpc\": \"2.0\", \"method\": \"notify_hello\", \"params\": [7]},\n {\"jsonrpc\": \"2.0\", \"method\": \"notify_hello\", \"params\": [7]}\n ]'''\n resp = ''\n status = 204\n r_status, r_resp = self.exec_handler(req)\n self.assertEqual(r_status, status)\n self.assertEqual(r_resp, resp)", "def process_incoming_transaction(\n version,\n sender_address,\n sender_sub_address,\n receiver_address,\n receiver_sub_address,\n amount,\n currency,\n) -> None:\n # Check if the payment is intended for us - this address is configured via environment variable, see config.py\n if receiver_address != OnchainWallet().address_str:\n logging.debug(\"Received payment to unknown base address.\")\n raise WrongReceiverAddressException(\"wrongaddr\")\n\n # Locate payment id and payment options related to the given subaddress\n payment 
= Payment.find_by_subaddress(receiver_sub_address)\n if payment is None:\n logging.debug(\n f\"Could not find the qualifying payment {receiver_sub_address}, ignoring.\"\n )\n # TODO - Process for errant payments?\n raise PaymentForSubaddrNotFoundException(\"wrongsubaddr\")\n\n if payment.status != PaymentStatus.created:\n logging.debug(f\"Payment status is invalid: {payment.status}\")\n raise PaymentStatusException(\"invalidpaymentstatus\")\n\n if payment.is_expired():\n logging.debug(f\"Payment expired: {payment.expiry_date}. Rejecting.\")\n payment.set_status(PaymentStatus.rejected)\n db_session.commit()\n # TODO - Do we need a reaper process to simply mark expired payments as such?\n raise PaymentExpiredException(\"paymentexpired\")\n\n # verify payment matches any of the payment options for this payment id\n if not payment.is_payment_option_valid(amount, currency):\n logging.debug(\n \"Payment does not match any of the relevant payment options, ignoring.\"\n )\n # TODO - Set status to rejected here or ignore?\n raise PaymentOptionNotFoundException(\"paymentoptionnotfound\")\n\n # We're good - mark as cleared\n logging.debug(f\"Clearing payment id {payment.id}\")\n payment.set_status(PaymentStatus.cleared)\n # version is tx_id\n\n payment.add_chain_transaction(\n identifier.encode_account(sender_address, sender_sub_address, CHAIN_HRP),\n amount,\n currency,\n version,\n )\n db_session.commit()", "def oauth2callback():\n app.logger.debug(\"Entering oauth2callback\")\n flow = client.flow_from_clientsecrets(\n CLIENT_SECRET_FILE,\n scope= SCOPES,\n redirect_uri=flask.url_for('oauth2callback', _external=True))\n ## Note we are *not* redirecting above. We are noting *where*\n ## we will redirect to, which is this function. \n \n ## The *second* time we enter here, it's a callback \n ## with 'code' set in the URL parameter. If we don't\n ## see that, it must be the first time through, so we\n ## need to do step 1. \n app.logger.debug(\"Got flow\")\n if 'code' not in flask.request.args:\n app.logger.debug(\"Code not in flask.request.args\")\n auth_uri = flow.step1_get_authorize_url()\n return flask.redirect(auth_uri)\n ## This will redirect back here, but the second time through\n ## we'll have the 'code' parameter set\n else:\n ## It's the second time through ... we can tell because\n ## we got the 'code' argument in the URL.\n app.logger.debug(\"Code was in flask.request.args\")\n auth_code = flask.request.args.get('code')\n credentials = flow.step2_exchange(auth_code)\n flask.session['credentials'] = credentials.to_json()\n ## Now I can build the service and execute the query,\n ## but for the moment I'll just log it and go back to\n ## the main screen\n app.logger.debug(\"Got credentials\")\n return flask.redirect(flask.url_for('choose'))", "def oauth2callback():\n app.logger.debug(\"Entering oauth2callback\")\n flow = client.flow_from_clientsecrets(\n CLIENT_SECRET_FILE,\n scope= SCOPES,\n redirect_uri=flask.url_for('oauth2callback', _external=True))\n ## Note we are *not* redirecting above. We are noting *where*\n ## we will redirect to, which is this function. \n \n ## The *second* time we enter here, it's a callback \n ## with 'code' set in the URL parameter. If we don't\n ## see that, it must be the first time through, so we\n ## need to do step 1. 
\n app.logger.debug(\"Got flow\")\n if 'code' not in flask.request.args:\n app.logger.debug(\"Code not in flask.request.args\")\n auth_uri = flow.step1_get_authorize_url()\n return flask.redirect(auth_uri)\n ## This will redirect back here, but the second time through\n ## we'll have the 'code' parameter set\n else:\n ## It's the second time through ... we can tell because\n ## we got the 'code' argument in the URL.\n app.logger.debug(\"Code was in flask.request.args\")\n auth_code = flask.request.args.get('code')\n credentials = flow.step2_exchange(auth_code)\n flask.session['credentials'] = credentials.to_json()\n ## Now I can build the service and execute the query,\n ## but for the moment I'll just log it and go back to\n ## the main screen\n app.logger.debug(\"Got credentials\")\n return flask.redirect(flask.url_for('choose'))", "def test_checkout_process(self):\n product = self.create_product()\n\n res = self.do_get_basket()\n self.assertEqual(res.status_code, 200)\n basket_id = res.data['id']\n\n res = self.do_add_to_basket(product.id)\n self.assertEqual(res.status_code, 200)\n\n cs_url, cs_data = self.do_sign_auth_request(basket_id)\n\n res = self.do_cybersource_post(cs_url, cs_data)\n self.assertEqual(res.status_code, 302)\n self.check_finished_order(cs_data['reference_number'], product.id)", "async def test_webhook_endpoint_generates_telegram_callback_event(\n hass: HomeAssistant,\n webhook_platform,\n hass_client: ClientSessionGenerator,\n update_callback_query,\n) -> None:\n client = await hass_client()\n events = async_capture_events(hass, \"telegram_callback\")\n\n response = await client.post(TELEGRAM_WEBHOOK_URL, json=update_callback_query)\n assert response.status == 200\n assert (await response.read()).decode(\"utf-8\") == \"\"\n\n # Make sure event has fired\n await hass.async_block_till_done()\n\n assert len(events) == 1\n assert events[0].data[\"data\"] == update_callback_query[\"callback_query\"][\"data\"]", "def receive_confirmation(self):\n #print(\"(%d) receive_confirmation:\" % int(time.time()))\n #print(\" **> state:\", self.state)\n if self.state != KeyExchangeManager.STATE_CONFIRMING:\n return\n rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))\n self.set_invoke_timer(rand_time)\n self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)\n self.key_name = self.pending_key_name\n self._set_state(KeyExchangeManager.STATE_ESTABLISHED)\n #print(\"*STATE_ESTABLISHED\")", "def on_incoming_call(self, call):\n\n try:\n current_time = time.time()\n remote_uri = hash_remote_uri(self.cfg, call.info().remote_uri)\n\n if not self.cfg['VoipIO']['reject_calls']:\n if self.voipio.black_list[get_user_from_uri(remote_uri)] < current_time:\n # answer the call\n self.voipio.call = call\n self.voipio.on_incoming_call(remote_uri)\n\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Incoming call from %s\" % remote_uri)\n\n call_cb = CallCallback(self.cfg, call, self.voipio)\n call.set_callback(call_cb)\n\n call.answer()\n else:\n # rejected the call since the caller is blacklisted\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Rejected call from blacklisted remote URI %s \" % remote_uri)\n wait_hours = (self.voipio.black_list[get_user_from_uri(remote_uri)] - current_time) / (60 * 60)\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Must wait for %d hours\" % 
wait_hours)\n # respond by \"Busy here\"\n call.answer(486)\n\n self.voipio.on_rejected_call_from_blacklisted_uri(remote_uri)\n else:\n # reject the call since all calls must be rejected\n if self.cfg['VoipIO']['debug']:\n self.cfg['Logging']['system_logger'].debug(\"AccountCallback::on_incoming_call - Rejected call from %s\" % remote_uri)\n\n # respond by \"Busy here\"\n call.answer(486)\n # respond by \"Decline\"\n #call.answer(603)\n\n self.voipio.on_rejected_call(remote_uri)\n except:\n self.voipio.close_event.set()\n self.cfg['Logging']['system_logger'].exception('Uncaught exception in the AccountCallback class.')\n raise", "def create_webhook(client_id, client_secret, verify_token, callback_url):\n url = 'https://www.strava.com/api/v3/push_subscriptions'\n payload = {\n 'client_id': client_id,\n 'client_secret': client_secret,\n 'verify_token': verify_token,\n 'callback_url': callback_url}\n response = helper.send_http_request(url, \"POST\", payload=payload, use_proxy=False)\n\n try:\n response.raise_for_status()\n except Exception:\n if 'already exists' in response.text:\n webhook_details = get_webhook(client_id, client_secret)\n helper.log_info(webhook_details)\n if 'GET to callback URL does not return 200' in response.text:\n helper.log_error(f'Error: Strava can\\'t reach {callback_url}')\n if 'not verifiable' in response.text:\n helper.log_error(f'Error: Strava can\\'t verify {callback_url}. URL incorrect or server not using public CA certificate.')\n else:\n helper.log_error(f'{response.status_code} Error: {response.text}')\n else:\n response = response.json()\n helper.log_info(f\"Webhook created successfully: ID {response['id']}\")", "def verify_auth_request(self, *args, **kwargs):\n if len(args) == 1:\n url = args[0]\n qs = get_query_string(url)\n response_type = qs.pop('response_type', None)\n client_id = qs.pop('client_id', None)\n redirect_uri = qs.pop('redirect_uri', None)\n scope = qs.pop('scope', None)\n state = qs.pop('state', None)\n\n elif len(args) == 2:\n response_type = args[0]\n client_id = args[1]\n\n redirect_uri = kwargs.pop('redirect_uri', None)\n scope = kwargs.pop('scope', None)\n state = kwargs.pop('state', None)\n\n if not client_id: \n return self.invalid_request(\n error_description = 'client_id is required'\n , redirect_uri = redirect_uri\n , state = state\n )\n\n if not response_type:\n return self.invalid_request(\n error_description = 'response_type is required'\n , redirect_uri = redirect_uri\n , state = state\n )\n\n is_client_id_valid = self.verify_client_id(client_id)\n\n if not is_client_id_valid:\n return self.unauthorized_client(\n redirect_uri = redirect_uri\n , state = state\n )\n\n\n if redirect_uri == None:\n redirect_uri = self.get_redirect_uri(client_id)\n\n is_redirect_uri_valid = self.verify_redirect_uri(client_id,\n redirect_uri)\n\n if not is_redirect_uri_valid:\n return self.invalid_request()\n\n is_scope_valid = self.verify_scope(scope)\n\n if not is_scope_valid:\n return self.invalid_scope(\n redirect_uri = redirect_uri\n , state = state\n )\n\n is_authenticated = self.authenticate_user()\n\n if not is_authenticated:\n return self.access_denied(\n redirect_uri = redirect_uri\n , state = state\n )\n\n if response_type == 'code':\n # We are doing 4.1.1\n code = self.generate_authorization_code()\n\n # Save information to be used to validate later requests\n self.save_auth_code(\n client_id\n , code\n , scope\n , redirect_uri\n )\n\n new_qs = {'code': code}\n\n if state:\n new_qs['state'] = state\n\n return {\n 'redirect_uri': 
clean_url(redirect_uri, new_qs,\n should_force_ssl=self.should_force_ssl\n )\n }\n\n elif response_type == 'token':\n # We are doing 4.2.1\n token = self.generate_access_token()\n\n self.save_auth_token(token, None)\n\n # don't issue a refresh token in this mode\n\n #TODO: If scope is different than requested, return it\n\n return {'access_token': token }\n else:\n return self.unsupported_response_type(\n redirect_uri = redirect_uri\n , state = state\n )", "def webhook():\n if request.method == 'POST':\n data = request.get_json(force=True)\n # For logging and debugging, print incoming requests\n print(request, '\\n', data)\n # The two LNbits extensions used return data in\n # different formats. This try-except handles both.\n try:\n sats = int(data['amount'] / 1000)\n comment = data['comment']\n except KeyError:\n sats = int(data['amount'])\n comment = data['description']\n if not comment:\n comment = \"No message!\"\n amount = convert_to_fiat(sats, 'usd')\n url = \"https://streamlabs.com/api/v1.0/donations\"\n data = {\n \"name\": \"bitcoin\",\n \"message\": f\"{str(sats)} sats: {comment}\",\n \"identifier\": \"bitcoin_donos\",\n \"amount\": amount,\n \"currency\": fiat.upper(),\n \"access_token\": access_token,\n }\n response = requests.post(url, data=data)\n # For logging/debugging purposes\n print(response.json())\n return \"Success!\", 200\n else:\n abort(400)", "def paypalTransationComplete(request):\n\n\tif request.method == \"POST\":\n\n\t\t# Get the system discound\n\t\tsystem_discount = True if request.user.is_authenticated else False\n\t\tif system_discount:\n\t\t\tdiscount_per = 20\n\n\t\tform = PayPalForm(request.POST)\n\n\t\tif form.is_valid():\n\n\t\t\t# Check if a counpon was used (discount_per_db is the coupon discount)\n\t\t\tdiscount_per_db = 0\n\t\t\tif form.cleaned_data.get(\"coupon_code\"):\n\t\t\t\ttry:\n\t\t\t\t\tdiscount_per_db += CouponCodes.objects.get(code=form.cleaned_data.get(\"coupon_code\")).percentage\n\t\t\t\texcept CouponCodes.DoesNotExist:\n\t\t\t\t\tprint(\"Coupon code does not exist.\")\n\n\t\t\t# Get the Order Data from PayPal\n\t\t\tpaypal_order_data = GetOrder(form.cleaned_data.get(\"order_id\"))\n\n\t\t\tsubtotal = 0\n\t\t\t# Get the price of each Product and add them to the subtotal\n\t\t\tfor cart in json.loads(form.cleaned_data.get(\"cart_data\")):\n\t\t\t\ttry:\n\t\t\t\t\tproduct = Products.objects.get(hash_key=cart[\"product_id\"])\n\t\t\t\t\tsubtotal += ((product.price * (100 - discount_per) / 100) * (100 - discount_per_db) / 100 if system_discount else product.price * (100 - discount_per_db) / 100) * cart[\"quantity\"]\n\t\t\t\texcept Products.DoesNotExist:\n\t\t\t\t\treturn JsonResponse({\"STATUS\": True, \"TC\": False, \"error_message\": f\"Please remove product at position: {cart['cart_id']} in your cart.\"})\n\n\t\t\t# Open the Country JSON file to give the user country codes to select from in the form\n\t\t\twith open(os.getcwd()+\"/country_codes/country_JSON_v1.json\") as f:\n\t\t\t\tcountry_data = json.load(f)\n\t\t\tf.close()\n\t\t\t\n\t\t\tshipping = 0\n\t\t\t# Look for the Continet that will be shipped to and charge by continet\n\t\t\tfor country in country_data:\n\t\t\t\tif form.cleaned_data.get(\"country\") == country[\"Country_Name\"]:\n\t\t\t\t\tif country[\"Continent\"] in [\"EU\", \"NA\"]:\n\t\t\t\t\t\tshipping = 21.34\n\t\t\t\t\telse:\n\t\t\t\t\t\tshipping = 23.48\n\n\t\t\t# Make sure the prices of the products and shipping match the price paid on paypal match\n\t\t\tif '{:,.2f}'.format(subtotal + shipping) != 
'{:,.2f}'.format(float(paypal_order_data[\"purchase_units\"][0][\"amount\"][\"value\"])):\n\t\t\t\treturn JsonResponse({\"STATUS\": True, \"TC\": False, \"error_message\": \"The prices have changed. Sorry try again.\"})\n\n\t\t\t# Create the order in the Database before Capturing the funds\n\t\t\tdatabase_order_data = Orders.objects.create(\n\t\t\t\tpaypal_auth=form.cleaned_data.get(\"AuthorizationID\"),\n\t\t\t\tpaypal_order_id=form.cleaned_data.get(\"order_id\"),\n\t\t\t\tpaypal_data=GetOrder(form.cleaned_data.get(\"order_id\")),\n\t\t\t\tcart_data=json.loads(form.cleaned_data.get(\"cart_data\")),\n\t\t\t\tcountry=form.cleaned_data.get(\"country\"),\n\t\t\t\tsubtotal=subtotal,\n\t\t\t\tshipping_cost=shipping,\n\t\t\t\tregistered_user= True if request.user.is_authenticated else False,\n\t\t\t\tuser_pk= request.user.pk if request.user.is_authenticated else None,\n\t\t\t\tdiscount_per={\"user_discount\": discount_per if system_discount else 0, \"coupon_discount\": discount_per_db},\n\t\t\t)\n\n\t\t\t# Authorize the transaction after placing it in the Database\n\t\t\t#CaptureAuthorization().capture_auth(form.cleaned_data.get(\"AuthorizationID\"))\n\n\t\t\tCapturePayPalOrder(form.cleaned_data.get(\"order_id\"))\n\n\t\t\t# Update the PayPal Data to the CAPTURED one which contains more detail\n\t\t\tdatabase_order_data.paypal_data = GetOrder(form.cleaned_data.get(\"order_id\"))\n\t\t\tdatabase_order_data.save()\n\n\t\t\treturn JsonResponse({\"STATUS\": True, \"TC\": True, \"order_id\": database_order_data.hash_key})\n\t\telse:\n\n\t\t\t# Cannot use the custom form error catcher here because of the Json response\n\t\t\tfor _inputs in [\"AuthorizationID\", \"cart_data\", \"order_id\", \"country\", \"coupon_code\"]:\n\t\t\t\ttry:\n\t\t\t\t\t# Check if the error return has the '_inputs' details of why it's invalid\n\t\t\t\t\tif form.errors.get_json_data()[_inputs] and form.errors.get_json_data()[_inputs][0][\"code\"] in [\"required\", \"max_length\", \"min_length\", \"invalid\"]:\n\t\t\t\t\t\treturn JsonResponse({\"STATUS\": True, \"TC\": False, \"error_message\": form.errors.get_json_data()[_inputs][0][\"message\"]})\n\t\t\t\t\telif form.errors.get_json_data()[_inputs]:\n\t\t\t\t\t\treturn JsonResponse({\"STATUS\": True, \"TC\": False, \"error_message\": form.errors.get_json_data()[_inputs][0][\"message\"]})\n\t\t\t\texcept KeyError:\n\t\t\t\t\tprint(f\"Form validation error '{_inputs}' cannot be found!\")\n\n\t\t\treturn JsonResponse({\"STATUS\": True, \"TC\": False, \"error_message\": \"Something wen't wrong. 
Try again!\"})", "def verify_receipt(receipt_data, user=None):\n #data = json.dumps({'receipt-data': '{' + receipt_data + '}'})\n data = '{{\\n \"receipt-data\" : \"{}\" \\n}}'.format(receipt_data)\n\n def verify(url):\n tries = 3\n for try_ in range(1, tries + 1):\n try:\n req = urllib2.Request(url, data)\n resp = urllib2.urlopen(req, timeout=18) # app timeout is supposed to be 60\n return json.loads(resp.read())\n except (urllib2.URLError, socket_error) as e:\n if try_ == tries:\n raise e\n\n cleaned_data = verify(settings.IAP_VERIFICATION_URL)\n\n # See: http://developer.apple.com/library/ios/#technotes/tn2259/_index.html\n if cleaned_data['status'] == 21007:\n cleaned_data = verify(settings.IAP_VERIFICATION_SANDBOX_URL)\n\n if cleaned_data['status'] != 0:\n extra = {'status': cleaned_data['status']}\n if user is not None and user.is_authenticated():\n extra['username'] = user.username\n extra['response_from_apple'] = json.dumps(cleaned_data)\n client.captureMessage('IAP receipt validation failed', extra=extra)\n raise ValidationError(\"Your purchase went through, but there was an error processing it. Please contact support: support@example.com\")\n\n return cleaned_data['receipt']", "def testCaptureResponse(self):\n message = (mavutil.mavlink.GOPRO_COMMAND_CAPTURE_MODE, mavutil.mavlink.GOPRO_REQUEST_SUCCESS, (CAPTURE_MODE_BURST, 0, 0, 0))\n self.mgr.get_response_callback('vehicle','name', message)\n self.assertEqual( self.mgr.captureMode, CAPTURE_MODE_BURST)\n self.mgr.processMsgQueue.assert_called_with()\n self.mgr.sendState.assert_called_with()", "def make_payment():\n\n response = VoiceResponse()\n if 'caller_name' not in session:\n session['caller_name'] = request.args.get(\n 'caller_name') or \"Twilio Payment\"\n if 'payment_amount' not in session:\n session['payment_amount'] = request.args.get('amount') or \"5000\"\n if 'card_number' not in session:\n response.redirect('/get_card_number')\n elif 'expiry' not in session:\n response.redirect('/get_expiry')\n elif 'cvv' not in session:\n response.redirect('/get_cvv')\n else:\n call_sid = request.form.get('CallSid')\n session['call_sid'] = call_sid\n response.redirect('/process_payment')\n\n return str(response)", "def oauth2callback():\n app.logger.debug(\"Entering oauth2callback\")\n if(isMain):\n flow = client.flow_from_clientsecrets(\n CLIENT_SECRET_FILE,\n scope=SCOPES,\n redirect_uri=flask.url_for('oauth2callback', _external=True))\n else:\n \t# from Heroku, a clientID and client secrets are needed for OAuth.\n \t# Normally these are taken from client_secrets.json, \n \t# but they can be manually entered, eliminating the need for the .json file\n flow = OAuth2WebServerFlow(client_id=clientId,\n client_secret=clientSecret,\n scope=SCOPES,\n redirect_uri=flask.url_for('oauth2callback', _external=True))\n\n # Note we are *not* redirecting above. We are noting *where*\n # we will redirect to, which is this function.\n\n # The *second* time we enter here, it's a callback\n # with 'code' set in the URL parameter. If we don't\n # see that, it must be the first time through, so we\n # need to do step 1.\n app.logger.debug(\"Got flow\")\n if 'code' not in flask.request.args:\n app.logger.debug(\"Code not in flask.request.args\")\n auth_uri = flow.step1_get_authorize_url()\n return flask.redirect(auth_uri)\n # This will redirect back here, but the second time through\n # we'll have the 'code' parameter set\n else:\n # It's the second time through ... 
we can tell because\n # we got the 'code' argument in the URL.\n app.logger.debug(\"Code was in flask.request.args\")\n auth_code = flask.request.args.get('code')\n credentials = flow.step2_exchange(auth_code)\n flask.session['credentials'] = credentials.to_json()\n # Now I can build the service and execute the query,\n # but for the moment I'll just log it and go back to\n # the main screen\n app.logger.debug(\"Got credentials\")\n return flask.redirect(flask.url_for('choose'))", "def purchased_callback(self):\r\n try:\r\n verification_attempt = SoftwareSecurePhotoVerification.active_for_user(self.course_enrollment.user)\r\n verification_attempt.submit()\r\n except Exception as e:\r\n log.exception(\r\n \"Could not submit verification attempt for enrollment {}\".format(self.course_enrollment)\r\n )\r\n self.course_enrollment.change_mode(self.mode)\r\n self.course_enrollment.activate()", "def confirm_handle(self, update, context):\n chat_id = update.effective_chat.id\n response_code = update.callback_query[\"data\"] # caution_ok or caution_cancel\n request_id = context.user_data[\"reviewed_request\"]\n log.info(\"In progress req:%s %s\", request_id, response_code)\n\n if response_code == \"handle_onmyway\":\n # they pressed \"I am 'on my way' in the GUI\"\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=f\"{c.MSG_SAFETY_INSTRUCTIONS} \\n\\n {c.MSG_LET_ME_KNOW_ARRIVE} \\n\\n p.s. {c.MSG_SAFETY_REMINDER}\",\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=InlineKeyboardMarkup(k.inprogress_choices),\n )\n self.backend.update_request_status(request_id, \"onprogress\")\n\n elif response_code == \"handle_done\":\n # they pressed 'Mission accomplished' in the GUI\n self.send_message_ex(chat_id, c.MSG_THANKS_FEEDBACK)\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=c.MSG_FEEDBACK_EXPENSES,\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=InlineKeyboardMarkup(k.endgame_choices),\n )\n context.user_data[\"state\"] = c.State.EXPECTING_AMOUNT\n self.backend.update_request_status(request_id, \"done\")\n\n elif response_code == \"handle_no_expenses\":\n # they indicated no compensation is required; proceed to the exit survey and ask some additional questions\n # about this request\n self.send_exit_survey(update, context)\n context.user_data[\"state\"] = c.State.EXPECTING_EXIT_SURVEY\n\n elif response_code == \"handle_cancel\":\n # they bailed out at some point while the request was in progress\n self.send_message(chat_id, c.MSG_NO_WORRIES_LATER)\n context.user_data[\"reviewed_request\"] = None\n context.user_data[\"state\"] = c.State.AVAILABLE\n self.backend.update_request_status(request_id, \"cancelled\")" ]
[ "0.6399683", "0.61258787", "0.5970598", "0.58396447", "0.5839387", "0.5666613", "0.5610431", "0.56041867", "0.55926824", "0.5504206", "0.55006534", "0.54768145", "0.54386526", "0.53870726", "0.536934", "0.53045034", "0.5277781", "0.5267446", "0.5254518", "0.5215958", "0.5209088", "0.5205473", "0.51849633", "0.51714885", "0.51574993", "0.51463306", "0.511778", "0.50850415", "0.5064915", "0.5064541", "0.50580025", "0.50431824", "0.5035292", "0.5032544", "0.5028079", "0.50230056", "0.50223583", "0.500512", "0.49660286", "0.49558955", "0.49301547", "0.49130386", "0.49108732", "0.49103177", "0.49102503", "0.4897129", "0.488084", "0.48530087", "0.48461097", "0.4841504", "0.48361313", "0.48247692", "0.48063675", "0.48051468", "0.4802253", "0.47981772", "0.4790668", "0.478687", "0.47838798", "0.4781761", "0.47731966", "0.47663093", "0.4763453", "0.47606918", "0.47586444", "0.47402477", "0.47348613", "0.47273386", "0.47142777", "0.4709094", "0.47036955", "0.4703133", "0.46950257", "0.46836165", "0.46718404", "0.46702993", "0.46702692", "0.46654037", "0.46651077", "0.46636268", "0.46614286", "0.46588457", "0.46505815", "0.46505776", "0.46481636", "0.46481636", "0.46414056", "0.46409553", "0.4626799", "0.46244064", "0.46230453", "0.46169683", "0.4616896", "0.46051538", "0.45999736", "0.45999312", "0.45996612", "0.45983705", "0.45972317", "0.45971328" ]
0.61163276
2
Retrieve a list of recent donations.
def recent(self, limit):
    def _cb(players, donations):
        donators = []
        for donation in donations:
            player = players[donation.donator.steamID].copy()
            player['date'] = donation.date.asPOSIXTimestamp()
            player['amount'] = str(donation.amount)
            donators.append(player)
        return donators

    donations = []
    steamids = set()
    for donation in self.store.query(Donation,
                                     AND(Donation.donator == Donator.storeID,
                                         Donator.anonymous == False,
                                         Donator.steamID != None),
                                     limit=limit,
                                     sort=Donation.date.descending):
        steamids.add(donation.donator.steamID)
        donations.append(donation)
    d = self.getPlayerSummaries(steamids)
    d.addCallback(_cb, donations)
    return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_list_of_donations():\n try:\n logger.info('opening get_list_of_donations database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n query_results = (Donations.select(Donations.id, Donations.donation_date,\n Donations.donation_amount, Donations.donated_by_id.alias('fullname')))\n return query_results\n except Exception as e:\n logger.info(f'Error getting list of donors')\n logger.info(e)\n\n finally:\n logger.info('closing get_list_of_donations database call')\n database.close()", "def donations(self):\n return self.caller.player.Dominion.assets.donations.all().order_by(\"amount\")", "async def api_get_donations(g: WalletTypeInfo = Depends(get_key_type)):\n user = await get_user(g.wallet.user)\n wallet_ids = user.wallet_ids if user else []\n donations = []\n for wallet_id in wallet_ids:\n new_donations = await get_donations(wallet_id)\n donations += new_donations if new_donations else []\n return [donation.dict() for donation in donations] if donations else []", "def get(self, request):\n concerts = Concert.objects.order_by('date_time')\n if concerts:\n last_updated = concerts[0].date_scraped\n # TODO handle empty DB\n else:\n last_updated = datetime(1900,1,1)\n context = {\n 'concert_list': concerts,\n 'last_updated': last_updated,\n }\n return render(request, 'concerts/concert_list.html', context)", "def GetAllDateOfPaymentOfCost():\n\n logs.logger.debug(\n \"Start to get back all payment date of Cost objects from database.\")\n try:\n searchedCostsItems = session.query(Cost.Cost).all()\n logs.logger.info(\n \"Get back all payment date of Cost objects from database.\")\n return [CostItems.dateOfPayment for CostItems in searchedCostsItems]\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def get_list_of_donors():\n try:\n logger.info('opening get_list_of_donors database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n return Donors.select()\n\n except Exception as e:\n logger.info(e)\n\n finally:\n database.close()", "def list_donations(self, caller):\n msg = \"{wDonations:{n\\n\"\n table = PrettyTable([\"{wGroup{n\", \"{wTotal{n\"])\n for donation in self.donations:\n table.add_row([str(donation.receiver), donation.amount])\n msg += str(table)\n caller.msg(msg)", "def GetAllDifferentDateOfPaymentOfCost():\n\n logs.logger.debug(\n \"Start to get back all different payment date of \"\n \"Cost objects from database.\")\n try:\n ListOfAllDifferentDateOfPaymentOfCost = []\n searchedCostsItems = GetAllDateOfPaymentOfCost()\n for item in searchedCostsItems:\n if item not in ListOfAllDifferentDateOfPaymentOfCost:\n ListOfAllDifferentDateOfPaymentOfCost.append(item)\n logs.logger.info(\n \"Get back all different payment date of \"\n \"Cost objects from database.\")\n return ListOfAllDifferentDateOfPaymentOfCost\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def getDonates(id):\n contributor = db.find_one({'_id': ObjectId(id)})\n print(contributor)\n return jsonify({\n '_id': str(ObjectId(contributor['_id'])),\n 'name': contributor['name'],\n 'amount': contributor['amount']\n })", "def get_max_donation_date_list():\n try:\n logger.info('opening get_max_donation_date_list database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n\n query_max_date = (Donations\n .select(Donations.donated_by_id.alias('fullname'),\n fn.MAX(Donations.donation_date).alias(\n 'last_donation_date'),\n Donations.donation_amount.alias('last_donation'))\n 
.group_by(Donations.donated_by_id)\n )\n return query_max_date\n\n except Exception as e:\n logger.info(e)\n\n finally:\n database.close()\n logger.info('closing get_max_donation_date_list database call')", "def GetAllRegistrationDateOfCost():\n\n logs.logger.debug(\"Start to get back all registration date of\\\n Cost objects from database.\")\n try:\n searchedCostsItems = session.query(Cost.Cost).all()\n logs.logger.info(\n \"Get back all registration date of Cost objects from database.\")\n return [CostItems.registrationDate for CostItems in searchedCostsItems]\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def fetch_review(self):\n c = self.db.cursor()\n c.execute(\"\"\"SELECT * FROM cards\n WHERE date_last_reviewed < (DATETIME('now', 'localtime', '-8 hours'))\n OR correct = 0\"\"\")\n rows = c.fetchall()\n cards = [\n Card(\n id=id,\n card_type=card_type,\n text=text,\n created=created,\n uri=uri,\n updated=updated,\n difficulty=difficulty,\n days_between=days_between,\n date_last_reviewed=date_last_reviewed,\n correct=correct,\n )\n for id, card_type, text, uri, created, updated, difficulty, days_between, date_last_reviewed, correct in rows\n ]\n cards = filter(lambda card: card.percent_overdue >= 1, cards)\n cards = sorted(cards, key=lambda card: card.percent_overdue)\n\n return cards[:20]", "def print_donor_list():\n print('Below are the existing donors: ')\n for donor in donors_data:\n print('\\t- ', donor[\"name\"], ' ', donor[\"donations\"])", "def GetAllDifferentRegistrationDateOfCost():\n\n logs.logger.debug(\n \"Start to get back all different registration date of \"\n \"Cost objects from database.\")\n try:\n ListOfAllDifferentRegistrationDateOfCost = []\n searchedCostsItems = GetAllRegistrationDateOfCost()\n for item in searchedCostsItems:\n if item not in ListOfAllDifferentRegistrationDateOfCost:\n ListOfAllDifferentRegistrationDateOfCost.append(item)\n logs.logger.info(\n \"Get back all different registration date of \"\n \"Cost objects from database.\")\n return ListOfAllDifferentRegistrationDateOfCost\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def getPurchaseDates(self):\n\t\treturn self.dateList", "def last_donation(self):\n return self._donations[-1]", "def print_all_donor_donations():\n print(\"\\nList of Donors and Donations\")\n print(\"\\nDonor Name - Donation Date - Donation Amount:\")\n print(\"-\"*40)\n for donation in donor_donations_list:\n print(f'{donation.fullname} - {donation.donation_date} - ${donation.donation_amount:,.2f}')\n print()", "def all_donors(self):\n return [item for item in self.r.keys()]", "def recent(self):\n return self.filter(\n start_date__lte=self.current().end_date + timezone.timedelta(days=1),\n end_date__gte=self.current().start_date - timezone.timedelta(days=1),\n )", "def last_contribution_date(self):\n from kitsune.customercare.models import Reply\n from kitsune.questions.models import Answer\n from kitsune.wiki.models import Revision\n\n dates = []\n\n # Latest Army of Awesome reply:\n try:\n aoa_reply = Reply.objects.filter(\n user=self.user).latest('created')\n dates.append(aoa_reply.created)\n except Reply.DoesNotExist:\n pass\n\n # Latest Support Forum answer:\n try:\n answer = Answer.objects.filter(\n creator=self.user).latest('created')\n dates.append(answer.created)\n except Answer.DoesNotExist:\n pass\n\n # Latest KB Revision edited:\n try:\n revision = Revision.objects.filter(\n creator=self.user).latest('created')\n dates.append(revision.created)\n except 
Revision.DoesNotExist:\n pass\n\n # Latest KB Revision reviewed:\n try:\n revision = Revision.objects.filter(\n reviewer=self.user).latest('reviewed')\n # Old revisions don't have the reviewed date.\n dates.append(revision.reviewed or revision.created)\n except Revision.DoesNotExist:\n pass\n\n if len(dates) == 0:\n return None\n\n return max(dates)", "def list_donors(self):\n return [donor.name for donor in self.donors]", "def get(self, request):\n\n matched_concert_ids = list(ConcertMatch.objects.values_list('concert', flat=True))\n matches = Concert.objects.filter(id__in=matched_concert_ids).order_by('date_time')\n if Concert.objects.count():\n last_updated = Concert.objects.latest('date_scraped').date_scraped\n # TODO handle empty DB\n else:\n last_updated = datetime(1900,1,1)\n context = {\n 'matches': matches,\n 'last_updated': last_updated,\n }\n\n return render(request, 'concerts/upcoming_concerts.html', context)", "def select(self):\n last_results = self.database.query('''SELECT *\n FROM History\n ORDER BY request_date DESC\n LIMIT 10''')\n return last_results", "def get(self):\n try:\n tasks = tasks_overdue(get_db())\n return list(map(task_to_dict, tasks))\n except ValueError:\n api.abort(422, \"Invalid Date\")", "def add_donations():\n done = False\n while not done:\n name = input(\"Enter donor name (or \\\"list\\\" for list): \")\n if name == \"list\":\n # list donor names\n for d in donor_history: print(d.name)\n continue\n for thisdonor in donor_history:\n if name == thisdonor.name:\n break\n if thisdonor == None:\n thisdonor = donor(name)\n donor_history.append(thisdonor)\n print(\"Adding new donor: \" + name)\n moredonations = True\n while moredonations:\n value = input(\"Enter donation amount or -1 when finished: \")\n try:\n donation_amount = int(value)\n except ValueError:\n print(\"Invalid input, reenter.\")\n continue\n if donation_amount == -1: break\n thisdonor.donations.append(donation_amount)\n done = True\n if thisdonor: print(f\"Thank you, {name}, for your donation(s)!\")\n print()\n return", "def getCitationsData():\n # Follows https://github.com/simonw/irma-scrapers/issues/1\n citationsResponse = requests.get(\"https://api.github.com/repos/greenelab/covid19-review/git/trees/output\", headers=headers).json()\n treeEntry = [t for t in citationsResponse[\"tree\"] if t[\"path\"] == \"references.json\"][0] \n citations = json.loads(base64.b64decode(requests.get(treeEntry[\"url\"]).json()[\"content\"]))\n\n citationsDF = pd.DataFrame(citations)\n citationsDF[\"Covid19-review_paperLink\"] = citationsDF.id.apply(lambda x: \"https://greenelab.github.io/covid19-review/#ref-\" + x)\n citationsDF = citationsDF[[\"DOI\", \"title\", \"issued\", \"container-title\", \"URL\", \"Covid19-review_paperLink\"]]\n citationsDF.rename(columns={\"DOI\": \"doi\", \"issued\": \"date\", \"container-title\": \"publication\"}, inplace=True)\n\n # Convert date to string\n def dateStringFromDateParts(row):\n try:\n dateParts = row['date']['date-parts'][0]\n if len(dateParts) == 3:\n return \"-\".join([str(dateParts[1]), str(dateParts[2]), str(dateParts[0])])\n elif len(dateParts) == 2:\n return \"-\".join([str(dateParts[1]), str(dateParts[0])])\n elif len(dateParts) == 1:\n return str(dateParts[0])\n else:\n return\n except:\n return\n\n citationsDF.date = citationsDF.apply(dateStringFromDateParts, axis=1)\n\n citationsDF.set_index(\"doi\", inplace=True)\n return citationsDF", "def get_queryset(self):\n return Person.objects.filter(expiry_date__gt=timezone.now())", "def 
populate_donations():\n logger.info('Starting Donations table population')\n\n DONATION_DATE = 0\n DONATION_AMOUNT = 1\n DONATED_BY = 2\n\n d = datetime.today() - timedelta(days=random.randint(1, 301))\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n\n for donor in Donors:\n # Randomly generated number of donations\n #donation_times = random.randint(1, 10)\n for donation in range(random.randint(1, 10)):\n with database.transaction():\n # random date in last year\n # random donation amount converted to decimal\n # pulling donor fullname as id\n new_donation = Donations.create(\n donation_date=datetime.today() - timedelta(days=random.randint(1, 301)),\n donation_amount=decimal.Decimal(\n random.randrange(1, 9999999))/100,\n donated_by=donor.fullname,\n )\n new_donation.save()\n logger.info('Database add successful')\n\n logger.info('Print the Donors records we saved...')\n for don in Donations:\n logger.info(f'donation: {don.id} : {don.donation_date} : {don.donation_amount} : '\n + f' donor_id: {don.donated_by} has been added to the Donations table ')\n except Exception as e:\n logger.info(f'Error creating = {donation[DONATION_DATE]} {donation[DONATION_AMOUNT]}'\n + f'{donation[DONATED_BY]}')\n logger.info(e)\n logger.info('See how the database protects our data')\n finally:\n logger.info('closing database')\n database.close()", "def __get_unique_due_date_list(self) -> List[str]:\n return self.tasks.get_due_date_list()", "def all_donors_all_donation(self):\n for name in self.all_donors:\n person = self.r.hgetall(name)\n print(f\"Person: {name}\")\n for key, value in person.items():\n print(f\"{key}: {value}\")", "def recently(self):\n items = []\n for item in self.p.entries:\n dt = datetime.fromtimestamp(mktime(item.published_parsed))\n delta = datetime.today() - dt\n\n if delta.days > self.days:\n continue\n items.append(item)\n if 'verbose' in self.args and self.args['verbose']:\n print delta.days, dt\n self.items = items\n return items", "def complete_charges():\n for charge in Charge.objects.filter(state=Charge.CREATED):\n charge.retrieve()", "def returnDepositsWithdrawals(self,\n start=datetime.now() - timedelta(days=1),\n end=datetime.now()):\n pass", "def get_refund_contribs(self):\n return Contribution.objects.filter(\n related=self, type=mkt.CONTRIB_REFUND).order_by('-modified')", "def report_data(self):\n report = [donor_obj.data for donor_obj in self.donor_list]\n return report", "def get_pull_requests():\n pull_requests = []\n url_base = f\"https://github.com/{GITHUB_OWNER}/{GITHUB_REPO}/pull/\"\n repo = GITHUB.get_user(GITHUB_OWNER).get_repo(GITHUB_REPO)\n pulls = repo.get_pulls(base=\"main\", state=\"closed\")\n last_release_date = repo.get_latest_release().published_at\n for pull in pulls:\n if not pull.draft and pull.closed_at > last_release_date and pull.merged:\n log_line = f\"* {pull.title} [#{pull.number}]({url_base}{pull.number})\"\n pull_requests.append(log_line)\n return pull_requests", "def get(self, args):\n return Payment.query.offset(args['offset']).limit(args['limit'])", "def recent(\n self,\n limit: int = 20,\n offset: int = 0,\n max_days: int = 180\n ) -> Tuple[\n List[sqlite3.Row], int, List[str]\n ]:\n\n sql = \"\"\"SELECT rowid, url as 'url [url]', domain, title,\n added as 'added [timestamp]',\n updated as 'updated [timestamp]',\n retrieved 'retrieved [timestamp]',\n comments, tags as 'tags [comma_delimited]'\n FROM bookmarks\n WHERE substr(added, 0, 11) >= date('now', ?)\n AND deleted IS NULL\n ORDER BY added DESC\n LIMIT 
? OFFSET ?\"\"\"\n\n max_days_clause = f\"-{max_days} day\"\n\n return (\n self._select(sql, (max_days_clause, limit, offset)),\n self._count(sql, (max_days_clause,)),\n self._explain(sql, (max_days_clause, limit, offset))\n )", "def list_comments_history(self, expense_id):\n url = base_url + expense_id + '/comments'\n resp = zoho_http_client.get(url, self.details, self.headers)\n return parser.get_comments(resp)", "def get_payees(self):\n # open a cursor object\n cur = self.get_cursor()\n\n # get payees from database\n cur.execute(\"SELECT * FROM payees\")\n payees_data = cur.fetchall()\n\n # convert into a list of payee dictionaries\n payees_list = []\n [payees_list.append({'payee_id': payee[0],\n 'payee_name': payee[1]})\n for payee in payees_data]\n\n # close the cursor\n self.close_cursor()\n\n return payees_list", "def display_list(d):\n print(\"\\nOur generous donors: \\n\")\n for donor_name in iter(d.donors):\n print(donor_name)\n print(\"\\n\")", "def trackRentRequest(self):\n\t\t#start_date = timezone.now().date()\n\t\tstart_dat=datetime.today()\n\t\tstart_date = start_dat - timedelta( hours=start_dat.time().hour,minutes=start_dat.time().minute,seconds=start_dat.time().second ) \n\t\tend_date=start_dat\n\t\tans=None\n\t\t#print start_dat.time().hour\n\t\tprint end_date\n\t\tans=Rents.objects.filter(date_of_issue__range=(start_date,end_date))\n\t\tlst=[]\n\t\tfor b in ans:\n\t\t\towneradd=b.owner_id.address\n\t\t\tuseradd=b.userid.address\n\t\t\tusername=b.userid.email\n\t\t\townername=b.owner_id.email\n\t\t\tuserphone=b.userid.contact_no\n\t\t\townerphone=b.owner_id.contact_no\n\t\t\tbookname=b.bookid.title\n\t\t\tstatus=b.paymentid.ispending\n\t\t\tbook=b.__dict__\n\t\t\tbook['owneradd']=owneradd\n\t\t\tbook['useradd']=useradd\n\t\t\tbook['username']=username\n\t\t\tbook['ownername']=ownername\n\t\t\tbook['userphone']=userphone\n\t\t\tbook['ownerphone']=ownerphone\n\t\t\tbook['name']=bookname\n\t\t\tif status==True:\n\t\t\t\tbook['status']=\"Pending\"\n\t\t\telse:\n\t\t\t\tbook['status']=\"Delivered\"\n\t\t\tlst.append(book)\n\t\t#print ans\n\t\tif ans is None:\n\t\t\tprint \"not found\"\n\t\telse:\n\t\t\tprint \"found\"\n\t\treturn lst", "def update_lists():\n global donor_totals_list\n global donor_donations_list\n global donor_names_list\n global last_donation_list\n donor_totals_list = get_all_donor_totals()\n donor_donations_list = get_list_of_donations()\n donor_names_list = get_list_of_donors()\n last_donation_list = get_max_donation_date_list()", "def get_donor_list_text(self):\n text_string = \"\\nCurrent donors are:\\n\"\n for donor in self.donor_names:\n text_string += f\"{donor}\\n\"\n return text_string", "def recent_comic_titles():\r\n\treturn [comic.title for comic in Comic.objects.all().order_by('-created_on')[0:10]]", "def fetch_deliveries(self):\n deliveries_cursor = self.database.Deliveries.find()\n deliveries = []\n for delivery in deliveries_cursor:\n delivery.pop('_id', None)\n deliveries.append(delivery)\n return deliveries", "def show_donors(self):\n\t\twith self.driver.session() as session:\n\t\t\tstr_build = \"\"\n\t\t\ttry:\n\t\t\t\tcyph = \"\"\"\n\t\t\t\tMATCH (d:Donor)\n\t\t\t\tRETURN d.full_name as full_name, d.email as email\n\t\t\t\t\"\"\"\n\t\t\t\tresult = session.run(cyph)\n\t\t\t\tfor record in result:\n\t\t\t\t\tstr_build += record['full_name'] + ' -- ' + record['email'] + '\\n'\n\t\t\texcept Exception as e:\n\t\t\t\tprint(\"Error occurred. 
See below.\")\n\t\t\t\tprint(e)\n\t\treturn str_build", "def get_queryset(self):\n return Comment.objects.order_by('-pub_date')[:10]", "def get_notifications(self, new=True):\n url = (\"https://api.imgur.com/3/account/{0}/\"\n \"notifications\".format(self.name))\n resp = self._imgur._send_request(url, params=locals(), needs_auth=True)\n msgs = [Message(msg_dict, self._imgur, has_fetched=True) for msg_dict\n in resp['messages']]\n replies = [Comment(msg_dict, self._imgur, has_fetched=True) for\n com_dict in resp['replies']]\n return {'messages': msgs, 'replies': replies}", "def donor_names(self):\n return list(self.donors)", "def viewdonations(request):\r\n # Obtain the context from the HTTP request.\r\n\r\n context_instance = RequestContext(request)\r\n\r\n try:\r\n user = _validate_and_get_geniuser(request)\r\n except LoggedInButFailedGetGeniUserError:\r\n return _show_failed_get_geniuser_page(request)\r\n\r\n\r\n username = user.username\r\n my_donations = interface.get_donations(user)\r\n lent = \"AA\"\r\n\r\n return render(request, 'control/viewdonations.html', {'username' : username, \r\n 'my_donations' : my_donations, 'lent' : lent})", "def upcoming(self):\n\n today = datetime.now().strftime(\"%Y-%m-%d\")\n last_day = (datetime.now() + timedelta(days=7)).strftime(\"%Y-%m-%d\")\n\n query = \"SELECT * FROM {} WHERE happening_on BETWEEN\\\n '{}' AND '{}'\".format(self.table, today, last_day)\n\n return self.fetch_all(query)", "def get_books_read_by_patron():\n list_of_books = model.get_finished_books(session['patron'])\n\n return render_template('finished_book_list.html',\n list_of_books=list_of_books\n )", "def get_replies(self, new=True):\n url = (\"https://api.imgur.com/3/account/{0}/\"\n \"notifications/replies\".format(self.name))\n return self._imgur._send_request(url, needs_auth=True)", "def get_donor_info(self):\n name = self.get_donor()\n if name in self.all_donors:\n person = self.r.hgetall(name)\n print(f\"Person: {name}\")\n for key, value in person.items():\n print(f\"{key}: {value}\")\n else:\n print(\"Name not in database.\")", "def getNumDonations(self):\n return len(self.donationList)", "def show():\n logger.info('List donors')\n try:\n logger.info('Connecting to database...')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for i in Donor.select().order_by(Donor.donor_name):\n print(i)\n except Exception as e:\n logger.info(e)\n finally:\n database.close()", "def getPropositionsByDates(view):\n context = aq_inner(view.context)\n request = view.request\n ctx_selected_dates = request.SESSION.get('rendezvous', {})\n uid = context.UID()\n if uid in ctx_selected_dates:\n return copy.deepcopy(ctx_selected_dates[uid])\n else:\n return context.getPropositionsByDates()", "def get_all_entries():\n conn = sqlite3.connect(CONF.database, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)\n curs = conn.cursor()\n try:\n return curs.execute(\"SELECT date_time, price FROM rates ORDER BY date_time DESC\").fetchall()\n finally:\n curs.close()\n conn.close()", "def get(self):\n # Check if last_refreshed is saved in the storage\n if \"last_refreshed\" in storage:\n self.refresh_infos(storage[\"last_refreshed\"])\n else:\n # last_refreshed not found in the storage\n soccerInfo = SoccerInfo.objects.first()\n if soccerInfo and soccerInfo[\"last_refreshed\"]:\n self.refresh_infos(soccerInfo[\"last_refreshed\"])\n else:\n self.refresh_infos()\n return storage[\"cached_soccerInfos\"], 200", "def get_recent_obsid():\n#\n#--- extract a list of the last two 
weeks of acis observations\n#\n stop = time.strftime('%Y:%j:%H:%M:%S', time.gmtime())\n stop = Chandra.Time.DateTime(stop).secs\n start = stop - 86400 * 14\n\n a_list = make_obsid_list(start, stop)\n\n return a_list", "def get_all_donor_totals():\n try:\n logger.info('opening get_all_donor_totals database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n query_totals = (Donations\n .select(Donations.donated_by_id.alias('fullname'),\n fn.COUNT(Donations.donated_by_id).alias(\n 'num_donations'),\n fn.sum(Donations.donation_amount).alias('donation_total'))\n .group_by(Donations.donated_by_id)\n )\n return query_totals\n except Exception as e:\n logger.info(f'Error getting list of donors')\n logger.info(e)\n\n finally:\n logger.info('closing get_all_donor_totals database call')\n database.close()", "def _get_valuation_history(self, ticker=\"ACN\", refresh=False):\n\n # Try the database\n year_ref = datetime.today().year-1\n year_object = datetime(year_ref, 12, 31)\n\n query = stockdatamodel.Valuation.query.filter(\n stockdatamodel.Valuation.ticker == ticker\n ).filter(stockdatamodel.Valuation.year == year_object)\n if not refresh and query.count() == 1:\n query = stockdatamodel.Valuation.query.filter(\n stockdatamodel.Valuation.ticker == ticker\n )\n return query.all()\n\n # Else go and fetch\n data = self.api.get_valuation_history(ticker=ticker, refresh=refresh)\n for valuation_i in data:\n date = datetime.strptime(valuation_i.date, \"%Y-%M-%d\")\n date = datetime(year=date.year, month=12, day=31)\n query = stockdatamodel.Valuation.query.filter(\n stockdatamodel.Valuation.ticker == ticker\n ).filter(stockdatamodel.Valuation.year == date)\n count = query.count()\n if not refresh and count == 1:\n continue\n elif count < 1:\n # Else build a new one\n valuation = stockdatamodel.Valuation(\n ticker=ticker, year=date, valuation=valuation_i.valuation\n )\n logger.debug(valuation)\n db.session.add(valuation)\n elif refresh:\n valuation = query.first()\n valuation.valuation = valuation_i.valuation\n db.session.commit()\n\n query = stockdatamodel.Valuation.query.filter(\n stockdatamodel.Valuation.ticker == ticker\n )\n return query.all()", "def recent(cls, count=3, date=None):\n q = cls.query_started(date).limit(count)\n return q.all()", "def get_citations(self):\n url = self._get_url() + 'citations'\n citations = self._request(url)\n return citations.json()", "def get_pr_data_for_repo(owner, repo, client_id, client_secret):\n prs = []\n is_last = False\n url_params = {'owner': owner, 'repo': repo}\n url_pat = 'https://api.github.com/repos/%(owner)s/%(repo)s/pulls'\n payload = {\n 'client_id': client_id,\n 'client_secret': client_secret,\n 'state': 'closed',\n }\n next_url = None\n first = True\n resp = requests.get(url_pat % url_params, params=payload)\n while not is_last:\n # Request next_url if this is not the first request\n if first:\n first = False\n else:\n resp = requests.get(next_url)\n print(resp.url, file=sys.stderr)\n\n # Abort if the return is an error\n out = resp.json()\n if 'message' in out:\n pprint.pprint(out, file=sys.stderr)\n raise Exception(resp.text)\n\n # Process the PRs\n for pr in resp.json():\n if pr['merged_at']:\n # Record the PR\n pr_obj = PullRequest(owner, repo, pr)\n prs.append((owner, repo, pr_obj.number, pr_obj))\n\n # Process the links and get the next URL\n links = get_links(resp.headers['Link'])\n next_url = links.get('next')\n is_last = next_url is None\n\n prs.sort()\n return prs", "def print_donor_list():\n 
print(data_base.donor_names)", "def get(self):\n user_id = get_jwt_identity()\n user = find_user(user_id)\n\n args = pagination_parser.parse_args()\n return get_notifications(user, args['from_id'])", "def get_due_contacts(user) -> typing.List[Contact]:\n contacts = (\n Contact.objects.filter(user=user)\n .order_by(\"name\")\n .prefetch_related(\"interactions\")\n .all()\n )\n contacts = filter(lambda c: c.get_urgency() > 0, contacts)\n contacts = sorted(contacts, key=lambda c: c.get_urgency(), reverse=True)\n return list(contacts)", "def get(self) -> Iterable[instarepo.github.Repo]:\n return self._filter_pushed_after(\n self._filter_pushed_before(\n self._filter_language(\n self._filter_prefix(\n self._filter_forks(\n self._filter_archived(\n self.github.get_all_repos(self.sort, self.direction)\n )\n )\n )\n )\n )\n )", "def get_clients_to_be_reactivated(file=\"db.json\") -> List[Client]:\n with TinyDB(file) as db:\n query = Query()\n result = db.search(query[\"rem date\"].test(contact_now))\n output = []\n for client in result:\n output.append(Client(client[\"first name\"], client[\"last name\"],\n client[\"last visit\"], client[\"rem date\"],\n client[\"email\"]\n ))\n return output", "def get_last_prices(self):\n return self.last_results", "def get_available_rental_instruments(self) -> list:\n self.cursor.execute(\"\"\"\n SELECT DISTINCT name, brand, monthly_cost, ri_id AS id\n FROM rental_instrument AS ri\n WHERE NOT EXISTS\n (SELECT 1 FROM rental AS r\n WHERE ri.ri_id = r.ri_id \n AND CURRENT_DATE < end_date\n AND terminated IS NULL)\n \"\"\")\n self.db.commit()\n return self._cursor_result()", "def get_contributors(self, count: int = 0) -> List[Tuple[str, int]]:\n date_from = self._date_from.isoformat() + \"Z\" if self._date_from else None\n date_to = self._date_to.isoformat() + \"Z\" if self._date_to else None\n\n # get commits for the specified period of time\n commits = self._repo.get_commits(date_from, date_to)\n\n # collect statistics on commits\n rating = defaultdict(int)\n\n for commit in commits:\n author = commit[\"author\"]\n\n if not author:\n continue\n\n rating[author[\"login\"]] += 1\n\n # sort by the number of commits in descending order\n rating = list(rating.items())\n rating.sort(key=lambda x: x[1], reverse=True)\n\n if count:\n rating = rating[:count]\n return rating", "def _get_repo_contributors(self, owner, repo):\n url = f\"{BASE_URL}/repos/{owner}/{repo}/contributors\"\n return self.fetch_all_pages(url, flatten=True, query_params={\"per_page\": 100})", "def printdonorlist():\n for name in donor_db:\n print(name)", "def last_donation(self):\n return self.donations[-1] if self.donations else 0", "def get_incidents(self) -> tuple[list[Any], Any, Any | None]:\n timestamp = None\n fetch_limit = arg_to_number(self.fetch_limit)\n fetch_time = self.fetch_time\n if not fetch_limit or not fetch_time:\n raise DemistoException('Missing parameter - fetch limit or fetch time')\n last_run = demisto.getLastRun()\n if last_run and last_run.get('timestamp'):\n timestamp = last_run.get('timestamp', '')\n last_fetched_ids = last_run.get('last_fetched_ids', [])\n else:\n if last_fetch := arg_to_datetime(fetch_time, required=True):\n # convert to ISO 8601 format and add Z suffix\n timestamp = last_fetch.strftime(DATE_FORMAT)\n last_fetched_ids = []\n\n page_size = '100'\n # set the until argument to prevent duplicates\n until = get_now_time()\n response = self.list_incidents_request(page_size, '0', until, timestamp)\n if not response.get('items'):\n return [], last_fetched_ids, 
timestamp\n\n page_number = response.get('totalPages', 1) - 1\n total = 0\n total_items: list[dict] = []\n while total < fetch_limit and page_number >= 0:\n try:\n response = self.list_incidents_request(page_size, page_number, until, timestamp)\n except HTTPError as e:\n if e.response is not None and e.response.status_code == 429:\n raise DemistoException(\n 'Too many requests, try later or reduce the number of Fetch Limit parameter.'\n ) from e\n raise e\n\n items = response.get('items', [])\n new_items = remove_duplicates_for_fetch(items, last_fetched_ids)\n # items order is from old to new , add new items at the start of list to maintain order\n total_items = new_items + total_items\n total += len(new_items)\n page_number -= 1\n\n # bring the last 'fetch_limit' items, as order is reversed\n total_items = total_items[len(total_items) - fetch_limit:]\n return total_items, last_fetched_ids, timestamp", "def getListIngr(cls):\n\n # meals = Meals.getMealsByFutureDate(user=session['User'])\n list_ingr = db.session.query(RecipeIngredient).join(Recipe).join(Meals).\\\n join(Ingredient).\\\n filter(func.substr(Meals.date_planned,0,11) >= func.substr(datetime.today(),0,11)).\\\n filter(Meals.recipe_fk==Recipe.recipe_id).\\\n filter(Recipe.recipe_id==RecipeIngredient.recipe_fk).\\\n filter(RecipeIngredient.ingredient_name==Ingredient.name).\\\n filter(Meals.user_fk==session['User']).\\\n order_by(Meals.date_planned).all()\n\n return list_ingr", "def gen_donor():\n# <<<<<<< master\n return [donor for donor in donor_data]", "def get_pr_data(repos, client_id, client_secret):\n pr_data = []\n for owner, repo in repos:\n repo_prs = get_pr_data_for_repo(\n owner, repo, client_id, client_secret)\n pr_data.extend(repo_prs)\n return pr_data", "def patrons_expiring_on_date(session, date):\n query = {\n 'target': {\n 'record': {\n 'type': 'patron'\n },\n 'id': 43\n },\n 'expr': {\n 'op': 'equals',\n 'operands': [\n # dates go in to queries as MM-DD-YY,\n # but come out in ISO-8601 (YYYY-MM-DD)\n date.strftime('%m/%d/%Y'),\n ''\n ]\n }\n }\n url = '/patrons/query?offset=0&limit=' + str(api_limit)\n headers = {'content-type': 'application/json'}\n data = json.dumps(query)\n r = session.post(api_url_base + url, data=data, headers=headers)\n entries = json.loads(r.text)['entries']\n return [x['link'].split('/')[-1] for x in entries]", "def due_soon(request):\n soon = timezone.now() + timedelta(days=1)\n return Task.objects.select_related('project').filter(user=request.user, due__lt=soon, done=False).exclude(folder='trash')", "def dryrecs():\n click.echo(\"Recommendations, not emailed: \")\n dio_dir: DioDir = DioDir()\n sched: ScheduleABC = DefaultSchedule()\n today: datetime.date = datetime.datetime.now().date()\n res: Optional[List[Person]] = get_recs(dio_dir, sched, today)\n next_day: datetime.date = sched.next_emailing_day(today)\n click.echo(recs_to_message(res, next_day))", "def getRelevantPRData():\n prInfoFromAPI = getPRsFromAPI()\n diffHeader = headers.copy()\n diffHeader['Accept'] = \"application/vnd.github.v3.diff\"\n textForReviewPRs = []\n\n for PR in prInfoFromAPI:\n labels = [label[\"name\"] for label in PR['labels']]\n if \"Text for Review\" in labels:\n diffResponse = requests.get(PR[\"url\"], headers=diffHeader)\n diff = diffResponse.text\n # Add the info the list\n textForReviewPRs.append({\n \"pull_request_link\": PR[\"html_url\"],\n \"diff\": diff\n })\n if int(diffResponse.headers[\"X-RateLimit-Remaining\"]) <= 2:\n print('GitHub api rate limit will be exceeded; the GITHUB_TOKEN env variable 
needs to be set.')\n break\n return textForReviewPRs", "def list(self, request):\n currentYear = datetime.now().year\n expenses = Expenses.objects.filter(\n date_purchased__contains=currentYear)\n serializer = ExpenseSerializer(\n expenses, many=True, context={'request': request})\n return Response(serializer.data)", "def get_shipments_by_date(auth, date, base_url='https://api.cratejoy.com/v1/'):\n \n shipment_endpoint = '{}shipments/?batch.end__lt={}T00:00:00Z'.format(base_url, date)\n\n resp = requests.get(\n shipment_endpoint,\n auth=auth\n )\n\n print('GET request to {} responded with status '\n 'code: {}'.format(shipment_endpoint,\n resp.status_code))\n print(resp.content)", "def get_all(self):\n return self.__fetcher.get_fetched()", "def fetchGithubFeed(startDate,endDate):\n\tresp = requests.get(k.GITHUB_URL + \"journal/commits?access_token=\" + k.GITHUB_TOKEN + \"&since=\" + startDate.isoformat() + \"&until=\" + endDate.isoformat())\n\t\n\treturn resp.json()", "def get( self, donor_type ):\n\n # Grab the donor type and make sure it is one of the allowed values.\n if donor_type not in [ 'caged', 'queued' ]:\n raise TypeError\n\n query_terms = build_filter_from_request_args( request.args )\n\n # Sanitize incoming partial UUID to only hex characters.\n if 'searchable_id' in query_terms:\n if 'eq' in query_terms[ 'searchable_id' ]:\n is_hex_string( query_terms[ 'searchable_id' ][ 'eq' ] )\n elif 'in' in query_terms[ 'searchable_id' ] or 'nin' in query_terms[ 'searchable_id' ]:\n for in_nin in query_terms[ 'searchable_id' ]:\n for searchable_id in query_terms[ 'searchable_id' ][ in_nin ]:\n is_hex_string( searchable_id )\n\n page_information = {}\n sort_information = []\n if query_terms:\n page_information = {}\n if 'paginate' in query_terms and query_terms[ 'paginate' ]:\n page_information = {\n 'page_number': query_terms[ 'paginate' ][ 'page_number' ],\n 'rows_per_page': query_terms[ 'paginate' ][ 'rows_per_page' ]\n }\n del query_terms[ 'paginate' ]\n\n if 'sort' in query_terms and query_terms[ 'sort' ]:\n sort_information = query_terms[ 'sort' ]\n del query_terms[ 'sort' ]\n\n donors = get_donors(\n donor_type,\n query_terms,\n page_information=page_information,\n sort_information=sort_information\n )\n\n if page_information:\n transformed_data = transform_data(\n 'donate/donors/{}'.format( donor_type ),\n page_information,\n donors,\n CagedDonorSchema\n )\n response = jsonify( transformed_data[ 'page' ] )\n response.headers[ 'Link' ] = transformed_data[ 'link-header' ]\n response.status_code = 200\n return response\n\n if donor_type == 'caged':\n schema = CagedDonorSchema( many=True )\n else:\n schema = QueuedDonorSchema( many=True )\n\n result = schema.dump( donors ).data\n\n return result, 200", "def donor_names():\n return list(donor_db.keys())", "def latest(self):\n return self.journal_data[self.latest_id]", "def get_recently_articles(cls, num):\n return cls.objects.values('title', 'view_times', 'update_time', 'author')\\\n .filter(status=0).order_by('-update_time')[:num]", "def get_latest_ratings_list(ratings):\n return [[timezone.localtime(rating.timestamp).strftime(\"%I:%M%p\"),\n rating.rating] for rating in ratings]", "def get(self, due_date):\n try:\n tasks = tasks_by_due_date(get_db(), date.fromisoformat(due_date))\n return list(map(task_to_dict, tasks))\n except ValueError:\n api.abort(422, \"Invalid Date\")", "def get_recent_contacts(user, limit=5, timespan_days=14) -> typing.List[Contact]:\n timespan_recent = datetime.now().astimezone() - 
timedelta(days=timespan_days)\n contacts_recent = (\n Contact.objects.filter(interactions__was_at__gt=timespan_recent)\n .filter(user=user)\n .annotate(count=Count(\"interactions\"))\n .order_by(\"-count\")[:limit]\n )\n return list(contacts_recent)", "def latest_joined(request):\n prev = int(request.POST['days'])\n d = datetime.today()-timedelta(prev)\n users = OTNUser.objects.filter(date_joined__gt=d).order_by(\"date_joined\")\n r = [] \n # e-mail MIT card office\n for u in users:\n r.append(\"%s, %s, %s\"%(u.mit_id, u.name, u.approved))\n\n return JSONHttpResponse(r)", "def scrape_crew(self):\n\n page = requests.get(self.url)\n soup = BeautifulSoup(page.content, \"html.parser\")\n results = soup.find(\"div\", id=\"fullcredits_content\")\n directors_and_writers = results.find_all(\n \"table\", class_=\"simpleTable simpleCreditsTable\"\n )\n cast = results.find(\"table\", class_=\"cast_list\")\n\n crew = []\n crew.append(directors_and_writers[0])\n crew.append(directors_and_writers[1])\n crew.append(cast)\n\n return crew", "def recent(self):\n now = timezone.now()\n # construct a datetime based on now but with zero hour/minute/second\n today = datetime(\n now.year, now.month, now.day, tzinfo=timezone.get_default_timezone()\n )\n return self.filter(end_time__lt=today).order_by(\"-start_time\")", "def refresh_list(self):\n if self._dominfo_lock.acquire(False):\n try:\n return self._refresh_list()\n finally:\n self._dominfo_lock.release()\n else:\n # wait until the refresh done by the other party is complete\n with self._dominfo_lock:\n pass" ]
[ "0.65837014", "0.65390664", "0.64193267", "0.5857776", "0.5843226", "0.56542754", "0.565169", "0.56222814", "0.56117177", "0.55442196", "0.55310655", "0.5512683", "0.5447088", "0.54198706", "0.53970146", "0.5342934", "0.5318437", "0.5283673", "0.52812785", "0.52525187", "0.52378017", "0.51988626", "0.5181239", "0.5178459", "0.5177728", "0.51682246", "0.5164721", "0.516058", "0.5152485", "0.50993836", "0.50802165", "0.5073399", "0.5053522", "0.5047333", "0.50461125", "0.5020219", "0.501913", "0.5018403", "0.50098675", "0.50038487", "0.50030816", "0.49936816", "0.49923", "0.49849436", "0.49836913", "0.49779004", "0.49755174", "0.49711093", "0.49696344", "0.49686125", "0.49629614", "0.4959528", "0.49585283", "0.49474558", "0.4932711", "0.492987", "0.49246964", "0.4915851", "0.49146765", "0.48961318", "0.4891362", "0.48890886", "0.4885499", "0.4883505", "0.48795363", "0.48724788", "0.48619926", "0.4855372", "0.48490566", "0.48472297", "0.48347437", "0.48320302", "0.48179287", "0.48157468", "0.48086834", "0.480841", "0.48070776", "0.47957322", "0.47954082", "0.47849062", "0.47816303", "0.47786194", "0.4778281", "0.4777902", "0.47777143", "0.47694328", "0.47552624", "0.47487235", "0.47480288", "0.47464842", "0.47435156", "0.47428647", "0.47388265", "0.472771", "0.47256422", "0.4721353", "0.47192493", "0.4717233", "0.4716928", "0.4715243" ]
0.668418
0
Retrieves a list of donators sorted by total donation amount.
def getTop(self, limit):
    def _cb(info, donators):
        players = []
        for donator in donators:
            players.append(dict(donator, **info[donator['steamID']]))
        return players

    donators = []
    steamIDs = []
    for d in self.store.query(Donator,
                              AND(Donator.anonymous == False,
                                  Donator.steamID != None),
                              sort=Donator.totalAmount.desc,
                              limit=limit):
        steamIDs.append(d.steamID)
        donators.append(donatorToDict(d))

    d = self.getPlayerSummaries(steamIDs)
    d.addCallback(_cb, donators)
    return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def donations(self):\n return self.caller.player.Dominion.assets.donations.all().order_by(\"amount\")", "def list_donations(self, caller):\n msg = \"{wDonations:{n\\n\"\n table = PrettyTable([\"{wGroup{n\", \"{wTotal{n\"])\n for donation in self.donations:\n table.add_row([str(donation.receiver), donation.amount])\n msg += str(table)\n caller.msg(msg)", "def get_list_of_donations():\n try:\n logger.info('opening get_list_of_donations database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n query_results = (Donations.select(Donations.id, Donations.donation_date,\n Donations.donation_amount, Donations.donated_by_id.alias('fullname')))\n return query_results\n except Exception as e:\n logger.info(f'Error getting list of donors')\n logger.info(e)\n\n finally:\n logger.info('closing get_list_of_donations database call')\n database.close()", "def all_donors(self):\n return [item for item in self.r.keys()]", "async def api_get_donations(g: WalletTypeInfo = Depends(get_key_type)):\n user = await get_user(g.wallet.user)\n wallet_ids = user.wallet_ids if user else []\n donations = []\n for wallet_id in wallet_ids:\n new_donations = await get_donations(wallet_id)\n donations += new_donations if new_donations else []\n return [donation.dict() for donation in donations] if donations else []", "def get_all_donor_totals():\n try:\n logger.info('opening get_all_donor_totals database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n query_totals = (Donations\n .select(Donations.donated_by_id.alias('fullname'),\n fn.COUNT(Donations.donated_by_id).alias(\n 'num_donations'),\n fn.sum(Donations.donation_amount).alias('donation_total'))\n .group_by(Donations.donated_by_id)\n )\n return query_totals\n except Exception as e:\n logger.info(f'Error getting list of donors')\n logger.info(e)\n\n finally:\n logger.info('closing get_all_donor_totals database call')\n database.close()", "def list_donors(self):\n return [donor.name for donor in self.donors]", "def sort_donor_list(donor_list):\n sorted_list = sorted(\n donor_list, key=lambda donor: sum(donor_list[donor]), reverse=True)\n return sorted_list", "def all_donors_all_donation(self):\n for name in self.all_donors:\n person = self.r.hgetall(name)\n print(f\"Person: {name}\")\n for key, value in person.items():\n print(f\"{key}: {value}\")", "def sort_donations(self):\n intermed_list = []\n for item in self.donations:\n intermed_list.append((item.item_class, item))\n intermed_list.sort()\n self.donations = []\n for item in intermed_list:\n self.donations.append(item[1])", "def donor_names(self):\n return list(self.donors)", "def print_all_donor_donations():\n print(\"\\nList of Donors and Donations\")\n print(\"\\nDonor Name - Donation Date - Donation Amount:\")\n print(\"-\"*40)\n for donation in donor_donations_list:\n print(f'{donation.fullname} - {donation.donation_date} - ${donation.donation_amount:,.2f}')\n print()", "def generate_report_data(self):\n # Get list of donors and custom sort using magic method\n donors = list(self.donors.values())\n donors.sort(reverse=True)\n report = [(donor.name, donor.total_donations(), donor.num_donations(),\n donor.average_donation()) for donor in donors]\n return report", "def get_list_of_donors():\n try:\n logger.info('opening get_list_of_donors database call')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n return Donors.select()\n\n except Exception as e:\n logger.info(e)\n\n finally:\n database.close()", "def 
print_donor_list():\n print('Below are the existing donors: ')\n for donor in donors_data:\n print('\\t- ', donor[\"name\"], ' ', donor[\"donations\"])", "def donation_totals(donor_list, donor):\n return sum(donor_list[donor])", "def print_donor_totals_report():\n # # Creating list to hold donors info for printing\n update_lists()\n try:\n print()\n title = ['Donor Name', '| Total Given ', '| Num Gifts',\n ' | Average Gift']\n print('{:<20}{:>14}{:^14}{:>14}'.format(title[0], title[1],\n title[2], title[3]))\n print('-'*65)\n print()\n for donor in donor_totals_list:\n average_gift = float(donor.donation_total) / donor.num_donations\n print('{:<22}{}{:>12.2f}{:>10}{:>8}{:>12.2f}'.format(donor.fullname, '$', donor.donation_total,\n donor.num_donations, '$', average_gift))\n print()\n\n except Exception as e:\n logger.info(f'Error printing donor list at {donor.fullname}')\n logger.info(e)", "def donor_names(self):\n return [donor.name for donor in self.donors]", "def print_donor_list():\n print(data_base.donor_names)", "def clients_sorted_by_rentals(self):\n rentals = self.get_list()\n number_of_rented_movies = dict.fromkeys([rental.client for rental in rentals], 0)\n for rental in rentals:\n number_of_rented_movies[rental.client] += 1\n items = sorted(number_of_rented_movies.items(), key = lambda item: item[1], reverse=True)\n return [ClientDTO(item[0], item[1]) for item in items]", "def recent(self, limit):\n def _cb(players, donations):\n donators = []\n for donation in donations:\n player = players[donation.donator.steamID].copy()\n player['date'] = donation.date.asPOSIXTimestamp()\n player['amount'] = str(donation.amount)\n donators.append(player)\n return donators\n\n donations = []\n steamids = set()\n for donation in self.store.query(Donation,\n AND(Donation.donator == Donator.storeID,\n Donator.anonymous == False,\n Donator.steamID != None),\n limit=limit,\n sort=Donation.date.descending):\n steamids.add(donation.donator.steamID)\n donations.append(donation)\n\n d = self.getPlayerSummaries(steamids)\n d.addCallback(_cb, donations)\n return d", "def donor_names():\n names = list()\n for name in donor_db:\n names = names + [name[0]]\n return names", "def getTotDonation(self):\n return sum(self.donationList)", "def get_donnees(self):\r\n \r\n return self._donnees", "def total_donations(self):\n return sum(self.donations)", "def total_donations(self):\n return sum(self.donations)", "def display_list(d):\n print(\"\\nOur generous donors: \\n\")\n for donor_name in iter(d.donors):\n print(donor_name)\n print(\"\\n\")", "def getDonates(id):\n contributor = db.find_one({'_id': ObjectId(id)})\n print(contributor)\n return jsonify({\n '_id': str(ObjectId(contributor['_id'])),\n 'name': contributor['name'],\n 'amount': contributor['amount']\n })", "def clients_sorted_by_name(self):\n rentals = self.get_list()\n rentals = sorted(rentals, key = lambda rental: rental.client.full_name)\n return [rental.client for rental in rentals]", "def display_top_donor_for_each_group(self):\n orgs = Organization.objects.filter(donations__isnull=False)\n if not self.caller.check_permstring(\"builders\"):\n orgs = orgs.exclude(secret=True)\n orgs = list(orgs.distinct())\n npcs = list(\n InfluenceCategory.objects.filter(donations__isnull=False).distinct()\n )\n groups = orgs + npcs\n table = PrettyTable([\"Group\", \"Top Donor\", \"Donor's Total Donations\"])\n top_donations = []\n for group in groups:\n donation = (\n group.donations.filter(amount__gt=0)\n .order_by(\"-amount\")\n .distinct()\n .first()\n )\n if 
donation:\n top_donations.append(donation)\n top_donations.sort(key=lambda x: x.amount, reverse=True)\n for donation in top_donations:\n table.add_row(\n [str(donation.receiver), str(donation.giver), str(donation.amount)]\n )\n self.msg(str(table))", "def gen_donor():\n# <<<<<<< master\n return [donor for donor in donor_data]", "def print_donors_names():\n update_lists()\n print(\"\\nDonors\")\n print(\"-\"*20)\n for donor in donor_names_list:\n print(donor.fullname)\n print()", "def print_donor_list():\n print('Below are the existing donors: ')\n for donor in donors_list:\n print('\\t- ', donor[0], ' ', donor[1])", "def printdonorlist():\n for name in donor_db:\n print(name)", "def report_data(self):\n report = [donor_obj.data for donor_obj in self.donor_list]\n return report", "def donor_names():\n return donor_db.keys()", "def donor_names():\n return list(donor_db.keys())", "def show_donors(self):\n\t\twith self.driver.session() as session:\n\t\t\tstr_build = \"\"\n\t\t\ttry:\n\t\t\t\tcyph = \"\"\"\n\t\t\t\tMATCH (d:Donor)\n\t\t\t\tRETURN d.full_name as full_name, d.email as email\n\t\t\t\t\"\"\"\n\t\t\t\tresult = session.run(cyph)\n\t\t\t\tfor record in result:\n\t\t\t\t\tstr_build += record['full_name'] + ' -- ' + record['email'] + '\\n'\n\t\t\texcept Exception as e:\n\t\t\t\tprint(\"Error occurred. See below.\")\n\t\t\t\tprint(e)\n\t\treturn str_build", "def get_donor_list_text(self):\n text_string = \"\\nCurrent donors are:\\n\"\n for donor in self.donor_names:\n text_string += f\"{donor}\\n\"\n return text_string", "def getNumDonations(self):\n return len(self.donationList)", "def make_donor_report(self):\n report_rows = []\n for donor in self.donor_info.values():\n name = donor.name\n donations = donor.donations\n tot_don = donor.tot_donations\n num_don = len(donations)\n avg_don = donor.avg_donation\n report_rows.append(name, tot_don, num_don, avg_don)\n report_rows.sort(key=self.sort_key)\n report = []\n report.append(\"{:<25s}|{:>16s}|{:>16s}|{:>16s}\".format(\"Donor Name\",\"Total Donation\",\n \"Num Donations\",\"Average Amount\"))\n for row in report_rows:\n report.append(\"{:<25s} {:>16.2f} {:>16d} {:>16.2f}\".format(*row))\n return \"\\n\".join(report)", "def payments(self) -> List[Payment]:\n return self.session.get_payments(self.account_id)", "def _get_repo_contributors(self, owner, repo):\n url = f\"{BASE_URL}/repos/{owner}/{repo}/contributors\"\n return self.fetch_all_pages(url, flatten=True, query_params={\"per_page\": 100})", "def test_data():\n return [Donor(\"David Andrews\", [200.50, 400.00, 250.75]),\n Donor(\"John Goodfellow\", [25.00, 175.50]),\n Donor(\"Mary Suzuki\", [75.00, 125.00, 250.00]),\n Donor(\"Bonney Lake\", [500.50, 700.75, 500.25]),\n Donor(\"DeMarcus Rollins\", [155.00, 165.00])\n ]", "def total_donation(self):\n return self._total_donation", "def _print_donors():\n print('Current donors: ' + ', '.join(_get_donor_names()))", "def total_donated(self):\n if not hasattr(self, 'dynamic_total'):\n agg = self.donations.aggregate(Sum('amount'))\n self.dynamic_total = agg['amount__sum']\n return self.current + (self.dynamic_total or 0)", "def get_contributors(self, count: int = 0) -> List[Tuple[str, int]]:\n date_from = self._date_from.isoformat() + \"Z\" if self._date_from else None\n date_to = self._date_to.isoformat() + \"Z\" if self._date_to else None\n\n # get commits for the specified period of time\n commits = self._repo.get_commits(date_from, date_to)\n\n # collect statistics on commits\n rating = defaultdict(int)\n\n for commit in commits:\n 
author = commit[\"author\"]\n\n if not author:\n continue\n\n rating[author[\"login\"]] += 1\n\n # sort by the number of commits in descending order\n rating = list(rating.items())\n rating.sort(key=lambda x: x[1], reverse=True)\n\n if count:\n rating = rating[:count]\n return rating", "def funding(self):\n return self.donation_set.filter(\n order__status__in=[StatusDefinition.PLEDGED,\n StatusDefinition.PENDING,\n StatusDefinition.SUCCESS]\n ).distinct('order__user').count()", "def get_payments_per_loan(self, loan_id):\n query = \"\"\"\n SELECT * FROM payments WHERE loan_id='{}'\n \"\"\".format(loan_id)\n pay_data = self.fetch_all_data(query)\n reply = []\n if pay_data:\n for item in pay_data:\n item = self.pay_formatter(item)\n item[\"amount_paid\"] = str(item[\"amount_paid\"])\n reply.append(item)\n return reply\n return pay_data", "def gen_stats_report():\n donor_stats = []\n for donors, donations in donor_data.items():\n total = sum(donations)\n num = len(donations)\n average = total/num\n donor_stats.append((donors, total, num, average))\n\n donor_stats.sort(key=itemgetter(1), reverse=True)\n header = [\"{:<10}|{:>21}|{:>19}|{:>22}|\".format(\n \"Donor\",\n \"Total Donated ($)\",\n \"Total Donations\",\n \"Average Donation ($)\"\n ), \"-\" * 83]\n for line in donor_stats:\n header.append(\"{:<10}|{:>21,.2f}|{:>19}|{:>22,.2f}|\".format(*line))\n return \"\\n\".join(header)", "def list(per_page=None, page=None):\n # Comprehension dict are not supported in Python 2.6-. You can use this commented line instead of the current\n # line when you drop support for Python 2.6.\n # pagination = {key: value for (key, value) in [('page', page), ('per_page', per_page)] if value}\n pagination = dict((key, value) for (key, value) in [('page', page), ('per_page', per_page)] if value)\n\n http_client = HttpClient()\n response, _ = http_client.get(routes.url(routes.PAYMENT_RESOURCE, pagination=pagination))\n return resources.APIResourceCollection(resources.Payment, **response)", "def _get_donor_names():\n return tuple(x[0] for x in _donors)", "def viewdonations(request):\r\n # Obtain the context from the HTTP request.\r\n\r\n context_instance = RequestContext(request)\r\n\r\n try:\r\n user = _validate_and_get_geniuser(request)\r\n except LoggedInButFailedGetGeniUserError:\r\n return _show_failed_get_geniuser_page(request)\r\n\r\n\r\n username = user.username\r\n my_donations = interface.get_donations(user)\r\n lent = \"AA\"\r\n\r\n return render(request, 'control/viewdonations.html', {'username' : username, \r\n 'my_donations' : my_donations, 'lent' : lent})", "def _all_donors(self, include_background=True):\n sheets = self.shortcut_sheets\n if not include_background:\n sheets = filter(is_not_background, sheets)\n for sheet in sheets:\n for entity in sheet.bio_entities.values():\n yield entity", "def get_donor_info(self):\n name = self.get_donor()\n if name in self.all_donors:\n person = self.r.hgetall(name)\n print(f\"Person: {name}\")\n for key, value in person.items():\n print(f\"{key}: {value}\")\n else:\n print(\"Name not in database.\")", "def get_payees(self):\n # open a cursor object\n cur = self.get_cursor()\n\n # get payees from database\n cur.execute(\"SELECT * FROM payees\")\n payees_data = cur.fetchall()\n\n # convert into a list of payee dictionaries\n payees_list = []\n [payees_list.append({'payee_id': payee[0],\n 'payee_name': payee[1]})\n for payee in payees_data]\n\n # close the cursor\n self.close_cursor()\n\n return payees_list", "def get_donors(df: str):\n \n 
df_input=contrib[(contrib['cand_nm'] == df) & (contrib['contbr_occupation'] != '[BLANK]') & (contrib['contbr_occupation'] != 'INFORMATION REQUESTED') &\n (contrib['contbr_occupation'] !='INFORMATION REQUESTED PER BEST EFFORTS') &\n (contrib['contbr_occupation'] != 'RETIRED')]\n \n top_occupation=df_input. groupby(['contbr_occupation']). agg({'contbr_occupation':'count'}). rename(columns={'contbr_occupation': 'count'}). sort_values('count', ascending = False). apply(lambda x: x.nlargest(5)).reset_index()\n \n \n print(\"top 5 occupations of {}'s donors: \\n{}\\n\".format(df, top_occupation))", "def print_thank_you_total(donor):\n # donor comes in with last donation date , last_donation and fullname\n # pull donor total form global total list -\n for d in donor_totals_list:\n if d.fullname == donor.fullname:\n donation_total = d.donation_total\n\n thank_you = '''\\n\\nDear {}\n\n Thank you for your most recent generous donation of ${:,.2f}. You're support of ${:,.2f}\n over the years has helped us fund many great programs! We wanted to write you to thank you and that we \n look forward to your continued support!\n\n Sincerely,\n\n The ChickTech Donations Department'''.format(donor.fullname, donor.last_donation, donation_total)\n return thank_you", "def get_dealers(zip, make, radius=50):\n api_url = 'https://api.edmunds.com/api/dealer/v2/dealers?zipcode={}&make={}&radius={}&fmt=json&api_key={}'\\\n .format(zip, make, radius, API_KEY)\n r = requests.get(api_url).json()\n dealers = r['dealers']\n print len(dealers)\n for a in dealers:\n print a\n dealers = [format_dealer(dealer) for dealer in dealers]\n return dealers", "def create_report():\n\n # Sort by total donation amount\n _donors.sort(key=lambda x: -sum(x[1]))\n\n # Generate the report\n _str_ = [\"Donor Name | Total Given | Num Gifts | Average Gift\\n\" +\n \"------------------------------------------------------------------\"]\n for donor in _donors:\n sm = sum(donor[1])\n l = len(donor[1])\n _str_.append(f\"{donor[0]:<25} $ {sm:>9.2f} {l:>9d} $ {sm / l:>10.2f}\")\n\n report = '\\n'.join(_str_)\n print(report)\n return report", "def getPaymentsByPayer(payments, payer_name):\n result = []\n for p in payments:\n if p.payer == payer_name:\n result.append(p)\n return result", "def add_donations():\n done = False\n while not done:\n name = input(\"Enter donor name (or \\\"list\\\" for list): \")\n if name == \"list\":\n # list donor names\n for d in donor_history: print(d.name)\n continue\n for thisdonor in donor_history:\n if name == thisdonor.name:\n break\n if thisdonor == None:\n thisdonor = donor(name)\n donor_history.append(thisdonor)\n print(\"Adding new donor: \" + name)\n moredonations = True\n while moredonations:\n value = input(\"Enter donation amount or -1 when finished: \")\n try:\n donation_amount = int(value)\n except ValueError:\n print(\"Invalid input, reenter.\")\n continue\n if donation_amount == -1: break\n thisdonor.donations.append(donation_amount)\n done = True\n if thisdonor: print(f\"Thank you, {name}, for your donation(s)!\")\n print()\n return", "def get_members(self):\n return sorted([x[\"patient\"] for x in self.pedigree])", "def get( self, donor_type ):\n\n # Grab the donor type and make sure it is one of the allowed values.\n if donor_type not in [ 'caged', 'queued' ]:\n raise TypeError\n\n query_terms = build_filter_from_request_args( request.args )\n\n # Sanitize incoming partial UUID to only hex characters.\n if 'searchable_id' in query_terms:\n if 'eq' in query_terms[ 'searchable_id' ]:\n is_hex_string( 
query_terms[ 'searchable_id' ][ 'eq' ] )\n elif 'in' in query_terms[ 'searchable_id' ] or 'nin' in query_terms[ 'searchable_id' ]:\n for in_nin in query_terms[ 'searchable_id' ]:\n for searchable_id in query_terms[ 'searchable_id' ][ in_nin ]:\n is_hex_string( searchable_id )\n\n page_information = {}\n sort_information = []\n if query_terms:\n page_information = {}\n if 'paginate' in query_terms and query_terms[ 'paginate' ]:\n page_information = {\n 'page_number': query_terms[ 'paginate' ][ 'page_number' ],\n 'rows_per_page': query_terms[ 'paginate' ][ 'rows_per_page' ]\n }\n del query_terms[ 'paginate' ]\n\n if 'sort' in query_terms and query_terms[ 'sort' ]:\n sort_information = query_terms[ 'sort' ]\n del query_terms[ 'sort' ]\n\n donors = get_donors(\n donor_type,\n query_terms,\n page_information=page_information,\n sort_information=sort_information\n )\n\n if page_information:\n transformed_data = transform_data(\n 'donate/donors/{}'.format( donor_type ),\n page_information,\n donors,\n CagedDonorSchema\n )\n response = jsonify( transformed_data[ 'page' ] )\n response.headers[ 'Link' ] = transformed_data[ 'link-header' ]\n response.status_code = 200\n return response\n\n if donor_type == 'caged':\n schema = CagedDonorSchema( many=True )\n else:\n schema = QueuedDonorSchema( many=True )\n\n result = schema.dump( donors ).data\n\n return result, 200", "def GetAllDateOfPaymentOfCost():\n\n logs.logger.debug(\n \"Start to get back all payment date of Cost objects from database.\")\n try:\n searchedCostsItems = session.query(Cost.Cost).all()\n logs.logger.info(\n \"Get back all payment date of Cost objects from database.\")\n return [CostItems.dateOfPayment for CostItems in searchedCostsItems]\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def test_get_donor_stats():\n total_given, total_gifts, average_gift = mailroom.get_donor_stats(\n \"Paul Allen\")\n # Use round to ensure the precision is as good as we need\n assert round(total_given, 2) == 708.42\n assert total_gifts == 3\n assert round(average_gift, 2) == round(708.42 / 3, 2)", "def addDonation(self, amount):\n self.donationList.append(amount)", "def get_all_persons(self):\r\n return self.__person_repository.elements", "def get_customers(self):\n self.navigate_to_page()\n customer_list=[]\n while True:\n page_customer = [{\n 'name': self.get_name(customer), \n 'parent':self.get_parent(customer),\n 'active':self.get_active(customer),\n 'link':self.get_details_link(customer)\n } for customer in self.get_page_customers()]\n customer_list = page_customer + customer_list\n if not CustomerGroupsPage.have_next_page(self):\n break\n self.navigate_to_page()\n return customer_list", "def test_caged_donors( self ):\n with self.app.app_context():\n url = '/donation/donors/caged'\n # Ensure a GET with no saved caged_donors returns 0.\n response = self.test_client.get( url, headers=self.headers )\n self.assertEqual( len( json.loads( response.data.decode( 'utf-8' ) ) ), 0 )\n\n # Create some caged_donors to retrieve.\n total_caged_donors = 5\n caged_donor_models = create_model_list( CagedDonorSchema(), get_caged_donor_dict(), total_caged_donors )\n database.session.bulk_save_objects( caged_donor_models )\n database.session.commit()\n\n # Ensure GET returns all caged_donors.\n response = self.test_client.get( url, headers=self.headers )\n self.assertEqual( len( json.loads( response.data.decode( 'utf-8' ) ) ), total_caged_donors )", "def get_adopters_for_advertisement(adoption_center, list_of_adopters, n):\n list_of_adopters = 
sorted(list_of_adopters, key=lambda x:x.get_name())\n ordered_list = sorted(list_of_adopters, key=lambda x:x.get_score(adoption_center),reverse = True)\n return ordered_list[:n]", "def doctors(self) -> DoctorsList:\n data = self.get(\"minhealth_doctors\")\n \n ls = [Doctors(**doc) for doc in data]\n return DoctorsList(items=ls)", "def list_payments(owner, statuses=None):\n if statuses is None:\n return list(Payment.payments.filter(owner=owner).all().order_by('-started_at'))\n return list(Payment.payments.filter(owner=owner, status__in=statuses).all().order_by('-started_at'))", "def sorted(self):\n return (\n self.get_queryset().annotate(\n null_member=models.Count('member'),\n ).order_by('-null_member', 'member')\n )", "def show():\n logger.info('List donors')\n try:\n logger.info('Connecting to database...')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for i in Donor.select().order_by(Donor.donor_name):\n print(i)\n except Exception as e:\n logger.info(e)\n finally:\n database.close()", "def top30_clients(self):\n clients = self.clients_sorted_by_rentals()\n return clients[:int(0.3 * len(clients))]", "async def eventstats_donors(self, ctx, event_id: int = None):\n if event_id:\n ctx.config = await self.bot.utils.event_config_id(event_id)\n if ctx.config and ctx.config.guild_id != ctx.guild.id and not await self.bot.is_owner(ctx.author):\n return await ctx.send(\n \"Uh oh! You're trying to get info for an event not registered to this server! \"\n \"Please try again with a different Event ID.\"\n )\n\n if not ctx.config:\n event_id = await self.get_recent_event(ctx.guild.id)\n if event_id:\n ctx.config = await self.bot.utils.event_config_id(event_id)\n else:\n return await ctx.send(\n 'It would appear that there are no recent events connected with this server. 
You can:\\n'\n 'Use `+add event` to create an event.\\n'\n 'Use `+info events` to list all events on this server.\\n'\n 'Use `+seasonstats attacks` to see results for the season.'\n )\n\n query = \"\"\"SELECT player_tag, \n (end_friend_in_need + end_sharing_is_caring) - (start_friend_in_need + start_sharing_is_caring) as donations\n FROM eventplayers \n WHERE event_id = $1\n ORDER BY donations DESC\n NULLS LAST\n \"\"\"\n fetch = await ctx.db.fetch(query, ctx.config.id)\n\n title = f\"Donations for {ctx.config.event_name}\"\n\n p = StatsDonorsPaginator(ctx, fetch, title, page_count=math.ceil(len(fetch) / 20))\n await p.paginate()", "def getPersons():\n\n cur, user_id = initialise(3)\n cur.execute(\"SELECT username FROM users WHERE NOT username = (SELECT username FROM users WHERE id = ?)\", [user_id])\n tempPersons = cur.fetchall()\n persons = []\n for person in tempPersons:\n persons.append(person[0])\n persons.sort()\n return persons", "def get_numbers(driver):\n soup = BeautifulSoup(driver.page_source, 'lxml')\n followers = soup.find(\"li\", class_=\"Y8-fY\").nextSibling.a.span.string\n following = soup.find(\"li\", class_=\"Y8-fY\").nextSibling.nextSibling.a.span.string\n name = soup.find('h1', class_='rhpdm').string\n return [followers, following, name]", "def display_score_for_group(self):\n org, npc = self.get_org_or_npc_from_args()\n if org and org.secret:\n raise CommandError(\"Cannot display donations for secret orgs.\")\n group = org or npc\n if not group:\n return\n msg = \"Top donors for %s\\n\" % group\n table = PrettyTable([\"Donor\", \"Amount\"])\n for donation in (\n group.donations.filter(amount__gt=0).distinct().order_by(\"-amount\")\n ):\n table.add_row([str(donation.giver), str(donation.amount)])\n msg += str(table)\n self.msg(msg)", "def iterate_payments(**kwargs):\n return Payment.payments.filter(**kwargs).all()", "def sortdb():\n return sorted(donor_db.items(), key=sumdbkey, reverse=True)", "def movies_sorted_by_rentals(self):\n rentals = self.get_list()\n number_of_rentals = dict.fromkeys([rental.movie for rental in rentals], 0)\n for rental in rentals:\n number_of_rentals[rental.movie] += 1\n items = sorted(number_of_rentals.items(), key = lambda item: item[1], reverse=True)\n return [MovieDTO(item[0], item[1]) for item in items]", "def get_donors(df):\n \n top_occupation = df. groupby(['contbr_occupation']). agg({'contbr_occupation':'count'}). rename(columns = {'contbr_occupation': 'count'}). sort_values('count', ascending = False). 
apply(lambda x: x.nlargest(5)).reset_index()\n \n return top_occupation", "def get_persons(self):\n return self.person_list.model().get_person_list()", "def update_lists():\n global donor_totals_list\n global donor_donations_list\n global donor_names_list\n global last_donation_list\n donor_totals_list = get_all_donor_totals()\n donor_donations_list = get_list_of_donations()\n donor_names_list = get_list_of_donors()\n last_donation_list = get_max_donation_date_list()", "def print_single_donor_donations():\n update_lists()\n donor_name = get_name_input()\n single_donor_print(donor_name)\n return donor_name", "def num_donations(self):\n return len(self.donations)", "def get_all_charges_output():\n chargeDB = ChargeDBHelper()\n charge_list = []\n\n rxcui_bundles = chargeDB.get_all_charge_bundles()\n clinic_count = clinic_cnt_for_days(chargeDB.get_days_spanned())\n\n for bundle in rxcui_bundles:\n bundle_info = to_order_limit_row(bundle, clinic_count, False)\n bundle_size = len(bundle.charges)\n item_per_clinic = bundle.get_item_per_clinic_list()\n\n\n for i in range(bundle_size-1,-1,-1):\n charge_row_dict = OrderedDict()\n charge = bundle.charges[i]\n #add charge bundle info every charge\n for k,v in bundle_info.iteritems(): charge_row_dict[k]=v\n charge_row_dict[ChargeColumns.AVG_PILL_CHG]=item_per_clinic[i]\n #add charge-specific info for every charge\n for k,v in charge.iteritems(): charge_row_dict[k]=v\n charge_list.append(charge_row_dict)\n\n\n chargeDB.close()\n return charge_list", "def print_donor_report(database):\n name_max = 30\n\n rpt_title = \"Donor Name\" + ' ' * (name_max - 9) + \"| Total Given | Num Gifts | Average Gift\"\n print(rpt_title)\n print(\"-\" * len(rpt_title))\n\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n\n query = (Donor\n .select(Donor.name,\n fn.COUNT(Donation.amount).alias('ccount'),\n fn.SUM(Donation.amount).alias('csum'),\n fn.AVG(Donation.amount).alias('cavg'))\n .join(Donation, JOIN.LEFT_OUTER)\n .group_by(Donor.name)\n )\n\n for d in query:\n print(f\"{d.name:{name_max}} $ {d.csum:>10.2f} {d.ccount:>9} ${d.cavg:>12.2f}\")\n\n database.close()", "def people(persons):\n sorted_list = sorted(persons, key=lambda k: k['age'])\n return sorted_list", "def get_deals_list(self, session) -> List:\n\n deals = session.query(\n Deals.id,\n Deals.linkedin,\n Deals.leadgen_id\n ).all()\n\n return deals", "def diamonds(self):\n return sorted(tuple([v for v in self if v.suit == 'diamonds']), reverse=True)", "def _get_wallets(self, freelancer_id, client_id):\n response_freelancer = requests.get(self.BASE_API +\n f'api/freelancers/{freelancer_id}')\n response_client = requests.get(self.BASE_API +\n f'api/clients/{client_id}')\n\n registered_freelancer = json.loads(response_freelancer.text)\n registered_client = json.loads(response_client.text)\n\n freelancer_wallet = registered_freelancer['total_earnings']\n client_wallet = registered_client['total_spend']\n\n return (freelancer_wallet, client_wallet)", "def populate_donations():\n logger.info('Starting Donations table population')\n\n DONATION_DATE = 0\n DONATION_AMOUNT = 1\n DONATED_BY = 2\n\n d = datetime.today() - timedelta(days=random.randint(1, 301))\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n\n for donor in Donors:\n # Randomly generated number of donations\n #donation_times = random.randint(1, 10)\n for donation in range(random.randint(1, 10)):\n with database.transaction():\n # random date in last year\n # random donation amount converted to 
decimal\n # pulling donor fullname as id\n new_donation = Donations.create(\n donation_date=datetime.today() - timedelta(days=random.randint(1, 301)),\n donation_amount=decimal.Decimal(\n random.randrange(1, 9999999))/100,\n donated_by=donor.fullname,\n )\n new_donation.save()\n logger.info('Database add successful')\n\n logger.info('Print the Donors records we saved...')\n for don in Donations:\n logger.info(f'donation: {don.id} : {don.donation_date} : {don.donation_amount} : '\n + f' donor_id: {don.donated_by} has been added to the Donations table ')\n except Exception as e:\n logger.info(f'Error creating = {donation[DONATION_DATE]} {donation[DONATION_AMOUNT]}'\n + f'{donation[DONATED_BY]}')\n logger.info(e)\n logger.info('See how the database protects our data')\n finally:\n logger.info('closing database')\n database.close()", "def get_members(self):\n return sorted([x[\"patient\"] for x in self.get_filtered_pedigree_with_samples()])", "async def get_all_investigators(request):\n client_key = general.get_request_key_header(request)\n investigator_list = await security_messaging.get_investigators(request.app.config.VAL_CONN, client_key)\n\n investigator_list_json = []\n for address, dp in investigator_list.items():\n investigator_list_json.append({\n 'public_key': dp.public_key,\n 'name': dp.name\n })\n return response.json(body={'data': investigator_list_json},\n headers=general.get_response_headers())", "def get(self, args):\n return Payment.query.offset(args['offset']).limit(args['limit'])", "def add_donation(self, amount):\n self.donations.append(amount)" ]
[ "0.71429664", "0.68319184", "0.6688947", "0.65133834", "0.65004057", "0.64896405", "0.6419263", "0.63591397", "0.6345003", "0.6311067", "0.6118759", "0.60955817", "0.6024189", "0.5991387", "0.5889248", "0.58387095", "0.5754497", "0.57437706", "0.5725672", "0.57109547", "0.5638232", "0.561924", "0.56098115", "0.5590151", "0.55869913", "0.55869913", "0.5522128", "0.55151457", "0.53870696", "0.53834003", "0.5380326", "0.5363315", "0.5357959", "0.5354456", "0.53247195", "0.52872396", "0.5285354", "0.5280338", "0.52773595", "0.52597576", "0.5224847", "0.52052355", "0.5198659", "0.51580197", "0.51557297", "0.5152697", "0.5127127", "0.5117946", "0.5106445", "0.50877875", "0.506603", "0.5044863", "0.5026754", "0.50076973", "0.50050247", "0.499371", "0.4962132", "0.49527076", "0.49158427", "0.49145386", "0.49008846", "0.48928005", "0.4883468", "0.48750907", "0.4865763", "0.48393562", "0.48386264", "0.48376083", "0.4829569", "0.48092157", "0.48069844", "0.48002797", "0.47862256", "0.47718343", "0.47714296", "0.4769929", "0.4761142", "0.4760483", "0.47590843", "0.47576752", "0.47542125", "0.47534156", "0.47472465", "0.47468933", "0.47409722", "0.47409403", "0.47374043", "0.47305286", "0.47237045", "0.472326", "0.47063082", "0.47033262", "0.46995297", "0.46883836", "0.46830386", "0.46828288", "0.46811306", "0.46692687", "0.46634954", "0.4660792" ]
0.5803403
16
Sets the learning rate to the initial LR decayed by 0.5 every 5 epochs
def adjust_learning_rate(optimizer, epoch):
    lr = args.lr * (0.4 ** (epoch // 4))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def adjust_learning_rate(init_lr, optimizer, epoch, n=100):\n init_lr = init_lr * (0.1 ** (epoch // n))\n print('learning rate : ', init_lr)\n for param_group in optimizer.param_groups:\n param_group['lr'] = init_lr", "def adjust_learning_rate(optimizer, epoch):\n lr = opt.lr * (0.5 ** (epoch // opt.step))\n return lr", "def adjust_learning_rate(start_lr, optimizer, epoch, total_epoch_num):\n #lr = start_lr * (0.1 ** (epoch // 30))\n lr = start_lr * (0.3 ** (epoch // 5))\n if epoch==total_epoch_num:\n lr = lr * 0.3\n\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def learning_rate(epoch):\n self.lr = self.lr / 1.00000001\n return self.lr", "def adjust_lr(self):\n learning_rate = self.params.base_lr * (1 - float(self.epoch) / self.params.num_epoch) ** self.params.power\n for param_group in self.opt.param_groups:\n param_group['lr'] = learning_rate\n print('Change learning rate into %f' % (learning_rate))\n self.summary_writer.add_scalar('learning_rate', learning_rate, self.epoch)", "def adjust_learning_rate(initial_lr, optimizer, epoch, every_epoch):\n lr = initial_lr * (0.1 ** (epoch // every_epoch))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(self, epoch):\n lr = self.lr * (0.5 ** (epoch // 2))\n for param_group in self.optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n initial_lr = args.lr\n if epoch <= 150:\n lr = initial_lr\n elif epoch <=225:\n lr = initial_lr/10\n else:\n lr = initial_lr/100\n\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n print(\"=\"*100)\n print('At epoch:',epoch,\" lr is:\",lr)", "def adjust_learning_rate(lr, optimizer, epoch):\n lr = lr_init * (0.1 ** (epoch // 10))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n return lr", "def update_learning_rate(self) -> None:\n self.epsilon = self.initial_epsilon / (1. + self.rate_decay * self.n_it)\n return", "def update_learning_rate(self) -> None:\n self.epsilon = self.initial_epsilon / (1. 
+ self.rate_decay * self.n_it)\n return", "def adjust_learning_rate(jnet, optimizer, epoch):\n lr = args.lr * (0.5 ** (epoch // 10))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(args, optimizer, epoch):\n if (epoch*3==args.epochs) or (epoch*3==2*args.epochs):\n lr = args.lr * (0.1 ** (epoch*3//args.epochs))\n print(\"Changing Learning Rate to {}\".format(lr))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr\n if 20 < epoch <= 30:\n lr = 0.0001\n elif 30 < epoch :\n lr = 0.00001\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n print(\"learning rate -> {}\\n\".format(lr))", "def adjust_learning_rate(lr, optimizer, lr_decay, epoch):\n\n if epoch >= lr_decay[0]:\n lr = lr * 0.1\n if epoch >= lr_decay[1]:\n lr = lr * 0.01\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n\n return lr", "def adjust_learning_rate(optimizer, epoch, args, step):\n lr = args.lr * (0.1 ** (epoch // step))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(lr, decay, optimizer, cur_epoch, every_n_epochs):\n new_lr = lr * (decay ** (cur_epoch // every_n_epochs))\n\n # if cur_epoch % every_n_epochs == 0:\n # new_lr = lr * 0.1\n\n for param_group in optimizer.param_groups:\n param_group['lr'] = new_lr", "def adjust_learning_rate(lr, optimizer, epoch, decay_epoch=30):\n lr = lr * (0.1 ** (epoch // decay_epoch))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = hyper.lr * (0.5 ** (epoch // 10))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n return lr", "def adjust_learning_rate(optimizer, epoch):\n lr = 0.5 * (0.1 ** (epoch // 100))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(self, optimizer, epoch, args):\n lr = args.learning_rate * (0.1 ** (epoch // 30))\n # print(lr)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, lr_factor, epoch):\n #lr = args.lr * (0.1 ** (epoch // 30))\n print('the learning rate is set to {0:.5f}'.format(lr_factor[epoch]*args.lr))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr_factor[epoch]*args.lr", "def adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr * (0.3 ** (epoch // args.lr_decay))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, args):\r\n lr = args.lr * (0.1 ** (epoch // 30))\r\n for param_group in optimizer.param_groups:\r\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, config):\n if config.cos:\n lr_min = 0\n lr_max = config.lr\n lr = lr_min + 0.5 * (lr_max - lr_min) * (1 + math.cos(epoch / config.num_epochs * 3.1415926535))\n else:\n epoch = epoch + 1\n if epoch <= 5:\n lr = config.lr * epoch / 5\n elif epoch > 180:\n lr = config.lr * 0.01\n elif epoch > 160:\n lr = config.lr * 0.1\n else:\n lr = config.lr\n\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, lr=0.01):\n new_lr = lr * (0.1 ** (epoch // 5))\n for param_group in optimizer.param_groups:\n param_group[\"lr\"] = new_lr", "def adjust_learning_rate(lr, optimizer, epoch):\n lr = lr * (0.1 ** (epoch // 10))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def 
adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr * (0.5 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(self):\n out_base_lr = self.args.base_lr\n for param_group in self.optimizer.param_groups:\n in_lr = param_group[\"initial_lr\"]\n out_lr = in_lr\n if self.args.lr_decay_type == \"cos\": # cosine lr schedule\n out_lr *= 0.5 * (1.0 + np.cos(np.pi * self.epoch / self.args.epochs))\n else: # stepwise lr schedule\n for milestone in self.args.lr_step_schedule:\n out_lr *= 0.1 if self.epoch >= milestone else 1.0\n param_group[\"lr\"] = out_lr\n if in_lr == self.args.base_lr:\n out_base_lr = out_lr\n if self.train_logger is not None:\n self.train_logger.scalar_summary(\n \"metrics/%s/epoch\" % self.full_name, self.epoch, step=self.iteration, increment_counter=False\n )\n self.train_logger.scalar_summary(\n \"metrics/%s/lr\" % self.full_name, out_base_lr, step=self.iteration, increment_counter=False\n )\n print(\"Epoch\", self.epoch, \"Learning rate\", out_base_lr)\n return out_base_lr", "def adjust_learning_rate(optimizer, epochs, base_lr):\r\n lr = base_lr * (0.01 ** (epochs//5))\r\n print('Learning Rate decreased to {}'.format(lr))\r\n for param_group in optimizer.state_dict()['param_groups']:\r\n param_group['lr'] = lr", "def ft_adjust_learning_rate(optimizer, intial_lr, epoch, lr_steps):\n decay = 0.3 ** (sum(epoch >= np.array(lr_steps)))\n lr = intial_lr * decay\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate_D(start_lr, optimizer, epoch):\n #lr = start_lr * (0.1 ** (epoch // 30))\n lr = start_lr * (0.3 ** (epoch // 5))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def initialize_learning_rate(self):\n\n if (self.FLAGS.learning_rate_decay is \"exponential\"):\n self.learning_rate = tf.train.exponential_decay(\n self.FLAGS.learning_rate,\n self.global_step,\n self.FLAGS.decay_steps,\n self.FLAGS.decay_rate)\n else :\n self.learning_rate = self.FLAGS.learning_rate", "def adjust_learning_rate(optimizer, epoch, args):\r\n lr = args.lr * (0.1 ** (epoch // 30))\r\n for param_group in optimizer.param_groups:\r\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr\n if epoch >= 75:\n lr = args.lr * 0.1\n if epoch >= 90:\n lr = args.lr * 0.01\n if epoch >= 100:\n lr = args.lr * 0.001\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(learning_rate,optimizer, epoch):\n lr = learning_rate * (0.1 ** (epoch // 25))\n print(str(lr))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n \n lr = learning_rate * (0.1 ** (epoch // 15))\n if lr >= 0.0001:\n lr = 0.0001\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr\n if epoch >= 60:\n lr = args.lr * 0.1\n if epoch >= 90:\n lr = args.lr * 0.01\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch,threshold,lr_init,lr_decay_rate):\n lr = lr_init * (lr_decay_rate ** (epoch // threshold))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n return lr", "def adjust_learning_rate(self, optimizer, epoch):\n lr = self.lr\n if epoch >= 80:\n lr = self.lr * (0.1 ** ((epoch-80) // 40))\n\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\r\n lr = 
args.lr\r\n if epoch >= 0.5 * args.epoch:\r\n lr /= 10\r\n if epoch >= 0.75 * args.epoch:\r\n lr /= 10\r\n for param_group in optimizer.param_groups:\r\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, args, epoch):\n\tlr = args.learning_rate * (0.1 ** (epoch // args.lr_decay_step))\n\tfor param_group in optimizer.param_groups:\n\t\tparam_group['lr'] = lr", "def adjust_learning_rate_schedule(optimizer, epoch, initial_lr, decay_factor, decay_epochs):\n\n # Find the index of the current interval:\n interval_index = len([mark for mark in decay_epochs if mark < epoch])\n\n lr = initial_lr * (decay_factor ** interval_index)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n \n boundary = [args.epochs//2,args.epochs//4*3,args.epochs]\n lr = args.lr * 0.1 ** int(bisect.bisect_left(boundary, epoch))\n print('Learning rate: %f'%lr)\n #print(epoch, lr, bisect.bisect_left(boundary, epoch))\n # lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n\n return lr", "def adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1 ** (epoch // 15))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, n):\n lr = args.lr * (0.1 ** (epoch // n))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate_adam(optimizer, epoch):\n \n boundary = [args.epochs//5*4]\n lr = args.lr * 0.2 ** int(bisect.bisect_left(boundary, epoch))\n print('Learning rate: %f'%lr)\n #print(epoch, lr)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n \n return lr", "def update_lr(optimizer, lr, epoch, max_epochs, exponent=0.9):\n optimizer.param_groups[0]['lr'] = lr * (1 - epoch / max_epochs)**exponent", "def adjust_learning_rate(optimizer, epoch, args):\n \"\"\"Comes from pytorch demo\"\"\"\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1 ** (epoch // 100))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, learning_rate):\n\n if epoch >= 60 and epoch < 75:\n lr = learning_rate / 2\n elif epoch >= 75:\n lr = learning_rate / 4\n else:\n lr = learning_rate\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def assign_learning_rate(session, lr_update, lr_placeholder, new_lr):\n session.run(lr_update, feed_dict={lr_placeholder: new_lr})", "def adjust_learning_rate(optimizer, epoch, lr):\n lr = lr * ((1 - 0.015) ** epoch)\n print('learning rate : {}'.format(lr))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\r\n lr = 0.001 * (0.1 ** (epoch // 30))\r\n for param_group in optimizer.param_groups:\r\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = ln * (0.1 ** (epoch // 30))\n for param_group 
in optimizer.param_groups:\n param_group['lr'] = lr\n print(\"learning rate\",lr)", "def update_learning_rate(self):\r\n self.scheduler.step(self.clock.epoch)", "def adjust_learning_rate(optimizer, epoch):\r\n lr = args.lr * (0.1 ** (epoch // 30))\r\n for param_group in optimizer.param_groups:\r\n param_group['lr'] = lr", "def adjust_learning_rate(epoch, learn_rate, decay_step, decay_rate, optimizer):\n steps = np.sum(epoch > np.asarray(decay_step))\n if steps > 0:\n new_lr = learn_rate * (decay_rate ** steps)\n for param_group in optimizer.param_groups:\n param_group['lr'] = new_lr", "def adjust_learning_rate(optimizer, epoch, decay, lrate):\n lr = lrate * (0.1 ** (epoch // decay))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, lr, step, args):\n # decay = 0.1**(sum(epoch >= np.array(lr_steps)))\n lr = lr * (0.95**(step//args.lr_decay_every))\n print(\"current learning rate: {:.6f}\".format(lr))\n param_group = optimizer.param_groups\n for i in range(len(param_group)):\n param_group[i]['lr'] = lr\n\n return optimizer", "def adjust_learning_rate(opt, epoch):\n d, e = args.lr_decay\n lr = args.lr * (d ** -(epoch // e))\n for param_group in opt.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.5 ** (epoch // args.low_lr))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1**(epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def _internal_adjust_learning_rate_pyramid(epoch):\n lr = base_lr * (0.1 ** (epoch // (max_epoch * 0.5))) * (0.1 ** (epoch // (max_epoch * 0.75)))\n return lr", "def adjust_learning_rate(optimizer, epoch, args):\n lr = args.lr\n if args.cos: # cosine lr schedule\n lr *= 0.5 * (1. 
+ math.cos(math.pi * epoch / args.epochs))\n else: # stepwise lr schedule\n for milestone in args.schedule:\n lr *= 0.1 if epoch >= milestone else 1.\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = LEARNING_RATE * (0.1 ** (epoch // 10))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, lr):\n lr = lr * (0.5 ** (epoch // 20))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n return lr", "def adjust_learning_rate(self, optimizer, epoch, initial_lr, writer=None):\n lr = initial_lr * (0.98 ** epoch)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n if writer:\n writer.add_scalar(\"lr_G\", lr, epoch + 1)", "def adjust_learning_rate(optimizer, epoch, num_epochs, lr_decay=0.5):\n step = num_epochs // 1\n\n if not epoch % step and epoch > 0:\n for param_group in optimizer.param_groups:\n param_group['lr'] *= lr_decay\n print('Learning rate sets to {}.'.format(param_group['lr']))\n else:\n for param_group in optimizer.param_groups:\n print('Learning rate sets to {}.'.format(param_group['lr']))", "def adjust_learning_rate(lr, lr_decay_steps, optimizer, epoch, lr_decay_rate=0.1):\n steps = list(map(int, lr_decay_steps.split(',')))\n for milestone in steps:\n lr *= lr_decay_rate if epoch >= milestone else 1.\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def step_decay(epoch):\n # initial_lrate = 1.0 # no longer needed\n #drop = 0.5\n #epochs_drop = number_epoch_drop\n #lrate = init_lr * math.pow(drop,\n #math.floor((1+epoch)/epochs_drop))\n lr = learning_rate\n if epoch >= 60:\n lr *= 1e-1\n print('Learning rate: ', lr)\n return lr", "def adjust_learning_rate(optimizer, epoch, base_lr):\n lr = max(base_lr * (0.5 ** (epoch // 20)), 1e-5)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n #if (epoch + 1) == 51 or (epoch + 1) == 101 or (epoch + 1) == 151:\n # #lr = lr * 0.1\n # for param_group in optimizer.param_groups:\n # param_group['lr'] *= 0.1", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (args.expo ** epoch)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(epoch, learning_rate, lr_decay_epochs, optimizer):\r\n steps = np.sum(epoch > np.asarray(lr_decay_epochs))\r\n if steps > 0:\r\n new_lr = learning_rate * (lr_decay_rate ** steps)\r\n for param_group in optimizer.param_groups:\r\n param_group['lr'] = new_lr", "def adjust_learning_rate(optimizer, epoch):\n \n lr = opt.lr * (0.1 ** (epoch // 3)) # Original\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def _update_learning_rate(self):\r\n\r\n self.n_steps += 1\r\n lr = self.factor * self._get_lr_scale()\r\n for param_group in self._optimizer.param_groups:\r\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer: torch.optim.SGD, epoch: int, args: Namespace):\n lr = args.lr * (0.1 ** (epoch // 90)) * (0.1 ** (epoch // 180)) * (0.1 ** (epoch // 270))\n # log to 
TensorBoard\n if args.tensorboard:\n log_value('learning_rate', lr, epoch)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * ((1 - 0.015) ** epoch)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch, power):\n lr = args.lr * (0.1 ** (power*(epoch // 30)))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate_GAN(optimizer, epoch):\r\n lrD = opt.lrD * (0.1 ** (epoch // opt.step))\r\n return lrD", "def adjust_learning_rate(args,optimizer, epoch):\n \n args.epochs\n\n lr = args.lr * (\n (0.2 ** int(epoch >= args.epochs - 140))\n * (0.2 ** int(epoch >= args.epochs - 80))\n * (0.2 ** int(epoch >= args.epochs - 40))\n )\n\n ##lr = args.lr ##DELETE ME!\n\n if args.tensorboard:\n log_value(\"learning_rate\", lr, epoch)\n\n for param_group in optimizer.param_groups:\n param_group[\"lr\"] = lr", "def adjust_learning_rate(optimizer, epoch, lr):\n lr = lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.1 ** (epoch // args.lr_drop))\n print('lr= '+str(lr), flush=True)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, cur_epoch, base_lr=0.1, lr_schedule=[4, 8, 12, 14, 16]):\n lr = 0\n for i, e in enumerate(lr_schedule):\n if cur_epoch < e:\n lr = base_lr * (0.1 ** i)\n break\n if lr == 0:\n lr = base_lr\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def lr_decay(config, optimizer: optim.Optimizer, epoch: int) -> optim.Optimizer:\n lr = config.learning_rate / (1 + config.lr_decay * (epoch - 1))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n print('learning rate is set to: ', lr)\n return optimizer", "def __learning_rate(self, lr0, epoch):\n \n \"\"\"\n Dan's Methos\n \"\"\"\n lrs = lr0 * 0.001\n c = np.power((lrs/lr0), 1.0/self.__maxEpoch)\n \n return lr0*np.power(c, epoch)", "def _update_initial_learning_rate(configs, learning_rate):\n\n optimizer_type = get_optimizer_type(configs[\"train_config\"])\n if optimizer_type == \"rms_prop_optimizer\":\n optimizer_config = configs[\"train_config\"].optimizer.rms_prop_optimizer\n elif optimizer_type == \"momentum_optimizer\":\n optimizer_config = configs[\"train_config\"].optimizer.momentum_optimizer\n elif optimizer_type == \"adam_optimizer\":\n optimizer_config = configs[\"train_config\"].optimizer.adam_optimizer\n else:\n raise TypeError(\"Optimizer %s is not supported.\" % optimizer_type)\n\n learning_rate_type = get_learning_rate_type(optimizer_config)\n if learning_rate_type == \"constant_learning_rate\":\n constant_lr = optimizer_config.learning_rate.constant_learning_rate\n constant_lr.learning_rate = learning_rate\n elif learning_rate_type == \"exponential_decay_learning_rate\":\n exponential_lr = (\n optimizer_config.learning_rate.exponential_decay_learning_rate)\n exponential_lr.initial_learning_rate = learning_rate\n elif learning_rate_type == \"manual_step_learning_rate\":\n manual_lr = optimizer_config.learning_rate.manual_step_learning_rate\n original_learning_rate = manual_lr.initial_learning_rate\n learning_rate_scaling = float(learning_rate) / original_learning_rate\n manual_lr.initial_learning_rate = learning_rate\n for schedule in manual_lr.schedule:\n schedule.learning_rate *= learning_rate_scaling\n elif learning_rate_type == 
\"cosine_decay_learning_rate\":\n cosine_lr = optimizer_config.learning_rate.cosine_decay_learning_rate\n learning_rate_base = cosine_lr.learning_rate_base\n warmup_learning_rate = cosine_lr.warmup_learning_rate\n warmup_scale_factor = warmup_learning_rate / learning_rate_base\n cosine_lr.learning_rate_base = learning_rate\n cosine_lr.warmup_learning_rate = warmup_scale_factor * learning_rate\n else:\n raise TypeError(\"Learning rate %s is not supported.\" % learning_rate_type)", "def set_learning_rate(self, rate):\n self.SGD.set_learning_rate(rate)", "def adjust_learning_rate_and_learning_taks(optimizer, epoch, args):\n if epoch >= args.step2: \n lr = args.lr * 0.01\n elif epoch >= args.step1:\n lr = args.lr * 0.1\n else:\n lr = args.lr\n \n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n\n # Return training classes\n return range(len(args.dataset))", "def adjust_learning_rate(optimizer, epoch):\n lr = args.lr * (0.95 ** epoch)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def update_learning_rate(self):\n self.scheduler.step()\n lr = self.optimizer.param_groups[0]['lr']\n print('learning rate = %.7f' % lr)", "def adjust_learning_rate_SR(optimizer, epoch):\r\n lr = opt.lr * (0.1 ** (epoch // opt.step))\r\n return lr", "def adjust_learning_rate(opts, optimizer, epoch):\n lr = opts.lr * (0.1 ** (epoch // 30))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr", "def adjust_learning_rate(optimizer, gamma, epoch, step_index, iteration, epoch_size):\n if epoch < 0:\n lr = 1e-6 + (args.lr - 1e-6) * iteration / (epoch_size * 5)\n else:\n lr = args.lr * (gamma ** (step_index))\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr\n return lr" ]
[ "0.75293964", "0.75141555", "0.75137126", "0.74792343", "0.7465126", "0.7460736", "0.7442363", "0.73354864", "0.73117834", "0.72968334", "0.72968334", "0.72960633", "0.7283488", "0.7278503", "0.72781247", "0.72749794", "0.72587585", "0.723886", "0.72382396", "0.7237294", "0.72355586", "0.7234524", "0.7234162", "0.72314745", "0.72299296", "0.7218353", "0.72182924", "0.72155476", "0.7214274", "0.7214049", "0.71954894", "0.7187057", "0.71806777", "0.7177044", "0.7171427", "0.71704125", "0.71700597", "0.7165537", "0.7164937", "0.7164579", "0.7158631", "0.7152555", "0.71514666", "0.7151211", "0.71502835", "0.71502835", "0.71502835", "0.7148319", "0.7147411", "0.7139659", "0.71339864", "0.71336937", "0.71289974", "0.71252257", "0.71244234", "0.71099484", "0.7105974", "0.710538", "0.71041936", "0.7103638", "0.71034807", "0.7097195", "0.70947254", "0.7094402", "0.70864403", "0.7084433", "0.7084343", "0.7081754", "0.7078355", "0.7078355", "0.7078355", "0.70747995", "0.7071378", "0.7063741", "0.70625716", "0.7062281", "0.7052927", "0.705019", "0.7043637", "0.7034485", "0.70329124", "0.7032815", "0.70311785", "0.70298856", "0.7020846", "0.7020548", "0.70204496", "0.7019793", "0.70126843", "0.6986417", "0.6972679", "0.6963047", "0.69551575", "0.69537705", "0.69514537", "0.69503635", "0.6944814", "0.6936648", "0.6932051", "0.69312763" ]
0.7062625
74
Create a message for an email.
def createMessageWithAttachment(sender, to, subject, msgHtml, msgPlain, attachmentFile):
    message = MIMEMultipart('mixed')
    message['to'] = to
    message['from'] = sender
    message['subject'] = subject

    messageA = MIMEMultipart('alternative')
    messageR = MIMEMultipart('related')
    messageR.attach(MIMEText(msgHtml, 'html'))
    messageA.attach(MIMEText(msgPlain, 'plain'))
    messageA.attach(messageR)
    message.attach(messageA)

    print("create_message_with_attachment: file: %s" % attachmentFile)
    content_type, encoding = mimetypes.guess_type(attachmentFile)

    if content_type is None or encoding is not None:
        content_type = 'application/octet-stream'
    main_type, sub_type = content_type.split('/', 1)
    if main_type == 'text':
        fp = open(attachmentFile, 'rb')
        msg = MIMEText(fp.read(), _subtype=sub_type)
        fp.close()
    elif main_type == 'image':
        fp = open(attachmentFile, 'rb')
        msg = MIMEImage(fp.read(), _subtype=sub_type)
        fp.close()
    elif main_type == 'audio':
        fp = open(attachmentFile, 'rb')
        msg = MIMEAudio(fp.read(), _subtype=sub_type)
        fp.close()
    else:
        fp = open(attachmentFile, 'rb')
        msg = MIMEBase(main_type, sub_type)
        msg.set_payload(fp.read())
        fp.close()

    filename = os.path.basename(attachmentFile)
    msg.add_header('Content-Disposition', 'attachment', filename=filename)
    message.attach(msg)

    return {'raw': base64.urlsafe_b64encode(message.as_string())}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createMessage( self, *args, **kw ):\n return MailMessage( *args, **kw )", "def createMessage( self, *args, **kw ):\n if not kw.has_key('charset'):\n kw['charset'] = self.getOutputCharset()\n kw['to_mail'] = 1\n return MailServerBase.createMessage( self, *args, **kw )", "def _createEmail(self, address_to, message, emailSubject):\r\n\t\tfrom_email = Email(self.sender)\r\n\t\tto_email = To(address_to)\r\n\t\tsubject = emailSubject\r\n\t\tcontent = Content(\"text/plain\", message)\r\n\t\t#creates Mail object from sendgrid api\r\n\t\tmail = Mail(from_email, to_email, subject, content)\r\n\t\treturn mail", "def create_message(sender, to, subject, cc, message_text):\n\n email = \"\"\n \n for c in message_text:\n email += c\n \n message = MIMEText(email)\n message['to'] = to\n message['cc'] = cc\n message['from'] = sender\n message['subject'] = subject\n return {'raw': base64.urlsafe_b64encode(message.as_string())}", "def __create_message(sender, recipients, subject, message_text):\n message = {\n \"to\": recipients,\n \"from_email\": sender,\n \"subject\": subject,\n \"html\": message_text,\n }\n\n return message", "def _create_email(\n to_field=\"to_user@test1.com\",\n from_field=\"from_user@test2.com\",\n subject=\"This is a test email\",\n body=\"Almost empty text message\",\n attachment=None,\n maintype=None,\n subtype=None,\n):\n msg = EmailMessage()\n\n msg['To'] = to_field\n msg['From'] = from_field\n msg['Subject'] = subject\n msg.set_content(body)\n\n if attachment:\n with open(attachment, 'rb') as fp:\n attachment = fp.read()\n\n msg.add_attachment(attachment, maintype=maintype, subtype=subtype)\n\n email_message = email.message_from_bytes(\n msg.as_bytes(),\n policy=email.policy.default\n )\n\n return email_message", "def create_message(sender, to, subject, message_text):\r\n message = MIMEText(message_text)\r\n message['to'] = to\r\n message['from'] = sender\r\n message['subject'] = subject\r\n raw = base64.urlsafe_b64encode(message.as_bytes())\r\n raw = raw.decode()\r\n body = {'raw': raw}\r\n return body", "def createMessage( self, *args, **kw ):\n if not kw.has_key('charset'):\n kw['charset'] = self.getInputCharset()\n return MailServerBase.createMessage( self, *args, **kw )", "def make_message(self, mto, mbody=None, msubject=None, mtype=None,\n mhtml=None, mfrom=None, mnick=None):\n message = self.Message(sto=mto, stype=mtype, sfrom=mfrom)\n message['body'] = mbody\n message['subject'] = msubject\n if mnick is not None:\n message['nick'] = mnick\n if mhtml is not None:\n message['html']['body'] = mhtml\n return message", "def CreateMessage(sender, to, subject, message_text):\n message = MIMEText(message_text, 'html')\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode()}", "def create_message(sender, to, subject, message_text_html, message_text_plain):\r\n message = MIMEMultipart('alternative')\r\n message['to'] = to\r\n message['from'] = sender\r\n message['subject'] = subject\r\n message_html = MIMEText(message_text_html, 'html') # HTML version\r\n message_plain = MIMEText(message_text_plain) # plain text version\r\n message.attach(message_plain)\r\n message.attach(message_html)\r\n return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode()}", "def create_message(sender, to, subject, message_text):\n message = MIMEText(message_text)\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n raw = 
base64.urlsafe_b64encode(message.as_bytes())\n return {'raw':raw.decode()}", "def createMessage(self, sender: str, to: str, subject: str, message_text: str):\n message = MIMEText(message_text)\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n raw_message = {'raw': base64.urlsafe_b64encode(message.as_bytes())}\n raw_message['raw']=raw_message['raw'].decode('utf-8')\n return raw_message", "def get_message(self, **kwargs):\n message = Mail()\n if \"from_email\" in kwargs:\n sender = Email()\n message_content = kwargs.get(\"message_content\", \"\")\n sender.name = message_content.get(\"sender\", emailconf.DEFAULT_SENDER)\n sender.email = kwargs.get(\"from_email\", emailconf.DEFAULT_SENDER_EMAIL)\n message.from_email = sender\n if \"subject\" in kwargs:\n message.subject = kwargs.get(\"subject\", \"\")\n if \"text\" in kwargs:\n content = Content(\"text/plain\", kwargs.get(\"text\", \"\"))\n message.add_content(content)\n if \"html\" in kwargs:\n content = Content(\"text/html\", kwargs.get(\"html\", \"\"))\n message.add_content(content)\n if \"category\" in kwargs:\n category = Category(kwargs.get(\"category\", \"\"))\n message.add_category(category)\n\n personalization = self.create_personalization(**kwargs)\n if personalization:\n message.add_personalization(personalization)\n\n return message.get()", "def create_message(text: str, room: str, creating_user: str, receiving_user: str, channel: models.PrivateChannel):\n message = models.Message(\n text=text,\n creating_user=creating_user,\n receiving_user=receiving_user,\n channel_id=channel.id,\n room=room\n )\n db.session.add(message)\n db.session.commit()", "def create_message(self, subject, message, destination, origin = None):\n msg = MIMEText(message)\n if origin is not None:\n msg['From'] = origin\n else:\n msg['From'] = self.origin\n msg['Subject'] = subject\n msg['To'] = destination\n self.messages.append(msg)", "def create_email(sender, to, subject, message_text):\n # This is intended to strip non-ascii chars in message_text\n message_text = ''.join(filter(lambda x: x in printable, message_text))\n \n message = MIMEText(message_text)\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n return message.as_bytes()", "def _make_message(request, issue, message, comments=None, send_mail=False,\n draft=None, in_reply_to=None):\n attach_patch = request.POST.get(\"attach_patch\") == \"yes\"\n template, context = _get_mail_template(request, issue, full_diff=attach_patch)\n # Decide who should receive mail\n my_email = db.Email(request.user.email())\n to = ([db.Email(issue.owner.email())] +\n issue.reviewers +\n [db.Email(email) for email in issue.collaborator_emails()])\n cc = issue.cc[:]\n if django_settings.RIETVELD_INCOMING_MAIL_ADDRESS:\n cc.append(db.Email(django_settings.RIETVELD_INCOMING_MAIL_ADDRESS))\n reply_to = to + cc\n if my_email in to and len(to) > 1: # send_mail() wants a non-empty to list\n to.remove(my_email)\n if my_email in cc:\n cc.remove(my_email)\n issue_id = issue.key.id()\n subject = issue.mail_subject()\n patch = None\n if attach_patch:\n subject = 'PATCH: ' + subject\n if 'patch' in context:\n patch = context['patch']\n del context['patch']\n if issue.num_messages:\n subject = 'Re: ' + subject\n if comments:\n details = _get_draft_details(request, comments)\n else:\n details = ''\n message = message.replace('\\r\\n', '\\n')\n text = ((message.strip() + '\\n\\n' + details.strip())).strip()\n if draft is None:\n msg = models.Message(issue_key=issue.key,\n 
subject=subject,\n sender=my_email,\n recipients=reply_to,\n text=text,\n parent=issue.key,\n issue_was_closed=issue.closed)\n else:\n msg = draft\n msg.subject = subject\n msg.recipients = reply_to\n msg.text = text\n msg.draft = False\n msg.date = datetime.datetime.now()\n msg.issue_was_closed = issue.closed\n issue.calculate_updates_for(msg)\n\n if in_reply_to:\n try:\n replied_msg_id = int(in_reply_to)\n replied_msg = models.Message.get_by_id(replied_msg_id, parent=issue.key)\n msg.in_reply_to_key = replied_msg.key\n replied_issue_id = replied_msg.issue_key.id()\n if replied_issue_id != issue_id:\n logging.warn('In-reply-to Message is for a different issue: '\n '%s instead of %s', replied_issue_id, issue_id)\n msg.in_reply_to_key = None\n except (db.KindError, db.BadKeyError, ValueError):\n logging.warn('Invalid in-reply-to Message or key given: %s', in_reply_to)\n\n if send_mail:\n # Limit the list of files in the email to approximately 200\n if 'files' in context and len(context['files']) > 210:\n num_trimmed = len(context['files']) - 200\n del context['files'][200:]\n context['files'].append('[[ %d additional files ]]' % num_trimmed)\n url = request.build_absolute_uri(reverse(show, args=[issue.key.id()]))\n reviewer_nicknames = ', '.join(library.get_nickname(rev_temp, True,\n request)\n for rev_temp in issue.reviewers)\n cc_nicknames = ', '.join(library.get_nickname(cc_temp, True, request)\n for cc_temp in cc)\n my_nickname = library.get_nickname(request.user, True, request)\n reply_to = ', '.join(reply_to)\n description = (issue.description or '').replace('\\r\\n', '\\n')\n home = request.build_absolute_uri(reverse(index))\n modified_added_count, modified_removed_count = _get_modified_counts(issue)\n context.update({'reviewer_nicknames': reviewer_nicknames,\n 'cc_nicknames': cc_nicknames,\n 'my_nickname': my_nickname, 'url': url,\n 'message': message, 'details': details,\n 'description': description, 'home': home,\n 'added_lines' : modified_added_count,\n 'removed_lines': modified_removed_count,\n })\n for key, value in context.iteritems():\n if isinstance(value, str):\n try:\n encoding.force_unicode(value)\n except UnicodeDecodeError:\n logging.error('Key %s is not valid unicode. value: %r' % (key, value))\n # The content failed to be decoded as utf-8. Enforce it as ASCII.\n context[key] = value.decode('ascii', 'replace')\n body = django.template.loader.render_to_string(\n template, context, context_instance=RequestContext(request))\n logging.warn('Mail: to=%s; cc=%s', ', '.join(to), ', '.join(cc))\n send_args = {'sender': my_email,\n 'to': [_encode_safely(address) for address in to],\n 'subject': _encode_safely(subject),\n 'body': _encode_safely(body),\n 'reply_to': _encode_safely(reply_to)}\n if cc:\n send_args['cc'] = [_encode_safely(address) for address in cc]\n if patch:\n send_args['attachments'] = [('issue_%s_patch.diff' % issue.key.id(),\n patch)]\n\n attempts = 0\n while True:\n try:\n mail.send_mail(**send_args)\n break\n except mail.InvalidSenderError:\n if django_settings.RIETVELD_INCOMING_MAIL_ADDRESS:\n previous_sender = send_args['sender']\n if previous_sender not in send_args['to']:\n send_args['to'].append(previous_sender)\n send_args['sender'] = django_settings.RIETVELD_INCOMING_MAIL_ADDRESS\n else:\n raise\n except apiproxy_errors.DeadlineExceededError:\n # apiproxy_errors.DeadlineExceededError is raised when the\n # deadline of an API call is reached (e.g. for mail it's\n # something about 5 seconds). 
It's not the same as the lethal\n # runtime.DeadlineExeededError.\n attempts += 1\n if attempts >= 3:\n raise\n if attempts:\n logging.warning(\"Retried sending email %s times\", attempts)\n\n return msg", "def create_message(self, sender, to, subject, message_text):\n message = MIMEText(message_text, 'html')\n message['to'] = str(to)\n message['from'] = str(sender)\n message['subject'] = str(subject)\n encoded_message = base64.urlsafe_b64encode(\n message.as_string().encode('utf-8')).decode('ascii')\n print(encoded_message, 'this is encoded_message')\n return {'raw': encoded_message}", "def create_message(self, sender, to, subject, message_text):\n message = MIMEText(message_text, 'html')\n message['to'] = str(to)\n message['from'] = str(sender)\n message['subject'] = str(subject)\n encoded_message = base64.urlsafe_b64encode(\n message.as_string().encode('utf-8')).decode('ascii')\n print(encoded_message, 'this is encoded_message')\n return {'raw': encoded_message}", "def build_message():\n outgoing_mail = Mail()\n outgoing_mail.from_email = Email(email_from_address, email_from_name)\n outgoing_mail.subject = subject\n personalization = Personalization()\n for recipient in email_to_addresses:\n personalization.add_to(Email(recipient))\n outgoing_mail.add_personalization(personalization)\n outgoing_mail.add_content(Content(\"text/plain\", str.join('\\n', _log)))\n outgoing_mail.add_content(Content(\"text/html\", \"<html><body> {} </body></html>\".format(str.join(' <br /> ', _log))))\n return outgoing_mail.get()", "def test_notification_creation_email(self):\n mailhost = api.portal.get_tool('MailHost')\n self.assertEqual(len(mailhost.messages), 1)\n msg = message_from_string(mailhost.messages[0])\n\n self.assertEqual(msg['To'], BOARD_LIST_ADDRESS)\n self.assertEqual(\n msg['From'], 'EESTEC International <noreply@eestec.net>')\n self.assertEqual(\n msg['Subject'],\n '=?utf-8?q?=5BEVENTS=5D=5BCreated=5D_T=C3=A9st_event?=',\n )\n self.assertIn('a new Event has been created', msg.get_payload())\n self.assertIn('T=C3=A9st event', msg.get_payload())", "def create_message(author, text):\n\n message = Message(author=author, text=text)\n\n db.session.add(message)\n db.session.commit()\n\n return message", "def create_message(message_content, timestamp, username, channel, message_type):\n return Message.objects.create(\n message_content=message_content,\n datetime_start=timestamp_to_datetime(timestamp),\n username=username,\n typing=True,\n channel=channel,\n message_type=message_type\n )", "def create_message(sender, to, subject, message_text, image_directory=None):\n message = MIMEMultipart()\n message['to'] = to if isinstance(to, str) else ','.join(to)\n if sender:\n message['from'] = sender\n message['subject'] = subject\n\n msg = MIMEText(message_text, 'html')\n message.attach(msg)\n\n if image_directory:\n for image_name in os.listdir(image_directory):\n with open(os.path.join(image_directory, image_name), 'rb') as img_data:\n content_type, encoding = mimetypes.guess_type(image_name)\n\n if content_type is None or encoding is not None:\n content_type = 'application/octet-stream'\n main_type, sub_type = content_type.split('/', 1)\n if main_type == 'image':\n msg = MIMEImage(img_data.read(), _subtype=sub_type)\n else:\n msg = MIMEBase(main_type, sub_type)\n msg.set_payload(img_data.read())\n\n msg.add_header('Content-Id', '<' + image_name.split('.')[0] + '>') # angle brackets are important\n message.attach(msg)\n\n # https://www.pronoy.in/2016/10/20/python-3-5-x-base64-encoding-3/\n return {'raw': 
base64.urlsafe_b64encode(message.as_bytes()).decode('ascii')}", "def create_next_message(self, **kwargs):\n message = self._builder.create_message(**kwargs)\n return message", "def create_base_message(subject, msg=None):\n message = EmailMessage()\n message[\"From\"] = email_config.from_address\n message[\"To\"] = email_config.to_addresses\n message[\"Subject\"] = subject\n message.set_content(\"\\n\".join([\n 50 * \"_\",\n f\"Hostname: {gethostname()}\",\n f\"Time: {datetime.now().strftime(config.datetime_format)}\",\n f\"Log file: {LOGGER.log_file_path}\",\n 50 * \"_\",\n ]))\n # Add `msg` to the contents if it is not None\n if msg is not None:\n _append_content(message, msg)\n # Attach the log file if it is available\n if LOGGER.log_file_path is not None and config.email_attach_log_file:\n _attach_log_file(message)\n return message.as_string()", "def build_hello_email():\n from_email = Email(\"test@example.com\")\n subject = \"Hello World from the SendGrid Python Library\"\n to_email = Email(\"test@example.com\")\n content = Content(\"text/plain\", \"some text here\")\n mail = Mail(from_email, subject, to_email, content)\n mail.personalizations[0].add_to(Email(\"test2@example.com\"))\n\n return mail.get()", "def create_user_message(self, *args, **kwargs) -> UserMessage:\n \n raise NotImplementedError()", "def create(cls, course_id, sender, to_option, subject, html_message, text_message=None):\r\n # automatically generate the stripped version of the text from the HTML markup:\r\n if text_message is None:\r\n text_message = html_to_text(html_message)\r\n\r\n # perform some validation here:\r\n if to_option not in TO_OPTIONS:\r\n fmt = 'Course email being sent to unrecognized to_option: \"{to_option}\" for \"{course}\", subject \"{subject}\"'\r\n msg = fmt.format(to_option=to_option, course=course_id, subject=subject)\r\n raise ValueError(msg)\r\n\r\n # create the task, then save it immediately:\r\n course_email = cls(\r\n course_id=course_id,\r\n sender=sender,\r\n to_option=to_option,\r\n subject=subject,\r\n html_message=html_message,\r\n text_message=text_message,\r\n )\r\n course_email.save_now()\r\n\r\n return course_email", "def create_message(message_author, message_text):\n\n message = Message(message_author=message_author, message_text=message_text)\n\n db.session.add(message)\n db.session.commit()\n\n return message", "def cmd_conversation_create(client, args):\n create_message = client.create_message(args.recipient, args.body)\n generate_output({'create_message': create_message})", "def _message(self, recipient, connection, context=None):\n base_subject = '{{ event.calendar.course.name }} {{ event.title }}'\n if not self.event.get_documents(True):\n template_name = self.REQUEST_TEMPLATE\n subject = 'Got a {} study guide?'.format(base_subject)\n else:\n template_name = self.PUBLISH_TEMPLATE\n subject = '{} study guide'.format(base_subject)\n\n subject = Template(subject).render(context)\n body = get_template(template_name).render(context)\n\n return make_email_message(subject, body,\n make_display_email(\n self.sender_address,\n self.sender_name),\n recipient, connection)", "def submit_message_from_message(message,body,by=None):\n if not by:\n by = Person.objects.get(name=\"(System)\")\n msg = Message.objects.create(\n by = by,\n subject = message.get('subject',''),\n frm = message.get('from',''),\n to = message.get('to',''),\n cc = message.get('cc',''),\n bcc = message.get('bcc',''),\n reply_to = message.get('reply_to',''),\n body = body,\n time = 
utc_from_string(message.get('date', ''))\n )\n return msg", "def new_email(self, context, payload):\n\n access_token = util.get_access_token(context['headers'])\n url = util.get_url(context) + f\"messages/{payload['id']}\"\n response = util.rest(\"GET\", url, access_token)\n\n if response.status_code > 400:\n raise Exception(\"Error \", response.text)\n\n email_obj = json.loads(response.text)\n\n return GmailApi.get_email_data(email_obj)", "def message_new(self, cr, uid, msg, custom_values=None, context=None):\n if custom_values is None: custom_values = {}\n\n desc = html2plaintext(msg.get('body')) if msg.get('body') else ''\n custom_values.update({\n 'name': msg.get('subject') or _(\"No Subject\"),\n 'description': desc,\n 'email_from': msg.get('from'),\n 'email_cc': msg.get('cc'),\n 'user_id': False,\n })\n if msg.get('priority') in dict(crm.AVAILABLE_PRIORITIES):\n custom_values['priority'] = msg.get('priority')\n return super(crm_lead, self).message_new(cr, uid, msg, custom_values=custom_values, context=context)", "def get_message(self, email):\n\n message = MIMEText(self.message, 'html')\n\n message['Subject'] = self.subject\n message['From'] = self.from_\n message['To'] = email\n\n return message", "def create_group_message(group_name,subject,message,status,createdby):\n query=\"INSERT INTO groupmails(group_name,subject,message,status,createdby)VALUES('{}','{}','{}','{}','{}')\".format(\n group_name,subject,message,status,createdby\n )\n cur.execute(query)", "def create_message(message):\n return {\n \"id\": message.id,\n \"from\": message.sender,\n \"preview\": create_preview(message),\n \"subject\": message.subject,\n \"date\": message.date_created,\n }", "def compose(self, msg, recipient):\n email = Email(msg, self, recipient)\n self.mailman.send(email)", "def create_user_message(user, message):\n user_message = UserMessage(user=user, message=message)\n\n db.session.add(user_message)\n db.session.commit()\n\n return user_message", "def build_email(self, email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,\n attachments=None, message_id=None, references=None, object_id=False, subtype='plain', headers=None,\n body_alternative=None, subtype_alternative='plain'):\n email_from = email_from or self._get_default_from_address()\n assert email_from, \"You must either provide a sender address explicitly or configure \"\\\n \"using the combintion of `mail.catchall.domain` and `mail.default.from` \"\\\n \"ICPs, in the server configuration file or with the \"\\\n \"--email-from startup parameter.\"\n\n # Note: we must force all strings to to 8-bit utf-8 when crafting message,\n # or use encode_header() for headers, which does it automatically.\n\n headers = headers or {} # need valid dict later\n email_cc = email_cc or []\n email_bcc = email_bcc or []\n body = body or u''\n\n email_body = ustr(body)\n email_text_part = MIMEText(email_body, _subtype=subtype, _charset='utf-8')\n msg = MIMEMultipart()\n\n if not message_id:\n if object_id:\n message_id = tools.generate_tracking_message_id(object_id)\n else:\n message_id = make_msgid()\n msg['Message-Id'] = encode_header(message_id)\n if references:\n msg['references'] = encode_header(references)\n msg['Subject'] = encode_header(subject)\n msg['From'] = encode_rfc2822_address_header(email_from)\n del msg['Reply-To']\n if reply_to:\n msg['Reply-To'] = encode_rfc2822_address_header(email_from) #self.smtp_user\n else:\n msg['Reply-To'] = msg['From']\n msg['To'] = 
encode_rfc2822_address_header(COMMASPACE.join(email_to))\n if email_cc:\n msg['Cc'] = encode_rfc2822_address_header(COMMASPACE.join(email_cc))\n if email_bcc:\n msg['Bcc'] = encode_rfc2822_address_header(COMMASPACE.join(email_bcc))\n msg['Date'] = formatdate()\n # Custom headers may override normal headers or provide additional ones\n for key, value in headers.items():\n msg[pycompat.to_text(ustr(key))] = encode_header(value)\n\n if subtype == 'html' and not body_alternative:\n # Always provide alternative text body ourselves if possible.\n text = html2text.html2text(email_body)\n alternative_part = MIMEMultipart(_subtype=\"alternative\")\n alternative_part.attach(MIMEText(text, _charset='utf-8', _subtype='plain'))\n alternative_part.attach(email_text_part)\n msg.attach(alternative_part)\n elif body_alternative:\n # Include both alternatives, as specified, within a multipart/alternative part\n alternative_part = MIMEMultipart(_subtype=\"alternative\")\n body_alternative_ = ustr(body_alternative)\n alternative_body_part = MIMEText(body_alternative_, _subtype=subtype_alternative, _charset='utf-8')\n alternative_part.attach(alternative_body_part)\n alternative_part.attach(email_text_part)\n msg.attach(alternative_part)\n else:\n msg.attach(email_text_part)\n\n if attachments:\n for (fname, fcontent, mime) in attachments:\n filename_rfc2047 = encode_header_param(fname)\n if mime and '/' in mime:\n maintype, subtype = mime.split('/', 1)\n part = MIMEBase(maintype, subtype)\n else:\n part = MIMEBase('application', \"octet-stream\")\n\n # The default RFC2231 encoding of Message.add_header() works in Thunderbird but not GMail\n # so we fix it by using RFC2047 encoding for the filename instead.\n part.set_param('name', filename_rfc2047)\n part.add_header('Content-Disposition', 'attachment', filename=filename_rfc2047)\n\n part.set_payload(fcontent)\n encoders.encode_base64(part)\n msg.attach(part)\n return msg", "def CreateMessageWithAttachment(sender, to, subject, message_text, file_dir,\n filename):\n message = MIMEMultipart()\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n\n msg = MIMEText(message_text)\n message.attach(msg)\n\n path = os.path.join(file_dir, filename)\n content_type, encoding = mimetypes.guess_type(path)\n\n main_type, sub_type = content_type.split('/', 1)\n fp = open(path, 'rb')\n msg = MIMEBase(main_type, sub_type)\n msg.set_payload(fp.read())\n\n msg.add_header('Content-Disposition', 'attachment', filename=filename)\n encoders.encode_base64(msg)\n\n fp.close()\n\n message.attach(msg)\n\n return {'raw': base64.urlsafe_b64encode(bytes(message.as_string(), encoding='utf-8')).decode()}", "def _construct_message(self):\n self.message[\"text\"] = \"\"\n if self.from_:\n self.message[\"text\"] += \"From: \" + self.from_ + \"\\n\"\n if self.subject:\n self.message[\"text\"] += \"Subject: \" + self.subject + \"\\n\"\n\n self.message[\"text\"] += self.body\n self._add_attachments()", "def generate_withno_attachement(sender, recipient, subject, body):\n # Basic Email formatting\n message = email.message.EmailMessage()\n message[\"From\"] = sender\n message[\"To\"] = recipient\n message[\"Subject\"] = subject\n message.set_content(body)\n return message", "def send_created_email(self):\n if settings.NOTIFY_NEW_REG:\n to = settings.NOTIFY_NEW_REG\n message = \"\"\"\\\nGreetings,<br><br>\n\nA new vehicle registration has been submitted by %s.<br><br>\n\nGo here to view or edit the request: <br>\n<a href=\"%s\">%s</a>\n<br><br>\nSincerely,<br><br>\nThe 
Janelia Parking Permit Program\n \"\"\" % (self.user_display_name(), self.get_edit_url(True), self.get_edit_url(True))\n subject = 'A new parking permit request has been entered'\n from_email = 'parkingpermit-donotreply@janelia.hhmi.org'\n text_content = re.sub(r'<[^>]+>','',message)\n html_content = message\n msg = EmailMultiAlternatives(subject, text_content, from_email, to)\n msg.attach_alternative(html_content, \"text/html\")\n msg.send()", "def format_message(to_email, to_name):\r\n\r\n message = MIMEMultipart(\"alternative\")\r\n message[\"From\"] = my_email\r\n message[\"To\"] = to_email\r\n\r\n # This message is formatted in HTML since it's the only way to embed links\r\n with open('message.html', 'r') as f:\r\n message[\"Subject\"] = f.readline().split('Subject: ', 1)[-1]\r\n raw = f.read() % to_name\r\n content = MIMEText(raw, \"html\")\r\n message.attach(content)\r\n return message", "def action_create_mail_messages(self):\n self.check_recipients()\n self.check_message()\n messages = self.env['mail.message']\n for recipient in self.recipient_ids:\n messages |= recipient._create_mail_message()\n return messages", "def get_email_message(self, subject, body, sender, recipients, attachment=None): # pylint: disable=too-many-arguments\n msg = {}\n source = {}\n data = {}\n\n data[\"body\"] = body\n data[\"from\"] = sender\n data[\"subject\"] = subject\n data[\"type\"] = \"cla-email-event\"\n if isinstance(recipients, str):\n data[\"recipients\"] = [recipients]\n else:\n data[\"recipients\"] = recipients\n # Added MailChip/Mandrill support by setting the template and adding\n # email body to the parameters list under the BODY attribute\n data[\"template_name\"] = \"EasyCLA System Email Template\"\n data[\"parameters\"] = {\n \"BODY\": body\n }\n\n msg[\"data\"] = data\n\n source[\"client_id\"] = \"easycla-service\"\n source[\"description\"] = \"EasyCLA Service\"\n source[\"name\"] = \"EasyCLA Service\"\n msg[\"source_id\"] = source\n\n msg[\"id\"] = str(uuid.uuid4())\n msg[\"type\"] = \"cla-email-event\"\n msg[\"version\"] = \"0.1.0\"\n json_string = json.dumps(msg)\n # cla.log.debug(f'Email JSON: {json_string}')\n return json_string", "def rpc_campaign_message_new(self, campaign_id, email_id, target_email, company_name, first_name, last_name):\n\t\tsession = db_manager.Session()\n\t\tmessage = db_models.Message()\n\t\tmessage.id = email_id\n\t\tmessage.campaign_id = campaign_id\n\t\tmessage.target_email = target_email\n\t\tmessage.company_name = company_name\n\t\tmessage.first_name = first_name\n\t\tmessage.last_name = last_name\n\t\tsession.add(message)\n\t\tsession.commit()\n\t\tsession.close()\n\t\treturn", "def makeMessage( name, *structure ):\n return X12Message( name, *structure )", "def create_user_message(user_id, message_id):\n user_message = UserMessage(user_id=user_id, message_id=message_id)\n\n db.session.add(user_message)\n db.session.commit()\n\n return user_message", "def emailMessage(subjectTitle, recipientEmail, bodyMessage, attachmentName = None, attachmentFilePath = None):\n msg = Message(\n subjectTitle,\n sender = os.getenv(tag+'email',base_config['email']) \n )\n for email in recipientEmail: \n msg.add_recipient(email)\n\n msg.body = bodyMessage\n\n if attachmentName is not None and attachmentFilePath is not None:\n with app.open_resource(attachmentFilePath) as fp:\n msg.attach(attachmentName, \"text/plain\", fp.read())\n\n mail.send(msg)", "def build_message(self, recipient_email):\n\t\tmessage = self.queue_doc.message\n\t\tif not message:\n\t\t\treturn 
\"\"\n\n\t\tmessage = message.replace(\n\t\t\tself.message_placeholder(\"tracker\"), self.get_tracker_str(recipient_email)\n\t\t)\n\t\tmessage = message.replace(\n\t\t\tself.message_placeholder(\"unsubscribe_url\"), self.get_unsubscribe_str(recipient_email)\n\t\t)\n\t\tmessage = message.replace(self.message_placeholder(\"cc\"), self.get_receivers_str())\n\t\tmessage = message.replace(\n\t\t\tself.message_placeholder(\"recipient\"), self.get_recipient_str(recipient_email)\n\t\t)\n\t\tmessage = self.include_attachments(message)\n\t\treturn message", "def create_chat_message(self, author, message):\n\n return ChatMessage.objects.create(conversation=self.conversation,\n author=author,\n message=message)", "def get_sample_message(self):\n message = MailgunMessage(self.bob, self.alice, \"Test Subject\", body_text=\"this is a test\")\n message.add_to(self.frank)\n message.add_to(self.bob)\n message.add_cc(self.frank)\n message.add_cc(self.alice)\n message.add_cc(self.bob)\n message.add_bcc(self.bob)\n message.add_bcc(self.alice)\n message.add_bcc(self.frank)\n return message", "def _create_message(self, data, channel):\n if data['type'] != 'message':\n logging.info(\"Skipping message of type `%s'.\", data['type'])\n return\n\n logging.debug('Message data: %s', json.dumps(data))\n\n try:\n user = self.q(o.User).\\\n filter(o.User.slackid == data['user']).one()\n except KeyError:\n user = self.q(o.User).\\\n filter(o.User.slackid == data['comment']['user']).one()\n\n if not data['text'].strip():\n logging.info(\"Skipping message from `%s' since it's empty\",\n user.name)\n return\n\n message = o.Message(data)\n message.channel = channel\n message.user = user\n\n if data.get('is_starred'):\n message.is_starred = True\n\n if 'reactions' in data:\n for reaction_data in data['reactions']:\n message.reactions.append(o.Reaction(reaction_data))\n\n if data.get('subtype') == 'file_share':\n self._file_data(message, data['file'], data['file']['is_external'])\n elif data.get('subtype') == 'pinned_item':\n if data.get('attachments'):\n self._att_data(message, data['attachments'])\n elif data.get('item'):\n self._file_data(message, data['item'],\n data['item']['is_external'])\n elif data.get('attachments'):\n self._att_data(message, data['attachments'])\n\n self.session.add(message)", "def prepare_message(self, body, priority=None, content_type=None,\n content_encoding=None, headers=None, properties=None):\n return amqp.Message(body, priority=priority,\n content_type=content_type,\n content_encoding=content_encoding,\n application_headers=headers,\n **properties)", "def CreateMessageWithAttachment(\n sender, to, subject, message_text, file_dir, filename):\n message = MIMEMultipart()\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n\n msg = MIMEText(message_text)\n message.attach(msg)\n\n path = os.path.join(file_dir, filename)\n content_type, encoding = mimetypes.guess_type(path)\n\n if content_type is None or encoding is not None:\n content_type = 'application/octet-stream'\n main_type, sub_type = content_type.split('/', 1)\n if main_type == 'text':\n fp = open(path, 'rb')\n msg = MIMEText(fp.read(), _subtype=sub_type)\n fp.close()\n elif main_type == 'image':\n fp = open(path, 'rb')\n msg = MIMEImage(fp.read(), _subtype=sub_type)\n fp.close()\n elif main_type == 'audio':\n fp = open(path, 'rb')\n msg = MIMEAudio(fp.read(), _subtype=sub_type)\n fp.close()\n else:\n fp = open(path, 'rb')\n msg = MIMEBase(main_type, sub_type)\n msg.set_payload(fp.read())\n fp.close()\n\n 
msg.add_header('Content-Disposition', 'attachment', filename=filename)\n message.attach(msg)\n\n return {'raw': base64.urlsafe_b64encode(message.as_string())}", "def create_message(self, ticket_number, author, body, note=True):\n data = {\n 'author': author,\n 'body': body,\n 'note': note\n }\n url = ('https://api.groovehq.com/v1/tickets/{}/messages'\n .format(ticket_number))\n resp = self._session.post(url, json=data)\n\n result = resp.json()\n new_url = ret['message']['href']\n nums = re.findall(r'\\d+', new_url)\n\n if len(nums) > 0:\n return nums[-1]", "def construct_message(self):\n msg_type = self.msg_type\n if msg_type == \"PUBMSG\":\n msg_type = \"PRIVMSG\"\n ret = \"{} {}\".format(msg_type, self.target)\n if self.content:\n ret += \" :{}\".format(self.content)\n return ret + \"\\r\\n\"", "def new_message(self, body=''):\r\n m = self.message_class(self, body)\r\n m.queue = self\r\n return m", "def draft_message(self, text=None, template_path=None, template_args=None):\n self.message['From'] = self.sender\n self.message['To'] = '; '.join(self.destinations)\n self.message['BCC'] = '; '.join(self.bcc)\n self.message['CC'] = '; '.join(self.cc)\n self.message['Subject'] = self.subject\n\n # check if email template is used\n if template_path:\n text = self.body_template(template_path)\n text = text.format(**template_args)\n\n # attach text part of message\n self.message.attach(MIMEText(text))\n\n # return self to encourage method chaining\n return self", "def send(self):\n # Validate message\n if self._text_body is None and self._html_body is None:\n raise Exception(\"Error! Must specify at least one body type (HTML or Text)\")\n if len(self._to) == 0:\n raise Exception(\"Must specify at least one recipient\")\n\n # Create the message part\n if self._text_body is not None and self._html_body is None:\n msg = MIMEText(self._text_body, \"plain\")\n elif self._text_body is None and self._html_body is not None:\n msg = MIMEText(self._html_body, \"html\")\n else:\n msg = MIMEMultipart(\"alternative\")\n msg.attach(MIMEText(self._text_body, \"plain\"))\n msg.attach(MIMEText(self._html_body, \"html\"))\n # Add attachments, if any\n if len(self._attach) != 0:\n tmpmsg = msg\n msg = MIMEMultipart()\n msg.attach(tmpmsg)\n for fname,attachname in self._attach:\n if not os.path.exists(fname):\n print \"File '%s' does not exist. Not attaching to email.\" % fname\n continue\n if not os.path.isfile(fname):\n print \"Attachment '%s' is not a file. 
Not attaching to email.\" % fname\n continue\n # Guess at encoding type\n ctype, encoding = mimetypes.guess_type(fname)\n if ctype is None or encoding is not None:\n # No guess could be made so use a binary type.\n ctype = 'application/octet-stream'\n maintype, subtype = ctype.split('/', 1)\n if maintype == 'text':\n fp = open(fname)\n attach = MIMEText(fp.read(), _subtype=subtype)\n fp.close()\n elif maintype == 'image':\n fp = open(fname, 'rb')\n attach = MIMEImage(fp.read(), _subtype=subtype)\n fp.close()\n elif maintype == 'audio':\n fp = open(fname, 'rb')\n attach = MIMEAudio(fp.read(), _subtype=subtype)\n fp.close()\n else:\n fp = open(fname, 'rb')\n attach = MIMEBase(maintype, subtype)\n attach.set_payload(fp.read())\n fp.close()\n # Encode the payload using Base64\n encoders.encode_base64(attach)\n # Set the filename parameter\n if attachname is None:\n filename = os.path.basename(fname)\n else:\n filename = attachname\n attach.add_header('Content-Disposition', 'attachment', filename=filename)\n msg.attach(attach)\n # Some header stuff\n msg['Subject'] = self._subject\n msg['From'] = self._from\n msg['To'] = \", \".join(self._to)\n if self._reply_to:\n msg['Reply-To'] = self._reply_to\n if len(self._cc) > 0:\n msg['Cc'] = \", \".join(self._cc)\n if len(self._bcc) > 0:\n msg['Bcc'] = \", \".join(self._bcc)\n msg.preamble = \"You need a MIME enabled mail reader to see this message\"\n # Send message\n msg = msg.as_string()\n server = None\n if self._smtp_ssl:\n server = smtplib.SMTP_SSL(host=self._smtp_server,\n port=self._smtp_port)\n else:\n server = smtplib.SMTP(host=self._smtp_server, port=self._smtp_port)\n if self._smtp_user:\n server.login(self._smtp_user, self._smtp_password)\n server.sendmail(self._from, self._to, msg)\n server.quit()", "def _create_message(self, msg):\n head = msg[\"head\"]\n body = msg[\"body\"]\n body = body.format(**self.data)\n length = len(body)\n head = head.format(length=length, **self.data)\n return head + body", "def create_email(username, provider):\n print(f\"Your new email is {username}@{provider}.com\")", "def create_multipart_message(\n sender: str, recipients: list, title: str, text: str = None, html_text: str = None, attachments: list = None) \\\n -> MIMEMultipart:\n multipart_content_subtype = 'alternative' if text and html_text else 'mixed'\n msg = MIMEMultipart(multipart_content_subtype)\n msg['Subject'] = title\n msg['From'] = sender\n msg['To'] = ', '.join(recipients)\n\n # Record the MIME types of both parts - text/plain and text/html.\n # According to RFC 2046, the last part of a multipart message, in this case the HTML message, is best and preferred.\n if text:\n part = MIMEText(text, 'plain')\n msg.attach(part)\n if html_text:\n part = MIMEText(html_text, 'html')\n msg.attach(part)\n\n # Add attachments\n for attachment in attachments or []:\n with open(attachment, 'rb') as f:\n part = MIMEApplication(f.read())\n part.add_header('Content-Disposition', 'attachment', filename=os.path.basename(attachment))\n msg.attach(part)\n\n return msg", "def send_email(self, message):\n pass", "def createMessageWithAttachment(\r\n sender, to, subject, msgHtml, msgPlain, attachmentFile):\r\n message = MIMEMultipart('mixed')\r\n message['to'] = to\r\n message['from'] = sender\r\n message['subject'] = subject\r\n\r\n messageA = MIMEMultipart('alternative')\r\n messageR = MIMEMultipart('related')\r\n\r\n messageR.attach(MIMEText(msgHtml, 'html'))\r\n messageA.attach(MIMEText(msgPlain, 'plain'))\r\n messageA.attach(messageR)\r\n\r\n 
message.attach(messageA)\r\n\r\n print(\"create_message_with_attachment: file: %s\" % attachmentFile)\r\n content_type, encoding = mimetypes.guess_type(attachmentFile)\r\n\r\n if content_type is None or encoding is not None:\r\n content_type = 'application/octet-stream'\r\n main_type, sub_type = content_type.split('/', 1)\r\n if main_type == 'text':\r\n fp = open(attachmentFile, 'rb')\r\n msg = MIMEText(fp.read(), _subtype=sub_type)\r\n fp.close()\r\n elif main_type == 'image':\r\n fp = open(attachmentFile, 'rb')\r\n msg = MIMEImage(fp.read(), _subtype=sub_type)\r\n fp.close()\r\n elif main_type == 'audio':\r\n fp = open(attachmentFile, 'rb')\r\n msg = MIMEAudio(fp.read(), _subtype=sub_type)\r\n fp.close()\r\n else:\r\n fp = open(attachmentFile, 'rb')\r\n msg = MIMEBase(main_type, sub_type)\r\n msg.set_payload(fp.read())\r\n fp.close()\r\n filename = os.path.basename(attachmentFile)\r\n msg.add_header('Content-Disposition', 'attachment', filename=filename)\r\n message.attach(msg)\r\n\r\n return {'raw': base64.urlsafe_b64encode((message.as_string()).encode('UTF-8')).decode('ascii')}", "def replyMessage(_email, _name):\n\n _mailer = app.config['MAIL_USERNAME']\n mesg = Message(\"Message Received\", sender=('iSOLveIT Contact', f'{_mailer}'), recipients=[_email])\n mesg.body = f'''Hello {_name},\nThe message you sent to Randy has been received. \nRandy will contact you within 24 hours.\nThank you.\n\nRegards,\nRandy\n\nDate Sent: {dt.now(tz=GMT_tz).strftime('%B %d, %Y, %H:%M ') + 'GMT'}\n'''\n mail.send(mesg)\n return 'OK'", "def create_message(self,message_information):\n\n #create the message\n if message_information['service'] == 'whatsapp':\n #The details of the message sent is saved here\n message = self.client.messages.create(\n body='Your order of {food} from the {store_name} store is ready. 
Additional Information: {addit_info}'.format(**message_information),\n from_='whatsapp:{sender_number}'.format(**self.config),\n to='whatsapp:{mobile}'.format(**message_information)\n )\n #Set the ready state to waiting for collection\n #self.firebase.db.child('orders').child('{store_name}'.format(**message_information)).child('{order_id}'.format(**message_information)).update({\"ready\":\"waiting for collection\"})\n self.firebase.update([\"orders\", STORE_NAME, message_information[\"order_id\"]], {\"ready\": \"waiting for collection\"})", "def _copy_message(self, message):\r\n gmsg = aeemail.EmailMessage(sender=message.from_email,\r\n to=message.to,\r\n subject=message.subject,\r\n body=message.body)\r\n if message.extra_headers.get('Reply-To', None):\r\n gmsg.reply_to = message.extra_headers['Reply-To']\r\n if message.cc:\r\n gmsg.cc = list(message.cc)\r\n if message.bcc:\r\n gmsg.bcc = list(message.bcc)\r\n if message.attachments:\r\n # Must be populated with (filename, filecontents) tuples.\r\n attachments = []\r\n for attachment in message.attachments:\r\n if isinstance(attachment, MIMEBase):\r\n attachments.append((attachment.get_filename(),\r\n attachment.get_payload(decode=True)))\r\n else:\r\n attachments.append((attachment[0], attachment[1]))\r\n gmsg.attachments = attachments\r\n # Look for HTML alternative content.\r\n if isinstance(message, EmailMultiAlternatives):\r\n for content, mimetype in message.alternatives:\r\n if mimetype == 'text/html':\r\n gmsg.html = content\r\n break\r\n return gmsg", "def create_email(_from, _to, _subj, _body, files):\r\n msg = MIMEMultipart()\r\n msg['From'] = _from\r\n msg['To'] = _to\r\n msg['Subject'] = _subj\r\n msg.attach(MIMEText(_body, 'plain'))\r\n\r\n if files:\r\n for file in files:\r\n part = MIMEBase('application', 'octet-stream')\r\n part.set_payload(open(file, 'rb').read())\r\n encoders.encode_base64(part)\r\n part.add_header('Content-Disposition', 'attachment; filename=' + os.path.basename(file))\r\n msg.attach(part)\r\n\r\n return msg", "def sendEmail(_name, _email, _body):\n\n _mailer = app.config['MAIL_USERNAME']\n msg = Message(\"Contact Form\", sender=('iSOLveIT Contact', f'{_mailer}'), recipients=[f'{_mailer}'])\n msg.body = f'''{_body}\n\n\nSender's Name: {_name}\nSender's Email: {_email}\nDate Sent: {dt.now(tz=GMT_tz).strftime('%B %d, %Y, %H:%M ') + 'GMT'}\n'''\n mail.send(msg)\n return 'OK'", "def make_email(to_list=None, cc_list=None, bcc_list=None, from_address=None, reply_address=None, attachments=None,\n subject=None, body=None, template=None, html=False):\n if template is not None and (subject is not None or body is not None):\n raise ValueError('Only subject/body or template can be passed')\n\n if template:\n subject = template.get_subject()\n body = template.get_body()\n if config.DEBUG and '\\n' in subject:\n raise ValueError('Email subject contains linebreaks')\n subject = re.sub(r'\\s+', ' ', subject)\n if to_list is None:\n to_list = set()\n if cc_list is None:\n cc_list = set()\n if bcc_list is None:\n bcc_list = set()\n to_list = {to_list} if isinstance(to_list, str) else to_list\n cc_list = {cc_list} if isinstance(cc_list, str) else cc_list\n bcc_list = {bcc_list} if isinstance(bcc_list, str) else bcc_list\n reply_address = {reply_address} if (isinstance(reply_address, str) and reply_address) else (reply_address or set())\n return {\n 'to': set(to_list),\n 'cc': set(cc_list),\n 'bcc': set(bcc_list),\n 'from': from_address or config.NO_REPLY_EMAIL,\n 'reply_to': set(reply_address),\n 'attachments': 
attachments or [],\n 'subject': subject.strip(),\n 'body': body.strip(),\n 'html': html,\n }", "def create_sent_email(self, *args, **kwargs):\n receiver = kwargs['receiver']\n sender = kwargs['sender']\n user = kwargs['user']\n body = kwargs['body']\n subject = kwargs['subject']\n if receiver and sender and subject and body:\n sent_email = SentEmail()\n sent_email.receiver = receiver\n sent_email.subject = subject\n sent_email.sender = sender\n sent_email.status = 'sent'\n sent_email.user = user\n sent_email.body = body\n sent_email.save()\n return True\n else:\n return False", "def create(\n cls,\n recipient_id: str,\n recipient_email: str,\n sender_id: str,\n sender_email: str,\n intent: str,\n subject: str,\n html_body: str,\n sent_datetime: datetime.datetime\n ) -> None:\n instance_id = cls._generate_id(intent)\n email_model_instance = cls(\n id=instance_id, recipient_id=recipient_id,\n recipient_email=recipient_email, sender_id=sender_id,\n sender_email=sender_email, intent=intent, subject=subject,\n html_body=html_body, sent_datetime=sent_datetime)\n\n email_model_instance.update_timestamps()\n email_model_instance.put()", "def make_and_mail(receiver, subject, body):\n emailbody = MIMEText(body, 'html')\n msg = MIMEMultipart('alternative')\n msg['From'] = SENDER\n msg['To'] = receiver\n msg['Subject'] = subject\n msg.attach(emailbody)\n #pylint: disable=invalid-name\n try:\n s = smtplib.SMTP('smtp.gmail.com', 587)\n s.starttls()\n s.login(YOUPAPER_EMAIL, YOUPAPER_PASSWORD)\n s.sendmail(SENDER, receiver, msg.as_string())\n s.quit()\n return 1 #returns 1 if success\n except smtplib.SMTPException:\n return 0 #returns 0 if failure", "def post(self, request):\n # get message from request\n message = request.POST.get('message')\n # create message\n\n ChatMessage.objects.create(\n message=message, _from=request.user, _to=request.user.client.trainer.user)\n # return response\n return HttpResponse('success')", "def send_feedback_message_email(recipient_id, feedback_messages):\n email_subject_template = (\n 'You\\'ve received %s new message%s on your explorations')\n\n email_body_template = (\n 'Hi %s,<br>'\n '<br>'\n 'You\\'ve received %s new message%s on your Oppia explorations:<br>'\n '<ul>%s</ul>'\n 'You can view and reply to your messages from your '\n '<a href=\"https://www.oppia.org/creator_dashboard\">dashboard</a>.'\n '<br>'\n '<br>Thanks, and happy teaching!<br>'\n '<br>'\n 'Best wishes,<br>'\n 'The Oppia Team<br>'\n '<br>%s')\n\n if not feconf.CAN_SEND_EMAILS:\n log_new_error('This app cannot send emails to users.')\n return\n\n if not feconf.CAN_SEND_FEEDBACK_MESSAGE_EMAILS:\n log_new_error('This app cannot send feedback message emails to users.')\n return\n\n if not feedback_messages:\n return\n\n recipient_user_settings = user_services.get_user_settings(recipient_id)\n\n messages_html = ''\n count_messages = 0\n for exp_id, reference in feedback_messages.iteritems():\n messages_html += (\n '<li><a href=\"https://www.oppia.org/create/%s#/feedback\">'\n '%s</a>:<br><ul>' % (exp_id, reference['title']))\n for message in reference['messages']:\n messages_html += ('<li>%s<br></li>' % message)\n count_messages += 1\n messages_html += '</ul></li>'\n\n email_subject = email_subject_template % (\n (count_messages, 's') if count_messages > 1 else ('a', ''))\n\n email_body = email_body_template % (\n recipient_user_settings.username, count_messages if count_messages > 1\n else 'a', 's' if count_messages > 1 else '', messages_html,\n EMAIL_FOOTER.value)\n\n _send_email(\n recipient_id, 
feconf.SYSTEM_COMMITTER_ID,\n feconf.EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION,\n email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)", "def send_email(to, subject, body, attachment=None):\n outlook = win32.Dispatch('outlook.application')\n new_mail = outlook.CreateItem(0)\n new_mail.Subject = subject\n new_mail.HTMLBody = body\n new_mail.To = to\n\n if attachment:\n new_mail.Attachments.Add(attachment)\n\n new_mail.Send()", "def create_and_deliver(self, **kwargs: Any) -> Awaitable[None]:\n\n msg = self.create_message(**kwargs)\n return self.deliver(msg)", "def new_message(self,\n intent=\"\",\n message=\"\",\n type=None,\n not_handled=False,\n time_stamp=None):\n self.messages.append(Message(api_key=self.api_key,\n platform=self.platform,\n version=self.version,\n user_id=self.user_id,\n intent=intent,\n message=message,\n type=type,\n not_handled=not_handled,\n time_stamp=time_stamp))\n return self.messages[-1]", "def _create_msg(self, tr_id, payload, confirm, expire_time, encoding):\n tmp = [\"<SSAP_message><transaction_type>INSERT</transaction_type>\",\n \"<message_type>REQUEST</message_type>\"]\n tmp.extend([\"<transaction_id>\", str(tr_id), \"</transaction_id>\"])\n tmp.extend([\"<node_id>\", str(self.node_id), \"</node_id>\"])\n tmp.extend([\"<space_id>\", str(self.targetSS), \"</space_id>\"])\n tmp.extend(['<parameter name=\"insert_graph\" encoding=\"%s\">' % encoding.upper(),\n str(payload), \"</parameter>\"])\n tmp.extend(['<parameter name = \"confirm\">',\n str(confirm).upper(),\n \"</parameter>\",\n \"</SSAP_message>\"])\n return \"\".join(tmp)", "def test_post_success_creates_message(self):\n sender, recipient = UserFactory(), UserFactory()\n\n data = {\n 'senderId': sender.id,\n 'recipientId': recipient.id,\n 'text': 'Hello World!',\n }\n\n response = self.client.post(\n reverse('messages:list'),\n content_type='application/json',\n data=data,\n )\n actual_message = Message.objects.get()\n self.assertEqual(sender.id, actual_message.sender.id)\n self.assertEqual(recipient.id, actual_message.recipient.id)\n self.assertEqual(data['text'], actual_message.text)", "def send_msg(self, body, who, subject=None, **kwargs):\n\n # ============================================================\n # Parse and bail out if needed\n # ============================================================\n\n self._parse_who(who)\n\n if not self._ensure_recipients_exist():\n return\n\n # Kwargs\n attachment = kwargs.get('attachment', None)\n disable_email = kwargs.get('disable_email', False)\n disable_sms = kwargs.get('disable_sms', False)\n\n if not self._ensure_attachment_exists(attachment):\n return\n\n for b, n in zip([disable_email, disable_sms], ['disable_email', 'disable_sms']):\n if not isinstance(b, bool):\n msg = f'\\'{n}\\' must be boolean but you gave type {type(b)}'\n raise TypeError(msg)\n\n # ============================================================\n # Main\n # ============================================================\n\n with self._setup_smtp_server() as sess:\n\n # Create msg object\n msg = EmailMessage()\n\n # Personalize the template\n body_from_template = self.template.substitute(BODY=body)\n\n # Set msg parameters;\n # Note we will assign msg['To'] below when iterating over email addresses\n msg['From'] = self.sender_address\n msg['Subject'] = subject.upper() if isinstance(subject, str) else None\n\n # Copy outgoing emails to cc list\n if isinstance(self.cc_email_list, list):\n if len(self.cc_email_list) > 0:\n msg['CC'] = ','.join(self.cc_email_list)\n\n # Base text 
message\n msg.set_content(body_from_template)\n\n # HTML version\n body_html = re.sub(r'[\\n]', '<br>', body_from_template) # Replace /n with <br>\n logo_cid = make_msgid()\n msg.add_alternative(\"\"\"\\\n <html>\n <head></head>\n <body>\n <p>\"\"\" + body_html + '</p>' + \"\"\"\n <a href=\"https://www.liveline.tech\">\n <img src=\"cid:{logo_cid}\" />\n </a>\n </body>\n </html>\n \"\"\".format(logo_cid=logo_cid[1:-1]), subtype='html')\n\n # Add logo to the HTML version\n if EMAIL_SIGNATURE_LOGO_FILE is not None:\n t = root / EMAIL_SIGNATURE_LOGO_FILE\n if t.exists():\n with open(t, 'rb') as img:\n r = img.read()\n # noinspection PyUnresolvedReferences\n msg.get_payload()[1].add_related(r, 'image', 'png', cid=logo_cid)\n\n # Optionally attach a file\n # First use mimetypes to try and guess content type based on file extension:\n if attachment is not None:\n attachment = Path(attachment)\n ctype, encoding = mimetypes.guess_type(str(attachment))\n if ctype is None or encoding is not None:\n # No guess could be made, or the file is encoded (compressed), so\n # use a generic bag-of-bits type.\n ctype = 'application/octet-stream'\n maintype, subtype = ctype.split('/', 1)\n maintype += f'; name=\"{attachment.name}\"'\n with open(attachment, 'rb') as file:\n r = file.read()\n msg.add_attachment(r, maintype=maintype, subtype=subtype)\n\n # ============================================================\n # Email\n # ============================================================\n\n # For each email & phone in current lists, send messages\n if not disable_email:\n for e in self.current_email_list:\n\n # Console out\n stdout_msg = f'COMMUNICATOR MESSAGE: Sending email to: '\n fancy_print(stdout_msg, fg=COMMUNICATOR_MSG_COLOR, end='')\n fancy_print(e, fg='hlink')\n\n # Update msg 'To:' field\n if msg['To'] is not None:\n del msg['To']\n msg['To'] = e\n\n # # Make a local copy of what we are going to send... to a log file?\n # with open('outgoing.msg', 'wb') as f:\n # f.write(bytes(msg))\n\n try:\n sess.send_message(msg)\n except:\n stdout_msg = f'COMMUNICATOR WARNING: Failed sending email message'\n fancy_print(stdout_msg, fg=COMMUNICATOR_WARN_COLOR)\n\n # ============================================================\n # SMS\n # ============================================================\n\n if not disable_sms:\n for m in self.current_mobile_list:\n\n # Console out\n stdout_msg = f'COMMUNICATOR MESSAGE: Sending SMS message to: '\n fancy_print(stdout_msg, fg=COMMUNICATOR_MSG_COLOR, end='')\n fancy_print(m[0:3] + '.' + m[3:6] + '.' + m[6:10], fg='cerulean')\n\n any_ok = False\n candidates = list()\n\n # Try all the stubs!\n # We don't know the carrier name.\n # Assume the invalid addresses will get black-holed by the various carriers.\n for stub in self.sms_email_stubs:\n candidates.append(m + self.sms_email_stubs[stub])\n\n # Update msg 'To:' field\n if msg['To'] is not None:\n del msg['To']\n msg['To'] = candidates\n\n # # Make a local copy of what we are going to send... 
to a log file?\n # with open('outgoing.msg', 'wb') as f:\n # f.write(bytes(msg))\n\n try:\n sess.send_message(msg)\n any_ok = True\n except:\n pass\n\n if not any_ok:\n stdout_msg = f'COMMUNICATOR WARNING: Failed sending SMS message'\n fancy_print(stdout_msg, fg=COMMUNICATOR_WARN_COLOR)\n\n return", "def create_email(user):\n if 'research' in user.get_domains():\n domain = 'research'\n else: domain = 'academic'\n subject = \"ECE/CIS Account Created\"\n helprequest = \"https://www.eecis.udel.edu/service\"\n \n message = \"Your ECE/CIS %s account has been created with the username: %s\\n\\n\" % (domain, user.username)\n message += \"Please do not reply to this message. If you need assistance with your account, please visit:\\n\"\n message += \"%s\\n\\n\" % helprequest\n message += \"-- EE/CIS Labstaff\\n\"\n\n send('account@eecis.udel.edu', 'ECE/CIS Account System', \\\n [user.email], subject, message, MAILHOST)", "def build_message(self, subject, body, attachment=None):\n msg = MIMEMultipart()\n msg['From'] = self.src_addr\n msg['To'] = self.dest_addr\n msg['Subject'] = subject\n msg.attach(MIMEText(body, 'plain'))\n\n if attachment is not None:\n if os.path.exists(attachment):\n with open(attachment, 'rb') as a:\n part = MIMEBase('application', 'octet-stream')\n part.set_payload(a.read())\n encoders.encode_base64(part)\n part.add_header('Content-Disposition', 'attachment; filename= {}'.format(attachment))\n msg.attach(part)\n\n self.outbox.append(msg)", "def br_msg_to(msg_body):\r\n msg = Message()\r\n msg.body = msg_body\r\n msg.set_metadata(\"performative\", \"inform\")\r\n return msg", "def message_new(\n self, cr, uid, msg_dict, custom_values=None, context=None): \n if context is None:\n context = {}\n # prevent changes in context from \"bubbling up\" to calling methods\n local_context = dict(context)\n\n users_pool = self.pool.get('res.users')\n base_model = self.pool.get('ir.model.data')\n partner_model = self.pool.get('res.partner') \n\n # As the scheduler is run without language,\n # set the administrator's language\n if not local_context.get('lang'):\n user = users_pool.browse(cr, uid, uid, context=local_context)\n local_context['lang'] = user.partner_id.lang\n\n if custom_values is None:\n custom_values = {}\n email_from = msg_dict.get('from', False)\n if email_from:\n custom_values['name'] = _(\"Received by email from %s\") % email_from\n email_date = msg_dict.get('date', False)\n if email_date:\n custom_values['date_invoice'] = email_date\n\n company_id = (\n ('force_company' in local_context\n and local_context['force_company']) or False)\n\n # Retrieve partner_id from message dictionary.\n # Partner might be:\n # 1. Supplier sending email (author_id in msg dict.)\n # 2. Partner receiving message (special partner setup to receive\n # email). Should be linked to the appropiate company in multi-\n # company databases.\n # 3. Dummy invoice partner.\n # Partner MUST be a supplier.\n\n # 1. Try author:\n supplier_partner_id = False\n author_id = (\n 'author_id' in msg_dict and msg_dict['author_id'] or False)\n if (author_id\n and self._is_partner_supplier(\n cr, uid, author_id, context=local_context)):\n supplier_partner_id = author_id\n\n # 2. 
Try recipients:\n # Unfortunately we have to do a new lookup on partner, because\n # the method message_process in mail_thread removes the partner_ids\n # already found, from the message dictionary:\n if not supplier_partner_id:\n s = ', '.join(\n [msg_dict.get(h)\n for h in ['to', 'cc'] if msg_dict.get(h)])\n for email_address in tools.email_split(s):\n partner_ids = self.get_partner_from_mail(\n cr, uid, email_address, company_id, force_supplier=True,\n context=local_context)\n if partner_ids:\n supplier_partner_id = partner_ids[0]\n break\n\n # 3. Try default partner for company (company might be False):\n if not supplier_partner_id:\n args = [('fetchmail_invoice_default', '=', True),]\n if company_id:\n args.append(('company_id', '=', company_id))\n default_ids = partner_model.search(\n cr, uid, args, context=local_context)\n if default_ids: # can be only one\n supplier_partner_id = default_ids[0]\n\n # We should have a supplier/partner by now....\n assert supplier_partner_id, _('No partner found to link invoice to')\n\n # Get company for supplier, if any. If present, should be the same\n # as company for fetchmail config, if present. If still no\n # company is found, use main company.\n supplier_record = partner_model.read(\n cr, uid, supplier_partner_id, ['company_id', 'supplier'],\n context=local_context)\n supplier_company_id = (\n supplier_record['company_id'] and supplier_record['company_id'][0]\n or False)\n if supplier_company_id:\n if company_id:\n assert company_id == supplier_company_id, (_(\n 'Supplier found not valid for company %d.') %\n company_id)\n else:\n company_id = supplier_company_id\n if not company_id:\n # Last resort, use main company\n company_id = base_model.get_object_reference( \n cr, uid, 'base', 'main_company')[1]\n \n # Now we should have a company, and we should use it for everything\n assert company_id, (_(\n 'All attempts to determine company for invoice failed'))\n local_context['force_company'] = company_id\n \n # Paranoid check\n assert supplier_record['supplier'], (_(\n 'Partner %d is not a supplier') % supplier_partner_id)\n\n # And we should have an account property\n # (read again, as company might have changed)\n supplier_record = partner_model.read(\n cr, uid, supplier_partner_id, ['property_account_payable_id'],\n context=local_context)\n assert supplier_record['property_account_payable_id'], (\n _('No account payable on partner %d.') % supplier_partner_id)\n\n # And we need some information in context as well\n local_context.update({\n 'company_id': company_id,\n 'type': 'in_invoice',\n })\n\n supplier = partner_model.browse(cr, uid, supplier_partner_id, context=local_context)\n\n journal_id = self.pool.get('account.invoice').default_get(cr, uid, ['journal_id'], context=local_context)['journal_id']\n if not journal_id:\n raise UserError(_('Please define an accounting sale journal for this company.'))\n\n custom_values.update({\n 'company_id': company_id,\n 'partner_id': supplier_partner_id,\n 'type': 'in_invoice',\n\n 'account_id': supplier.property_account_payable_id.id,\n 'journal_id': journal_id,\n })\n\n\n # custom_values.update(\n # self.onchange_partner_id(\n # cr, uid, [], 'in_invoice', supplier_partner_id,\n # company_id=company_id)['value'])\n\n # Create the resource\n res_id = super(account_invoice, self).message_new(\n cr, uid, msg_dict, custom_values=custom_values,\n context=local_context)\n return res_id", "def _generate_email(email, name=None):\n result = {'email': email}\n if name:\n result['name'] = name\n return result", 
"def post(self, request):\n # get message from request\n message = request.POST.get('message')\n # create message\n ChatMessage.objects.create(\n message=message, _from=request.user, _to=User.objects.get(pk=request.POST.get('user_id')))\n # return response\n return HttpResponse('success')", "def create(cls, sender, recipient, body, timestamp=None):\n if timestamp is None:\n timestamp = datetime.datetime.utcnow()\n msg = cls(sender, recipient, body, timestamp)\n db.session.add(msg)\n\n try:\n db.session.commit()\n db.session.flush()\n return msg.id\n except BaseException as exc:\n db.session.rollback()\n raise exc", "def create_message(id_user: int, message: str):\n # Create datetime of today at midnight\n today = datetime.date.today()\n today = today.strftime(\"%Y-%m-%d %H:%M:%S\")\n # Query to check if there is already a message for this user today\n mycursor.execute(f\"\"\"SELECT id_message\n FROM Daily_message\n WHERE date_message >= '{today}'\n AND id_user = {id_user}\n \"\"\")\n rowcount = mycursor.rowcount\n if rowcount == 1:\n # If there is already a message today, user can't add a new one\n return 'Impossible d\\'ajouter ce message. Il y a déjà un message aujourd\\'hui, veuillez le modifier.'\n else:\n # Create datetime of today at current time\n date_message = datetime.datetime.today()\n date_message = date_message.strftime(\"%Y/%m/%d %H:%M:%S\")\n columns_mess = 'id_user, text, date_message'\n # Add the message infos to the Daily_message table\n add_in_database((id_user, message, date_message), 'Daily_message', columns_mess)\n mydb.commit()\n # Get the id_message\n mycursor.execute(f\"\"\"SELECT id_message\n FROM Daily_message\n WHERE (id_user = {id_user})\n AND (date_message >= '{today}')\n \"\"\")\n infos = mycursor.fetchall()\n id_message = infos[0][0]\n # Fill emotion table for the new message\n create_emotion(id_message, message)\n return {'id_user': id_user, 'message': message}", "def create_message_talk(msg_talk_as_str):\n msg_comps = msg_talk_as_str.split('\\n')\n from_id = msg_comps[1]\n origin_id = msg_comps[2]\n message_id = msg_comps[3]\n topics = msg_comps[4].split(',')\n data = msg_comps[5]\n return MessageTalk(from_id, origin_id, topics, data, message_id)", "def create(self, data):\n\n\t\tmessage = data\n\t\tmessage[\"id\"] = self.count = self.count + 1\n\t\tself.messages.append(message)\n\n\t\treturn(message)", "def send_email(self, to_address, subject, body, cc_recipients=[]):\n\n # Build and send message\n msg = Message(\n account=self.account,\n folder=self.account.sent,\n subject=subject,\n body= HTMLBody(body),\n to_recipients=[Mailbox(email_address=to_address)],\n cc_recipients=[(Mailbox(email_address=x)) for x in cc_recipients]\n )\n\n msg.send_and_save()\n print(\"Message to {} sent.\".format(to_address))", "def _create_msg(self, tr_id, i_triples, i_type, r_triples, r_type, confirm):\n params = SSAP_UPDATE_PARAM_TEMPLATE % (str(i_type).upper(),\n str(i_triples),\n str(r_type).upper(),\n str(r_triples),\n str(confirm).upper())\n tmp = SSAP_MESSAGE_TEMPLATE % (str(self.node_id), str(self.targetSS),\n self.tr_type, str(tr_id), params)\n return tmp", "def quick_email(self, send_to, subject, body, style=None):\n message = Message(body, style=style)\n\n self.send_message(message, send_to, subject)", "def get_email():\n return Email(\n subject='[Messages] Integration Test',\n body='Conducting Integration Testing',\n attachments=str(TESTDIR.joinpath('file2.png')))" ]
[ "0.8220897", "0.783499", "0.7542338", "0.7535529", "0.7519684", "0.74869245", "0.7417828", "0.73909754", "0.7373257", "0.7355638", "0.73106784", "0.7297086", "0.7269137", "0.719846", "0.7196095", "0.7140178", "0.7047853", "0.7021525", "0.70037013", "0.70037013", "0.68474555", "0.68418366", "0.6781704", "0.6777992", "0.6744443", "0.67329454", "0.6714459", "0.671162", "0.67051923", "0.66659385", "0.6665762", "0.66546285", "0.665063", "0.6643006", "0.6626429", "0.6617764", "0.6596703", "0.65357643", "0.65236783", "0.6523545", "0.6514001", "0.6502263", "0.6484727", "0.6484322", "0.6482396", "0.6468869", "0.6463164", "0.6459357", "0.6409988", "0.6361861", "0.6359212", "0.6342248", "0.63373715", "0.6314738", "0.6312374", "0.6297892", "0.6277734", "0.6273468", "0.62713385", "0.62654835", "0.62596893", "0.6253985", "0.6252849", "0.62511957", "0.62458163", "0.6242815", "0.6183823", "0.61819816", "0.6176082", "0.61633307", "0.61334467", "0.61282927", "0.61141455", "0.6113214", "0.6104701", "0.60809094", "0.6062849", "0.6053299", "0.6051991", "0.6018882", "0.6012688", "0.601257", "0.60062265", "0.59931654", "0.5989733", "0.5986193", "0.59848374", "0.5981605", "0.598052", "0.5966138", "0.59649146", "0.5963139", "0.59590113", "0.5958782", "0.59541005", "0.5953965", "0.594576", "0.5932744", "0.5932339", "0.5930283" ]
0.6165352
69
Return the next `batch_size` examples from this data set.
def next_batch(self, batch_size, shuffle=True): start = self._index_in_epoch # Shuffle for the first epoch if self._epochs_completed == 0 and start == 0 and shuffle: perm0 = np.arange(self._num_examples) np.random.shuffle(perm0) self._images = self.images[perm0] self._labels = self.labels[perm0] # Go to the next epoch if start + batch_size > self._num_examples: # Finished epoch self._epochs_completed += 1 # Get the rest examples in this epoch rest_num_examples = self._num_examples - start images_rest_part = self._images[start:self._num_examples] labels_rest_part = self._labels[start:self._num_examples] # Shuffle the data if shuffle: perm = np.arange(self._num_examples) np.random.shuffle(perm) self._images = self.images[perm] self._labels = self.labels[perm] # Start next epoch start = 0 self._index_in_epoch = batch_size - rest_num_examples end = self._index_in_epoch images_new_part = self._images[start:end] labels_new_part = self._labels[start:end] return np.concatenate( (images_rest_part, images_new_part), axis=0), np.concatenate( (labels_rest_part, labels_new_part), axis=0) else: self._index_in_epoch += batch_size end = self._index_in_epoch return self._images[start:end], self._labels[start:end]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n self._epochs_completed += 1\n start = 0\n self._index_in_epoch = batch_size\n end = self._index_in_epoch\n return self._samples[start:end], self._labels[start:end]", "def next_batch(self,batch_size):\r\n end_indicator = self._indicator + batch_size\r\n if end_indicator > self._num_examples:\r\n if self._need_shuffle:\r\n self._shuffle_data()\r\n end_indicator = batch_size\r\n else:\r\n raise Exception(\"have no more examples.\")\r\n\r\n if end_indicator > self._num_examples:\r\n raise Exception(\"batch size is larger than all examples.\")\r\n batch_data = self._data[self._indicator: end_indicator]\r\n batch_labels = self._labels[self._indicator: end_indicator]\r\n self._indicator = end_indicator\r\n return batch_data,batch_labels", "def next_batch(self, batch_size):\r\n start = self._index_in_epoch\r\n self._index_in_epoch += batch_size\r\n\r\n if self._index_in_epoch > self._num_examples:\r\n # After each epoch we update this\r\n self._epochs_done += 1\r\n start = 0\r\n self._index_in_epoch = batch_size\r\n #print(\"numexamples \",self._num_examples)\r\n assert batch_size <= self._num_examples\r\n end = self._index_in_epoch\r\n\r\n return self._images[start:end], self._labels[start:end], self._img_names[start:end], self._cls[start:end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n end = min(start + batch_size, self._num_examples)\n batch_data = self._data[start:end]\n if self._label_used:\n batch_labels = self._labels[start:end]\n\n if end == self._num_examples:\n self._epochs_completed += 1\n self._index_in_epoch = 0\n if self._shuffled:\n perm = np.arange(self._num_examples)\n random.shuffle(perm)\n self._data = self._data[perm]\n if self._label_used:\n self._labels = self._labels[perm]\n else:\n self._index_in_epoch = end\n\n if self._label_used:\n return batch_data,batch_labels\n else:\n return batch_data", "def next_batch(self,batch_size):\r\n end_indicator = self._indicator + batch_size\r\n if end_indicator > self._num_examples:\r\n if self._need_shuffle:\r\n self._shuffle_data()\r\n self._indicator = 0\r\n end_indicator = batch_size\r\n else:\r\n raise Exception(\"have no more examples.\")\r\n if end_indicator > self._num_examples:\r\n raise Exception(\"too lager batch size than examples.\")\r\n batch_data = self._data[self._indicator: end_indicator]\r\n batch_label = self._label[self._indicator: end_indicator]\r\n self._indicator = end_indicator\r\n return batch_data, batch_label", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n\n # Shuffle the data\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._features = self._features[perm]\n self._targets = self._targets[perm]\n\n # start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n\n end = self._index_in_epoch\n return self._features[start:end], self._targets[start:end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._number_examples:\n # finished epoch\n self._epochs_completed += 1\n # Shuffle the data \n if self._shuffle:\n new_index = np.random.permutation(self._number_examples)\n self._X = self._X[new_index]\n self._y = self._y[new_index]\n 
start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._number_examples\n end = self._index_in_epoch\n return self._X[start:end], self._y[start:end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._number_examples:\n # finished epoch\n self._epochs_completed += 1\n # Shuffle the data \n if self._shuffle:\n new_index = np.random.permutation(self._number_examples)\n self._X = self._X[new_index]\n self._y = self._y[new_index]\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._number_examples\n end = self._index_in_epoch\n return self._X[start:end], self._y[start:end]", "def next_batch(self,batch_size):\r\n start=self._index_in_epoch\r\n self._index_in_epoch=+batch_size\r\n\r\n if self._index_in_epoch > self._number_examples:\r\n # finished epoch\r\n self._epochs_completed += 1\r\n # Shuffle the data\r\n if self._shuffle:\r\n new_index = np.random.permutation(self._number_examples)\r\n self._X = self._X[new_index]\r\n self._y = self._y[new_index]\r\n start = 0\r\n self._index_in_epoch = batch_size\r\n assert batch_size <= self._number_examples\r\n end = self._index_in_epoch\r\n return self._X[start:end], self._y[start:end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._number_examples:\n # finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n if self._shuffle:\n new_index = np.random.permutation(self._number_examples)\n self._X = self._X[new_index]\n self._y = self._y[new_index]\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._number_examples\n end = self._index_in_epoch\n return self._X[start:end], self._y[start:end]", "def next(self, batch_size=np.inf):\n if self.batch_id == len(self.data):\n self.batch_id = 0\n # shuffle the data each pass over it.\n rng_state = np.random.get_state()\n np.random.shuffle(self.data)\n np.random.set_state(rng_state)\n np.random.shuffle(self.labels)\n \n end_idx = min(self.batch_id + batch_size, len(self.data))\n batch_data = (self.data[self.batch_id:end_idx])\n batch_labels = self.labels[self.batch_id:end_idx]\n batch_seqlen = (self.seqlen[self.batch_id:end_idx])\n self.batch_id = end_idx\n return batch_data, batch_labels, batch_seqlen", "def next_batch(self, batch_size):\r\n raise NotImplementedError", "def next_batch(self, batch_size):\n raise NotImplementedError", "def next_batch(self, batch_size):\n batch_data = np.zeros([batch_size,] + list(self.example_shape))\n for i in range(batch_size):\n index = self.q.pop()\n batch_data[i,...] 
= self.data[index]\n if len(self.q)==0:\n self.__new_epoch()\n\n return batch_data", "def next_batch(self, batch_size=8):\n raise NotImplementedError()", "def next_batch(self, batch_size):\n \n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start: end], self._labels[start: end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size, shuffle=True):", "def next_batch(self, batch_size, seed=None):\n if seed:\n np.random.seed(seed)\n\n start = self.index_in_epoch\n self.index_in_epoch += batch_size\n if self.index_in_epoch > self.num_examples:\n # Finished epoch\n self.epochs_completed += 1\n # Shuffle the data\n perm = np.arange(self.num_examples)\n np.random.shuffle(perm)\n self.data_X = self.data_X[perm]\n self.data_Y = self.data_Y[perm]\n # Start next epoch\n start = 0\n self.index_in_epoch = batch_size\n assert batch_size <= self.num_examples\n end = self.index_in_epoch\n\n return self.data_X[start:end], self.data_Y[start:end]", "def next(self, batch_size):\n if self.batch_id == len(self.data):\n self.batch_id = 0\n batch_data = (self.data[self.batch_id:min(self.batch_id + batch_size, len(self.data))])\n batch_labels = (self.labels[self.batch_id:min(self.batch_id + batch_size, len(self.data))])\n batch_seqlen = (self.seqlen[self.batch_id:min(self.batch_id + batch_size, len(self.data))])\n self.batch_id = min(self.batch_id + batch_size, len(self.data))\n\n\n return batch_data, batch_labels, batch_seqlen", "def next(self, batch_size):\n if self.batch_id == len(self.data):\n self.batch_id = 0\n batch_data = (self.data[self.batch_id:min(self.batch_id + batch_size, len(self.data))])\n batch_labels = (self.labels[self.batch_id:min(self.batch_id + batch_size, len(self.data))])\n batch_seqlen = (self.seqlen[self.batch_id:min(self.batch_id + batch_size, len(self.data))])\n self.batch_id = min(self.batch_id + batch_size, len(self.data))\n\n\n return batch_data, batch_labels, batch_seqlen", "def next(self, batch_size):\n if self.batch_id == len(self.data):\n self.batch_id = 0\n self.data, 
self.labels, self.seqlen = shuffle(self.data, self.labels, self.seqlen)\n batch_data = (self.data[self.batch_id:min(self.batch_id +\n batch_size, len(self.data))])\n batch_labels = (self.labels[self.batch_id:min(self.batch_id +\n batch_size, len(self.data))])\n batch_seqlen = (self.seqlen[self.batch_id:min(self.batch_id +\n batch_size, len(self.data))])\n self.batch_id = min(self.batch_id + batch_size, len(self.data))\n return batch_data, batch_labels, batch_seqlen", "def next_batch(self, batch_size):\n start = self.index_in_epoch\n self.index_in_epoch += batch_size\n self.epoch += batch_size/self.num_examples\n\n # When all the training data is ran, shuffles it\n if self.index_in_epoch > self.num_examples and self.shuffle:\n self.indexer = np.random.permutation(self.num_examples)\n # Start next epoch\n start = 0\n self.index_in_epoch = batch_size\n assert batch_size <= self.num_examples\n\n if self.iterate:\n batch_df = pd.DataFrame()\n if self.epoch < 1:\n batch_df = pd.read_csv(self.path, nrows=batch_size, skiprows=start)\n else:\n for i in range(batch_size):\n item = pd.read_csv(self.path, nrows=1, skiprows=self.indexer[start+i])\n batch_df = pd.concat(item)\n else:\n batch_df = self.df[start: self.index_in_epoch]\n\n examples = np.multiply(batch_df.iloc[:, 1:].values.astype(np.float), 1.0 / 255.0)\n labels = self.dense_to_one_hot(batch_df.iloc[:, 0].values.ravel(), 10)\n\n batch = {'features': examples, 'labels': labels}\n return batch", "def train_next_batch(self, batch_size=None):", "def next_batch(self, batch_size, fake_data=False):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = numpy.arange(self._num_examples)\n numpy.random.shuffle(perm)\n self._texts = self._texts[perm]\n self._topologys = self._topologys[perm]\n self._urls = self._urls[perm]\n self._demos = self._demos[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._texts[start:end], self._topologys[start:end], self._urls[start:end], self._demos[start:end], self._labels[start:end]", "def next_batch(self, batch_size: int) -> Tuple[numpy.ndarray, numpy.ndarray]:\n assert batch_size <= self._num_examples\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n start = self._start_new_epoch(batch_size, start)\n end = self._index_in_epoch\n return self._input[start:end], self._labels[start:end]", "def next_batch(self, batch_size):\n start = self._index_in_epoch\n if start + batch_size > self.num_examples:\n self._epochs_completed += 1\n rest_num_examples = self.num_examples - start\n images_rest_part = self._images[start:self.num_examples]\n labels_rest_part = self._labels[start:self.num_examples]\n self.permute()\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n images_new_part = self._images[start:end]\n labels_new_part = self._labels[start:end]\n\n result_images = np.concatenate(\n (images_rest_part, images_new_part), axis=0\n )\n result_labels = np.concatenate(\n (labels_rest_part, labels_new_part), axis=0\n )\n return result_images, result_labels\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size):\n X_batch = 
self.X_data[self.batch_num*batch_size:(self.batch_num+1)*batch_size]\n Y_batch = self.Y_data[self.batch_num*batch_size:(self.batch_num+1)*batch_size]\n self.batch_num += 1\n return X_batch, Y_batch", "def next_batch(data, labels, batch_size):\n global _index_in_epoch\n start = _index_in_epoch\n _index_in_epoch += batch_size\n _num_examples = len(data)\n\n if _index_in_epoch > _num_examples:\n # Shuffle the data\n perm = np.arange(_num_examples)\n np.random.shuffle(perm)\n data = data[perm]\n labels = labels[perm]\n # Start next epoch\n start = 0\n _index_in_epoch = batch_size\n assert batch_size <= _num_examples\n\n end = _index_in_epoch\n return data[start:end], labels[start:end]", "def next_batch(self, batch_size, fake_data=False):\r\n if fake_data:\r\n fake_image = [1.0 for _ in range(784)]\r\n fake_label = 0\r\n return [fake_image for _ in range(batch_size)], [fake_label for _ in range(batch_size)]\r\n start = self._index_in_epoch\r\n self._index_in_epoch += batch_size\r\n #print (0)\r\n #print(self._index_in_epoch,self._num_examples)\r\n #若当前训练读取的index>总体的images数时,则读取读取开始的batch_size大小的数据\r\n if self._index_in_epoch > self._num_examples:\r\n #print (0)\r\n # Finished epoch\r\n self._epochs_completed += 1\r\n # Shuffle the data\r\n perm = numpy.arange(self._num_examples)\r\n numpy.random.shuffle(perm)\r\n self._images = self._images[perm]\r\n self._labels = self._labels[perm]\r\n # Start next epoch\r\n start = 0\r\n self._index_in_epoch = batch_size\r\n assert batch_size <= self._num_examples\r\n end = self._index_in_epoch\r\n #print (\"start is:%d,end is:%d\"%(start,end))\r\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batchSize, use_labels=False):\n start = self._index_in_epochs\n self._index_in_epochs += batchSize\n\n if self._index_in_epochs >= self.xtrain.shape[0]:\n self._epochs_completed += 1\n perm = np.arange(self.xtrain.shape[0])\n np.random.shuffle(perm)\n self.xtrain = self.xtrain[perm, :]\n self.ytrain = self.ytrain[perm]\n start = 0\n self._index_in_epochs = batchSize\n\n end = self._index_in_epochs\n if use_labels:\n return self.xtrain[start:end, :], self.ytrain[start:end]\n else:\n return self.xtrain[start:end, :]", "def next_batch(self, batch_size):\n i_bucket = self.bucket_order[self.bucket_cursor]\n # Increment cursor and shuffle in case of new round\n self.bucket_cursor = (self.bucket_cursor + 1) % self.num_buckets\n if self.bucket_cursor == 0:\n self.bucket_order = np.random.permutation(self.num_buckets)\n\n if self.cursor[i_bucket] + batch_size > self.buckets_size[i_bucket]:\n self.shuffle(i_bucket)\n\n # Handle too big batch sizes\n if (batch_size > self.buckets_size[i_bucket]):\n batch_size = self.buckets_size[i_bucket]\n\n res = self.buckets[i_bucket].iloc[self.cursor[i_bucket]:\n self.cursor[i_bucket]+batch_size]\n self.cursor[i_bucket] += batch_size\n\n # PAD input sequence and output\n input_max = max(res['in_length'])\n\n input_imgs = np.zeros(\n (batch_size, self.slider[0], input_max, 1), dtype=np.uint8)\n for i, img in enumerate(res['images']):\n input_imgs[i][:, :res['in_length'].values[i], 0] = img\n \n if self.train:\n input_imgs = self.augmentation.augment_images(input_imgs)\n input_imgs = input_imgs.astype(np.float32)\n\n targets = sequences_to_sparse(res['targets'].values)\n return input_imgs, targets, res['in_length'].values", "def next_batch(self, batch_size, whitened=False):\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n 
self._epochs_completed += 1\n # Shuffle the data\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n if whitened:\n return self.images[start:end], self._whitened_images[start:end]\n else: return self._images[start:end], self.images[start:end]", "def next_batch(self, batch_size):\n\n all_idx = np.arange(0, self.length)\n np.random.shuffle(all_idx)\n batch_idx = all_idx[:batch_size]\n batch_imgs = [self.images[i] for i in batch_idx]\n batch_traces = [self.traces[i] for i in batch_idx]\n return batch_imgs, batch_traces", "def next_batch(self, batch_size, fake_data=False):\n if fake_data:\n fake_image = [1] * 784\n if self.one_hot:\n fake_label = [1] + [0] * 9\n else:\n fake_label = 0\n return [fake_image for _ in range(batch_size)], [\n fake_label for _ in range(batch_size)]\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n\n # Shuffle the data\n np.random.seed(0)\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, dataset, batch_size=128, replace=False):\n func_name = '_next_' + dataset + '_batch'\n if not hasattr(self, func_name):\n raise ValueError('Invalid dataset name: %s' % dataset)\n func = getattr(self, func_name)\n return func(batch_size, replace)", "def next_batch(self, batch_size, fake_data=False):\n if fake_data:\n #fake_image = [1.0 for _ in xrange(784)]\n fake_image = [1.0 for _ in range(784)]\n fake_label = 0\n #return [fake_image for _ in xrange(batch_size)], [\n # fake_label for _ in xrange(batch_size)]\n return [fake_image for _ in range(batch_size)], [\n fake_label for _ in range(batch_size)]\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = numpy.arange(self._num_examples)\n numpy.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next(self):\n prev_doc_id, prev_in_doc_pos = self._state.update_state(\n self.dataset,\n self.batch_size,\n self.context_size,\n self._num_examples_in_doc)\n\n # generate the actual batch\n batch = _NCEBatch(self.context_size)\n\n while len(batch) < self.batch_size:\n if prev_doc_id == len(self.dataset):\n # last document exhausted\n batch.torch_()\n return batch\n if prev_in_doc_pos <= (len(self.dataset[prev_doc_id].text) - 1\n - self.context_size):\n # more examples in the current document\n self._add_example_to_batch(prev_doc_id, prev_in_doc_pos, batch)\n prev_in_doc_pos += 1\n else:\n # go to the next document\n prev_doc_id += 1\n prev_in_doc_pos = self.context_size\n\n batch.torch_()\n return batch", "def next(self):\n prev_doc_id, prev_in_doc_pos = self._state.update_state(\n self.dataset,\n self.batch_size,\n self.context_size,\n 
self._num_examples_in_doc)\n\n # generate the actual batch\n batch = _NCEBatch(self.context_size)\n\n while len(batch) < self.batch_size:\n if prev_doc_id == len(self.dataset):\n # last document exhausted\n batch.torch_()\n return batch\n if prev_in_doc_pos <= (len(self.dataset[prev_doc_id].text) - 1\n - self.context_size):\n # more examples in the current document\n self._add_example_to_batch(prev_doc_id, prev_in_doc_pos, batch)\n prev_in_doc_pos += 1\n else:\n # go to the next document\n prev_doc_id += 1\n prev_in_doc_pos = self.context_size\n\n batch.torch_()\n return batch", "def next_batch(self, batch_size, fake_data=False):\n if fake_data:\n fake_image = [1] * 784\n if self.one_hot:\n fake_label = [1] + [0] * 9\n else:\n fake_label = 0\n return [fake_image for _ in xrange(batch_size)], [\n fake_label for _ in xrange(batch_size)]\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size, fake_data=False):\n if fake_data:\n fake_image = [1] * 784\n if self.one_hot:\n fake_label = [1] + [0] * 9\n else:\n fake_label = 0\n return [fake_image for _ in xrange(batch_size)], [\n fake_label for _ in xrange(batch_size)\n ]\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Shuffle the data\n perm = numpy.arange(self._num_examples)\n numpy.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size, shuffle=False):\n\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self._epochs_completed == 0 and start == 0 and shuffle:\n perm0 = np.arange(self._num_examples)\n np.random.shuffle(perm0)\n self._data_index = self._data_index[perm0]\n # Go to the next epoch\n if start + batch_size > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self._num_examples - start\n _data_index_rest_part = self._data_index[start:self._num_examples]\n imgs_batch_rest, labels_batch_rest = self._read_batch_data(_data_index_rest_part)\n # Shuffle the data\n if shuffle:\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._data_index = self._data_index[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n _data_index_new_part = self._data_index[start:end]\n imgs_batch_new_part, labels_batch_new_part = self._read_batch_data(_data_index_new_part)\n imgs_batch = np.concatenate((imgs_batch_rest, imgs_batch_new_part), axis=0)\n labels_batch = np.concatenate((labels_batch_rest, labels_batch_new_part), axis=0)\n return imgs_batch, labels_batch\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n imgs_batch, labels_batch = 
self._read_batch_data(self._data_index[start:end])\n return imgs_batch, labels_batch", "def next_batch(self, batch_size, fake_data=False):\n if fake_data:\n fake_image = [1] * 784\n if self.one_hot:\n fake_label = [1] + [0] * 9\n else:\n fake_label = 0\n return [fake_image for _ in range(batch_size)], [fake_label for _ in range(batch_size)]\n start = self._index_in_epoch\n self._index_in_epoch += batch_size\n\n if self._index_in_epoch > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n\n # Shuffle data\n np.random.seed(0)\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._images = self._images[perm]\n self._labels = self._labels[perm]\n\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size\n assert batch_size <= self._num_examples\n\n end = self._index_in_epoch\n\n return self._images[start:end], self._labels[start:end]", "def next(self):\n # Most batches will be equal to batch_size\n if self.cur < (self.n - self.batch_size):\n # Get positions of files in batch\n positions = self.order[self.cur:self.cur + self.batch_size]\n\n self.cur += self.batch_size\n\n # create Batches\n X_train, y_train, sample_weights = self.createBatches(positions)\n\n return X_train, y_train, sample_weights\n\n # Final batch is smaller than batch_size\n if self.cur < self.n:\n positions = self.order[self.cur::]\n\n # Step is maximum - next will return None\n self.cur = self.n\n\n # Create Batches\n X_train, y_train, sample_weights = self.createBatches(positions)\n\n return X_train, y_train, sample_weights\n\n else:\n # reshuffle order for next batch\n np.random.shuffle(self.order)\n\n # Reset cur\n self.cur = 0\n\n # Signal end of epoch\n return None", "def next_batch_random(self, size):\n index = np.random.randint(0, self.dataset_size, size=int(size))\n return ([x[index, :] for x in self.data])", "def next_batch(self, batch_size=8):\n if not self.db:\n raise AssertionError(\"Database not set. 
Please call setup_read() before calling next_batch().\")\n\n assert self.f[self.label_key].shape[0] == self.f[self.image_key].shape[0]\n\n if self.row_idx + batch_size > self.f[self.label_key].shape[0]:\n self.row_idx = 0\n\n start_idx = self.row_idx\n self.row_idx += batch_size\n\n if self.randomize_access:\n perm = np.sort(self.permutation[start_idx:start_idx + batch_size]).tolist()\n excerpt = self.f[self.image_key][perm], self.f[self.label_key][perm]\n else:\n excerpt = self.f[self.image_key][start_idx:start_idx + batch_size], self.f[self.label_key][\n start_idx:start_idx + batch_size]\n\n return excerpt", "def next_batch(self, batch_size):\n # If the caller wants all of the data simply return the whole data set as a triple\n if batch_size is None:\n self.__num_epochs += 1\n return (self.__x, self.__y1, self.__y2)\n\n if batch_size > self.__data_size:\n print(\"Please specify a batch size less than the number of entries in the data set\")\n sys.exit(2)\n\n if batch_size + self.__batch_cursor < self.__data_size:\n # If the batch size is less than the number of entries left in the data:\n # Take the next batch size number of elements and move the cursor forwards.\n x_batch = self.__x[self.__batch_cursor:batch_size + self.__batch_cursor]\n y1_batch = self.__y1[self.__batch_cursor:batch_size + self.__batch_cursor]\n y2_batch = self.__y2[self.__batch_cursor:batch_size + self.__batch_cursor]\n self.__batch_cursor = self.__batch_cursor + batch_size\n else:\n # If there is not enough data left then take the remaining data from the end and start again at the begining.\n x_batch = self.__x[self.__batch_cursor:]\n y1_batch = self.__y1[self.__batch_cursor:]\n y2_batch = self.__y2[self.__batch_cursor:]\n number_still_required = batch_size - (self.__data_size - self.__batch_cursor)\n x_batch = np.concatenate((x_batch, self.__x[0:number_still_required]))\n y1_batch = np.concatenate((y1_batch, self.__y1[0:number_still_required]))\n y2_batch = np.concatenate((y2_batch, self.__y2[0:number_still_required]))\n self.__batch_cursor = number_still_required\n self.__num_epochs += 1\n\n return (x_batch, y1_batch, y2_batch)", "def next_batch(self):\n if self.ptr + self.batch_size >= self.size:\n head = 0\n tail = self.batch_size\n self.ptr = self.batch_size\n else:\n head = self.ptr\n tail = self.ptr + self.batch_size\n self.ptr += self.batch_size\n return self.train_x[head:tail, 0:self.fig_w**2], self.train_y[head:tail, 0:10]", "def next_batch(self, batch_size, shuffle=True):\n\n\n\t\tstart = self._index_in_epoch\n\t\t# Shuffle for the first epoch\n\t\tif self._epochs_completed == 0 and start == 0 and shuffle:\n\t\t\tperm0 = numpy.arange(self._num_examples)\n\t\t\tnumpy.random.shuffle(perm0)\n\t\t\tself._images = self.images[perm0]\n\t\t\tself._labels = self.labels[perm0]\n\n\t\t# Go to the next epoch\n\t\tif start + batch_size > self._num_examples:\n\t\t\t# Finished epoch\n\t\t\tself._epochs_completed += 1\n\t\t\t# Get the rest examples in this epoch\n\t\t\trest_num_examples = self._num_examples - start\n\t\t\timages_rest_part = self._images[start:self._num_examples]\n\t\t\tlabels_rest_part = self._labels[start:self._num_examples]\n\t\t\t# Shuffle the data\n\t\t\tif shuffle:\n\t\t\t\tperm = numpy.arange(self._num_examples)\n\t\t\t\tnumpy.random.shuffle(perm)\n\t\t\t\tself._images = self.images[perm]\n\t\t\t\tself._labels = self.labels[perm]\n\t\t\t# Start next epoch\n\t\t\tstart = 0\n\t\t\tself._index_in_epoch = batch_size - rest_num_examples\n\t\t\tend = self._index_in_epoch\n\t\t\timages_new_part = 
self._images[start:end]\n\t\t\tlabels_new_part = self._labels[start:end]\n\t\t\treturn numpy.concatenate((images_rest_part, images_new_part), axis=0), numpy.concatenate((labels_rest_part, labels_new_part), axis=0)\n\t\telse:\n\t\t\tself._index_in_epoch += batch_size\n\t\t\tend = self._index_in_epoch\n\t\t\treturn self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size):\n # Get batch\n assert(batch_size == 1)\n em, mask_list, seed_list = self.next_example(self.K)\n\n # Reshape for batch size 1\n em_batch = np.expand_dims(em, 0)\n mask_list = [np.expand_dims(m,0) for m in mask_list]\n \n return em_batch, mask_list", "def next(self, batch_size = -1):\n if not self.batch_iter:\n self.batch_iter = self.get_homogenous_batch_iter(batch_size)\n try:\n batch = next(self.batch_iter)\n except StopIteration:\n return None\n return batch", "def next_batch(self, batch_size, shuffle=True):\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self._epochs_completed == 0 and start == 0 and shuffle:\n perm0 = numpy.arange(self._num_examples)\n numpy.random.shuffle(perm0)\n self._images = self.images[perm0]\n self._labels = self.labels[perm0]\n # Go to the next epoch\n if start + batch_size > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self._num_examples - start\n images_rest_part = self._images[start:self._num_examples]\n labels_rest_part = self._labels[start:self._num_examples]\n # Shuffle the data\n if shuffle:\n perm = numpy.arange(self._num_examples)\n numpy.random.shuffle(perm)\n self._images = self.images[perm]\n self._labels = self.labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n images_new_part = self._images[start:end]\n labels_new_part = self._labels[start:end]\n return numpy.concatenate((images_rest_part, images_new_part), axis=0), numpy.concatenate(\n (labels_rest_part, labels_new_part), axis=0)\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n return self._images[start:end], self._labels[start:end]", "def next_batch(self, batch_size, shuffle=True):\n\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self._epochs_completed == 0 and start == 0 and shuffle:\n perm0 = np.arange(self._num_examples)\n np.random.shuffle(perm0)\n self._inps = self.inps[perm0]\n self._outs = self.outs[perm0]\n # Go to the next epoch\n if start + batch_size > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self._num_examples - start\n inps_rest_part = self._inps[start:self._num_examples]\n outs_rest_part = self._outs[start:self._num_examples]\n # Shuffle the data\n if shuffle:\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._inps = self.inps[perm]\n self._outs = self.outs[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n inps_new_part = self._inps[start:end]\n outs_new_part = self._outs[start:end]\n return np.concatenate((inps_rest_part, inps_new_part), axis=0) , np.concatenate((outs_rest_part, outs_new_part), axis=0)\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n return self._inps[start:end], self._outs[start:end]", "def next_batch(self, batch_size, shuffle=True):\n\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self.epochs_completed == 
0 and start == 0 and shuffle:\n perm0 = numpy.arange(self.num_examples)\n numpy.random.shuffle(perm0)\n self._images = self.images[perm0]\n\n # Go to the next epoch\n if start + batch_size > self.num_examples:\n # Finished epoch\n self.epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self.num_examples - start\n images_rest_part = self._images[start:self.num_examples]\n # Shuffle the data\n if shuffle:\n perm = numpy.arange(self.num_examples)\n numpy.random.shuffle(perm)\n self._images = self.images[perm]\n\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n images_new_part = self._images[start:end]\n\n return numpy.concatenate( (images_rest_part, images_new_part), axis=0)\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n return self._images[start:end]", "def next_batch(self):\n\n start = self._index_in_epoch\n self._index_in_epoch += self._batch_size\n\n if self._index_in_epoch >= (self._dataset.num_examples - 1):\n # set to last image in data set\n self._index_in_epoch = self._dataset.num_examples - 1\n assert self._batch_size <= self._dataset.num_examples\n\n end = self._index_in_epoch\n\n return self._dataset.images[start:end], self._dataset.labels[start:end]", "def nextBatch(self, batch_size):\n self._start = self._cursor\n self._cursor += batch_size\n if self._start + batch_size > self._num_samples:\n rest_num_samples = self._num_samples - self._start\n word_batch = np.zeros((batch_size, self._sentences.shape[1]), dtype=np.int32)\n tag_batch = np.zeros((batch_size), dtype=np.int32)\n word_batch[0:rest_num_samples] = self._sentences[self._start:self._num_samples]\n tag_batch[0:rest_num_samples] = self.labels[self._start:self._num_samples]\n\n return word_batch, tag_batch\n else:\n end = self._cursor\n return self._sentences[self._start:end], self._labels[self._start:end]", "def next_batch(self):\n # Whether an epoch is done.\n done = False\n samples = []\n for _ in range(self.batch_size):\n # Indeed, `>` will not occur.\n if self.ptr >= self.dataset_size:\n done = True\n break\n else:\n self.ptr += 1\n sample = self.enqueuer.queue.get()\n samples.append(sample)\n # print 'queue size: {}'.format(self.enqueuer.queue.qsize())\n # Indeed, `>` will not occur.\n if self.ptr >= self.dataset_size:\n done = True\n return samples, done", "def next(self):\n # Keeps under lock only the mechanism which advances\n # the indexing of each batch.\n with self.lock:\n index_array = next(self.index_generator)\n # The transformation of images is not under thread lock\n # so it can be done in parallel\n return self._get_batches_of_transformed_samples(index_array)", "def _next(self):\n batch_start, batch_end = self.batch_start, self.batch_start + self.batch_size\n if batch_end > self.X.shape[0]:\n self.shuffle()\n return self._next()\n else:\n batch_indices = self.indices[batch_start:batch_end]\n X_batch, y_batch = self.X[batch_indices], self.y[batch_indices]\n X_batch, y_batch = self.process_batch(X_batch, y_batch)\n self.batch_start = batch_end\n return X_batch, y_batch", "def _extract_batch(self, data, batch_size):\n\n batch_size = batch_size or BATCH_SIZE\n\n batch = []\n try:\n for i in range(batch_size):\n batch.append(data.next())\n except StopIteration:\n pass\n\n return batch", "def next_training_data_batch(self, batch_size, expected_length):\n if self._training_data is None:\n self._load_training_data()\n if expected_length in self._training_data:\n actual_length = 
expected_length\n else:\n differences = np.abs(self._available_training_lengths - expected_length)\n mininimum_loc = np.argmin(differences)\n actual_length = self._available_training_lengths[mininimum_loc]\n all_data, all_labels = self._training_data[actual_length]\n if batch_size > len(all_data):\n print(\"Probably shouldn't do this; your batch size is greater than the size of the dataset\")\n data = None\n labels = None\n while batch_size > 0:\n if len(all_data) - self.current_index[actual_length] < batch_size:\n # print(\"A\" + str(self.current_index))\n batch_size -= (len(all_data) - self.current_index[actual_length])\n if self.current_index[actual_length] != len(all_data):\n if data is None:\n data = np.array(all_data[self.current_index[actual_length]:])\n labels = np.array(all_labels[self.current_index[actual_length]:])\n else:\n data = np.concatenate((data, all_data[self.current_index[actual_length]:]), axis=0)\n labels = np.concatenate((labels, all_labels[self.current_index[actual_length]:]), axis=0)\n self.current_index[actual_length] = 0\n else:\n # print(\"B\" + str(self.current_index))\n if data is None:\n data = all_data[self.current_index[actual_length]:self.current_index[actual_length] + batch_size]\n labels = np.array(all_labels[self.current_index[actual_length]:self.current_index[actual_length] + batch_size])\n else:\n data = np.concatenate((data, all_data[self.current_index[actual_length]:self.current_index[actual_length] + batch_size]), axis=0)\n labels = np.concatenate((labels, all_labels[self.current_index[actual_length]:self.current_index[actual_length] + batch_size]),\n axis=0)\n self.current_index[actual_length] += batch_size\n batch_size = 0\n data = np.array(data)\n data = np.swapaxes(data, 0, 1)\n return (actual_length, (data, labels))", "def _next_train_batch(self, batch_size=128, replace=False):\n mask = np.random.choice(self.train_data.shape[0], batch_size, replace=replace)\n return self.train_data[mask], self.train_label[mask]", "def next_batch(index,feature,label,batch_size):\n epochs_completed = 0\n examples = feature.shape[0]\n start = index*batch_size\n index_in_epoch =index*batch_size+batch_size-1\n if index_in_epoch > examples:\n # Finished epoch\n epochs_completed += 1\n # Shuffle the data\n perm = np.arange(examples)\n np.random.shuffle(perm)\n feature = feature[perm]\n label = label[perm]\n # Start next epoch\n start = 0\n index_in_epoch = batch_size\n assert batch_size <= examples\n end = index_in_epoch\n return feature[start:end], label[start:end]", "def get_batches(self, batch_size):\n if self.data.shape[0] % batch_size != 0:\n raise RuntimeError('num of data tuples is not a multiple of batch size')\n num_batch = self.data.shape[0] // batch_size\n for b in range(num_batch):\n yield self.data[b*batch_size:(b+1)*batch_size, :], \\\n self.target[b*batch_size:(b+1)*batch_size, :]", "def next(self):\n\t\t# keep looping until we reach our batch size\n\t\twhile True:\n\t\t\tret = self.get_batch()\n\t\t\tself.index += self.batch_size\n\t\t\tif self.index >= len(self.texts) - self.batch_size:\n\t\t\t\tself.index = 0\n\t\t\tyield ret", "def train_batch_iter(self, batch_size, num_epochs):\n return self.batch_iter(0, batch_size, num_epochs)", "def next_batch(self, batch_size, batch_wrap=True, shuffle=True):\n start = self.i_in_epoch\n if self.epochs_completed == 0 and start == 0 and shuffle:\n self.shuffle()\n\n data_batch = [0] * self.nb_data\n if start + batch_size >= self.d_size:\n # Finished epoch\n self.epochs_completed += 1\n self.i_in_epoch = 0\n for 
idx_dt in range(self.nb_data):\n data_batch[idx_dt] = self.data_list[idx_dt][start:self.d_size]\n if shuffle:\n self.shuffle()\n\n if batch_wrap:\n # Start next epoch\n self.i_in_epoch = batch_size - (self.d_size - start)\n end = self.i_in_epoch\n\n for idx_dt in range(self.nb_data):\n data_new_part = self.data_list[idx_dt][0:end]\n # e.g.shape of two inputs: (58, 12), (70, 12)\n data_batch[idx_dt] = np.vstack([data_batch[idx_dt], data_new_part])\n return data_batch\n else:\n self.i_in_epoch += batch_size\n end = self.i_in_epoch\n for idx_dt in range(self.nb_data):\n data_batch[idx_dt] = self.data_list[idx_dt][start:end]\n return data_batch", "def next_batch(self, batch_size, img_size=64, shuffle=True):\n if self.keys.shape[0] == 0:\n self.keys_fetch()\n\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self._epochs_completed == 0 and start == 0 and shuffle:\n perm0 = np.random.permutation(self._num_examples)\n self.keys = self.keys[perm0]\n # Go to the next epoch\n if start + batch_size > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self._num_examples - start\n keys_rest_part = self.keys[start:self._num_examples]\n # Shuffle the data\n if shuffle:\n perm = np.random.permutation(self._num_examples)\n self.keys = self.keys[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n keys_new_part = self.keys[start:end]\n images = self.get_image_by_keys(\n np.concatenate((keys_rest_part, keys_new_part), axis=0),\n img_size=img_size)\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n images = self.get_image_by_keys(self.keys[start:end], \n img_size=img_size)\n return images", "def next_train_batch(self, batch_size):\n if (not self.has_next_train()):\n self._random_permutation()\n self.train_next = 0\n if (self.train_next + batch_size <= len(self.train_list)):\n real_batch_size = batch_size\n else:\n real_batch_size = len(self.train_list) - self.train_next\n img_set = np.zeros([batch_size, self.img_height, self.img_width, 3])\n ground_truth_set = np.zeros([batch_size, self.img_height, self.img_width])\n for i in range(self.train_next, self.train_next + real_batch_size):\n train_ind = self.train_list[self.train_permutation[i]]\n img_path = join(self.dataset_dir, 'data/jpg_images', train_ind + '.jpg')\n img_set[i - self.train_next] = self.load_image(img_path)\n mat_path = join(self.dataset_dir, 'data/label_mat', train_ind + '.mat')\n ground_truth_set[i - self.train_next] = self.load_ground_truth(mat_path)\n dup_cnt = 0\n while (real_batch_size < batch_size):\n img_set[real_batch_size] = img_set[dup_cnt]\n ground_truth_set[real_batch_size] = ground_truth_set[dup_cnt]\n dup_cnt = dup_cnt + 1\n real_batch_size = real_batch_size + 1\n self.train_next = self.train_next + batch_size\n return [img_set, ground_truth_set]", "def next(self):\n if self._curr_batch + 1 > self.num_batches:\n # no more batches in current iteration through data set so start\n # new epoch ready for another pass and indicate iteration is at end\n self.new_epoch()\n raise StopIteration()\n # create an index slice corresponding to current batch number\n batch_slice = slice(self._curr_batch * self.batch_size,\n (self._curr_batch + 1) * self.batch_size)\n inputs_batch = self.inputs[batch_slice]\n targets_batch = self.targets[batch_slice]\n self._curr_batch += 1\n return inputs_batch, targets_batch", "def next(self):\n with self.lock:\n 
index_array = next(self.index_generator)\n # The transformation of images is not under thread lock\n # so it can be done in parallel\n return self._get_batches_of_transformed_samples(index_array)", "def get_next_batch(self):\n if self.index_in_epoch + self.batch_size > self.X.shape[0]:\n idx = np.arange(0, self.X.shape[0])\n self.index_in_epoch = 0\n np.random.shuffle(idx)\n self.X = self.X[idx]\n self.y = self.y[idx]\n # idx = idx[self.index_in_epoch:self.index_in_epoch + self.batch_size]\n # data_shuffle = [self.X[i] for i in idx]\n # labels_shuffle = [self.y[i] for i in idx]\n # data_shuffle = np.asarray(data_shuffle)\n # data_shuffle = np.reshape(data_shuffle, newshape=(self.batch_size, 1024))\n #\n # labels_shuffle = np.asarray(labels_shuffle)\n # labels_shuffle = np.reshape(labels_shuffle, newshape=(self.batch_size, 26))\n data_shuffle = self.X[self.index_in_epoch:self.index_in_epoch + self.batch_size, :]\n data_shuffle = np.reshape(data_shuffle, newshape=(self.batch_size, 32, 32, 1))\n labels_shuffle = self.y[self.index_in_epoch:self.index_in_epoch + self.batch_size, :]\n labels_shuffle = np.reshape(labels_shuffle, newshape=(self.batch_size, 26))\n self.index_in_epoch += self.batch_size\n return data_shuffle, labels_shuffle", "def next_batch(self):\n for nb in xrange(self.num_batches):\n if self.batch_end < self.full_len:\n batch_X_raw = self.full_X[self.batch_start:self.batch_end]\n batch_y_raw = self.full_y[self.batch_start:self.batch_end]\n else:\n batch_X_raw = self.full_X[self.batch_start:]\n batch_y_raw = self.full_y[self.batch_start:]\n batch_X, batch_y = pad_sort_data(batch_X_raw, batch_y_raw)\n self.batch_start = self.batch_end\n self.batch_end += self.batch_size\n yield batch_X, batch_y", "def next_batch(self, batch_size, shuffle=True):\n\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self._epochs_completed == 0 and start == 0 and shuffle:\n perm0 = np.arange(self._num_examples)\n np.random.shuffle(perm0)\n self._enc_inps = self.enc_inps[perm0]\n self._dec_inps = self.dec_inps[perm0]\n self._dec_outs = self.dec_outs[perm0]\n # Go to the next epoch\n if start + batch_size > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self._num_examples - start\n enc_inps_rest_part = self._enc_inps[start:self._num_examples]\n dec_inps_rest_part = self._dec_inps[start:self._num_examples]\n dec_outs_rest_part = self._dec_outs[start:self._num_examples]\n # Shuffle the data\n if shuffle:\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._enc_inps = self.enc_inps[perm]\n self._dec_inps = self.dec_inps[perm]\n self._dec_outs = self.dec_outs[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = batch_size - rest_num_examples\n end = self._index_in_epoch\n enc_inps_new_part = self._enc_inps[start:end]\n dec_inps_new_part = self._dec_inps[start:end]\n dec_outs_new_part = self._dec_outs[start:end]\n return np.concatenate((enc_inps_rest_part, enc_inps_new_part), axis=0), \\\n np.concatenate((dec_inps_rest_part, dec_inps_new_part), axis=0), \\\n np.concatenate((dec_outs_rest_part, dec_outs_new_part), axis=0)\n else:\n self._index_in_epoch += batch_size\n end = self._index_in_epoch\n return self._enc_inps[start:end], self._dec_inps[start:end], self._dec_outs[start:end]", "def next_explode_batch(self, batch_size):\n\n all_idx = np.arange(0, self.explode_length)\n np.random.shuffle(all_idx)\n batch_idx = all_idx[:batch_size]\n batch_imgs = [self.explode_images[i] for i 
in batch_idx]\n batch_lbls = [self.explode_lbls[i] for i in batch_idx]\n batch_labels = [self.explode_labels[i] for i in batch_idx]\n batch_counts = [self.explode_counts[i] for i in batch_idx]\n batch_traces = [self.explode_traces[i] for i in batch_idx]\n return batch_imgs, batch_lbls, batch_labels, batch_counts, batch_traces", "def next_sample(self, batch_size=1):\n\n X = []\n y = []\n\n for count in range(batch_size):\n #check for abrupt drift\n if count % self.abrupt_drift_rate == 0:\n dimfaks = [round(np.random.rand() * 4, 1) for _ in range(self.dims)]\n dimpots = [1 + round(np.random.rand() * 2) for _ in range(self.dims)]\n dimvars = [np.random.rand() * self.var for _ in range(self.dims)]\n dimmeans = [5 + np.random.rand() * 10 for _ in range(self.dims)]\n print(\"Random Polynomconcept: \", end=\"\")\n for i in range(self.dims):\n print(dimfaks[i],\" * x\", i+1, \"^\", dimpots[i], \" + \",end=\"\", sep=\"\")\n print()\n\n value = 0\n sample = []\n for i in range(self.dims):\n sample.append(np.random.normal(loc=dimmeans[i], scale=dimvars[i]))\n value += dimfaks[i] * (sample[i] ** dimpots[i])\n \n X.append(sample)\n y.append(value)\n\n self._x_idx += batch_size\n\n return (X, y)", "def get_train_batch_generator(self, size):\n self.shuffle_train()\n while self.train_position + size < len(self.train):\n yield self.unzip_batch(self.train[self.train_position:self.train_position + size])\n self.train_position = self.train_position + size", "def next_batch(self, shuffle=True):\n start = self._index_in_epoch\n # Shuffle for the first epoch\n if self._epochs_completed == 0 and start == 0 and shuffle:\n perm0 = np.arange(self._num_examples)\n np.random.shuffle(perm0)\n self._units = self.units.iloc[perm0]\n self._labels = self.labels[perm0]\n # Go to the next epoch\n if start + self._batch_size > self._num_examples:\n # Finished epoch\n self._epochs_completed += 1\n # Get the rest examples in this epoch\n rest_num_examples = self._num_examples - start\n units_rest_part = self._units[start:self._num_examples]\n labels_rest_part = self._labels[start:self._num_examples]\n # Shuffle the data\n if shuffle:\n perm = np.arange(self._num_examples)\n np.random.shuffle(perm)\n self._units = self.units.iloc[perm]\n self._labels = self.labels[perm]\n # Start next epoch\n start = 0\n self._index_in_epoch = self._batch_size - rest_num_examples\n end = self._index_in_epoch\n units_new_part = self._units[start:end]\n labels_new_part = self._labels[start:end]\n return np.concatenate((units_rest_part, units_new_part), axis=0), np.concatenate(\n (labels_rest_part, labels_new_part), axis=0)\n else:\n self._index_in_epoch += self._batch_size\n end = self._index_in_epoch\n return self._units[start:end], self._labels[start:end]", "def next_batch(self, batch_size, fake_data=False, shuffle=True):\r\n if fake_data:\r\n fake_image = [1] * 784\r\n if self.one_hot:\r\n fake_label = [1] + [0] * 9\r\n else:\r\n fake_label = 0\r\n return [fake_image for _ in xrange(batch_size)], [\r\n fake_label for _ in xrange(batch_size)\r\n ]\r\n start = self._index_in_epoch\r\n # Shuffle for the first epoch\r\n if self._epochs_completed == 0 and start == 0 and shuffle:\r\n perm0 = numpy.arange(self._num_examples)\r\n numpy.random.shuffle(perm0)\r\n self._images = self.images[perm0]\r\n self._labels = self.labels[perm0]\r\n # Go to the next epoch\r\n if start + batch_size > self._num_examples:\r\n # Finished epoch\r\n self._epochs_completed += 1\r\n # Get the rest examples in this epoch\r\n rest_num_examples = self._num_examples - start\r\n 
images_rest_part = self._images[start:self._num_examples]\r\n labels_rest_part = self._labels[start:self._num_examples]\r\n # Shuffle the data\r\n if shuffle:\r\n perm = numpy.arange(self._num_examples)\r\n numpy.random.shuffle(perm)\r\n self._images = self.images[perm]\r\n self._labels = self.labels[perm]\r\n # Start next epoch\r\n start = 0\r\n self._index_in_epoch = batch_size - rest_num_examples\r\n end = self._index_in_epoch\r\n images_new_part = self._images[start:end]\r\n labels_new_part = self._labels[start:end]\r\n return numpy.concatenate((images_rest_part, images_new_part), axis=0), numpy.concatenate((labels_rest_part, labels_new_part), axis=0)\r\n else:\r\n self._index_in_epoch += batch_size\r\n end = self._index_in_epoch\r\n return self._images[start:end], self._labels[start:end]", "def get_batch(self, data, batch_size):\n minibatch, size_so_far = [], 0\n for ex in data:\n minibatch.append(ex)\n size_so_far = self.simple_batch_size_fn(ex, len(minibatch))\n if size_so_far == batch_size:\n yield minibatch\n minibatch, size_so_far = [], 0\n elif size_so_far > batch_size:\n yield minibatch[:-1]\n minibatch, size_so_far = minibatch[-1:], self.simple_batch_size_fn(ex, 1)\n if minibatch:\n yield minibatch", "def generate_next_batch(self, data): \n \n batch_words = np.array(data[self.batch_lookup[self.batch_index]][0])\n batch_labels = np.array(data[self.batch_lookup[self.batch_index]][1])\n self.batch_index += 1\n if self.batch_index == len(data) - 1:\n self.epoch += 1\n return batch_words, batch_labels", "def next(self):\n if self._curr_batch + 1 > self.num_batches:\n # no more batches in current iteration through data set so start\n # new epoch ready for another pass and indicate iteration is at end\n self.new_epoch()\n raise StopIteration()\n # create an index slice corresponding to current batch number\n batch_slice = slice(self._curr_batch * self.batch_size,\n (self._curr_batch + 1) * self.batch_size)\n inputs_batch = self.inputs[batch_slice]\n targets_batch = self.targets[batch_slice]\n # target_ids_global = self.target_ids[batch_slice]\n target_ids_batch = self.target_ids[batch_slice]\n self._curr_batch += 1\n\n batch_inputs, batch_target_ids, batch_targets = \\\n self.transform_batch(inputs_batch, target_ids_batch, targets_batch)\n\n return batch_inputs, batch_targets, batch_target_ids", "def next_sample(self, batch_size=1):\n\n X = []\n y = []\n self.cont = 2.\n for x in range(batch_size):\n if x > batch_size/2 and self.cont == 2:\n self.cont = 5.*np.random.rand()\n if self._random_state.rand() < 1/self.abrupt_drift_rate:\n #self.offset += 1\n pass\n y.append(self.offset + (x*self.cont)/self.granularity + self._random_state.normal(scale=1))\n X.append(float(x))\n self._x_idx += batch_size\n zipped = list(zip(X, y))\n if self.shuffleData:\n shuffle(zipped)\n \n\n return ([float(zipped[i][0]) for i in range(len(X))], [float(zipped[i][1]) for i in range(len(X))])", "def get_batch(self, batch_size: int) -> jnp.ndarray:\n\n self._rng, key = jax.random.split(self._rng)\n samples = jax.random.choice(\n key,\n self.dataset.shape[0] - self.eval_batch_size,\n shape=(batch_size,),\n replace=False)\n return self.dataset[samples, ...]", "def next_batch(x, y, batch_size):\n index = np.arange(n_labeled)\n random_index = np.random.permutation(index)[:batch_size]\n return x[random_index], y[random_index]", "def next_batch(x, y, batch_size):\n\n def as_batch(data, start, count):\n part = []\n for i in range(start, start + count):\n part.append(data[i])\n return np.array(part)\n\n for i in 
range(0, len(x)-batch_size, batch_size):\n yield as_batch(x, i, batch_size), as_batch(y, i, batch_size)", "def next_sample(self, batch_size=1):\n\n X = []\n y = []\n for x in range(batch_size):\n if self._random_state.rand() < 1/self.abrupt_drift_rate:\n self.offset = self.offset + self._random_state.rand()\n X.append(x + self._x_idx)\n y.append(math.sin(self.offset + x/(2*math.pi*self.granularity))\n + self._random_state.normal(scale=0.05) + self.cont)\n\n self._x_idx += batch_size\n\n return (X, y)", "def get_training_batch(self, batch_size):\n if self.current_state == 0:\n random.shuffle(self.training_indices)\n\n if (self.current_state + batch_size) > (len(self.training_indices) + 1):\n self.current_state = 0\n return self.get_training_batch(batch_size)\n else:\n self.current_state += batch_size\n batch_indices = self.training_indices[self.current_state:(self.current_state + batch_size)]\n if len(batch_indices) != batch_size:\n self.current_state = 0\n return self.get_training_batch(batch_size)\n return self.data_handler.slice_data(batch_indices)", "def next_batch(X1, X2, batch_size):\n tot = X1.shape[0]\n total = math.ceil(tot / batch_size)\n for i in range(int(total)):\n start_idx = i * batch_size\n end_idx = (i + 1) * batch_size\n end_idx = min(tot, end_idx)\n batch_x1 = X1[start_idx: end_idx, ...]\n batch_x2 = X2[start_idx: end_idx, ...]\n yield (batch_x1, batch_x2, (i + 1))", "def _next(self):\n batch_start, batch_end = self.batch_start, self.batch_start + self.batch_size\n X_batch, y_batch = self.X[batch_start:batch_end], self.y[batch_start:batch_end]\n X_batch, y_batch = self.process_batch(X_batch, y_batch)\n if batch_end > self.X.shape[0]:\n self.batch_start = 0\n else:\n self.batch_start = batch_end\n return X_batch, y_batch", "def next(self):\n if self.curr_idx == len(self.idx):\n raise StopIteration\n #i = batches index, j = starting record\n i, j = self.idx[self.curr_idx] \n self.curr_idx += 1\n\n indices = self.ndindex[i][j:j + self.batch_size]\n sentences = self.ndsent[i][j:j + self.batch_size]\n characters = self.ndchar[i][j:j + self.batch_size]\n label = self.ndlabel[i][j:j + self.batch_size]\n\n return DataBatch([sentences, characters], [label], pad=0, index = indices, bucket_key=self.buckets[i],\n provide_data=[DataDesc(name=self.data_names[0], shape=sentences.shape, layout=self.layout),\n DataDesc(name=self.data_names[1], shape=characters.shape, layout=self.layout)],\n provide_label=[DataDesc(name=self.label_name, shape=label.shape, layout=self.layout)])", "def batches(self, batch_size, count):\n entries = self.entries()\n for _ in range(count):\n yield [next(entries) for _ in range(batch_size)]", "def data_generator(delta=1, batch_size=32):\n while True:\n yield generate_samples(delta=delta, n=batch_size)", "def generate_batch(self, batch_size):\n n_words = len(self.center_words)\n while self.data_index <= n_words:\n self.data_index += batch_size\n yield self.center_words[self.data_index-batch_size:self.data_index], self.context_words[self.data_index-batch_size:self.data_index], self.neg_samples[self.data_index-batch_size:self.data_index, :]", "def get_batch(self, batch_size):\n n, _ = self.contexts.shape\n if self.buffer_s == -1:\n # use all the data\n ind = np.random.choice(range(n), batch_size)\n else:\n # use only buffer (last buffer_s observations)\n ind = np.random.choice(range(max(0, n - self.buffer_s), n), batch_size)\n return self.contexts[ind, :], self.rewards[ind, :]", "def get_batch(self, batch_size):\n return random.sample(self.buffer, batch_size)", 
"def batch_data(cls, train_data, train_labels, batch_size):\n for batch in range(int(np.ceil(train_data.shape[0] / batch_size))):\n start = batch_size * batch\n end = start + batch_size\n if end > train_data.shape[0]:\n yield batch, (train_data[start:train_data.shape[0]], \\\n train_labels[start:train_data.shape[0]])\n else:\n yield batch, (train_data[start:end], \\\n train_labels[start:end])", "def get_next_batch(self, onehot=True):\n if self.current_batch_idx == 0:\n self.permutation()\n next_beg = self.current_batch_idx * self.batch_size\n next_end = (self.current_batch_idx + 1) * self.batch_size\n if next_end > self.n_samples:\n next_end = self.n_samples\n self.current_batch_idx = 0\n data_batch = self.data.values[next_beg:next_end][:]\n if onehot is True:\n labels_batch = self.labels_onehot.values[next_beg:next_end][:]\n else:\n labels_batch = self.labels.values[next_beg:next_end][:]\n self.current_batch_idx += 1\n return data_batch, labels_batch", "def next(self):\n batches = [self._last_batch]\n for step in range(self._num_unrollings):\n batches.append(self._next_batch())\n self._last_batch = batches[-1]\n return batches" ]
[ "0.8355541", "0.8247464", "0.8239602", "0.8215164", "0.81823456", "0.81794965", "0.8149099", "0.8149099", "0.8118594", "0.8114728", "0.810647", "0.8063478", "0.80631614", "0.8062337", "0.8052529", "0.804097", "0.80327535", "0.80327535", "0.7869733", "0.78591037", "0.7831932", "0.7831932", "0.7800168", "0.7773927", "0.7714327", "0.7695777", "0.76740056", "0.7661208", "0.76463217", "0.75893563", "0.7578935", "0.75585455", "0.747297", "0.7417186", "0.74081373", "0.7390832", "0.73771477", "0.7372974", "0.7366086", "0.7366086", "0.73575705", "0.7355597", "0.73476404", "0.7346943", "0.733451", "0.7300296", "0.7281608", "0.7279639", "0.7234739", "0.72249275", "0.72092617", "0.7196126", "0.71929145", "0.7167144", "0.71582454", "0.7152521", "0.7144748", "0.7119212", "0.71091855", "0.71048313", "0.7050077", "0.7044903", "0.7001679", "0.7000799", "0.6961455", "0.69199204", "0.687444", "0.6869441", "0.684929", "0.68438524", "0.68164295", "0.6812866", "0.67921495", "0.67753863", "0.677472", "0.6765194", "0.67338383", "0.67282486", "0.6718009", "0.67155385", "0.6707198", "0.6704132", "0.66977346", "0.66852415", "0.6682645", "0.6661231", "0.6639616", "0.6626169", "0.6600478", "0.6596224", "0.65523374", "0.6526222", "0.651464", "0.6503261", "0.65005404", "0.649963", "0.64995855", "0.6474084", "0.6470436", "0.6470196" ]
0.7211136
50
Checks that certain pipeline files are not modified from template output. Iterates through the pipeline's directory content and compares specified files against output from the template using the pipeline's metadata. File content should not be modified or missing.
def files_unchanged(self): passed = [] failed = [] ignored = [] fixed = [] could_fix = False # Check that we have the minimum required config required_pipeline_config = {"manifest.name", "manifest.description", "manifest.author"} missing_pipeline_config = required_pipeline_config.difference(self.nf_config) if missing_pipeline_config: return {"ignored": [f"Required pipeline config not found - {missing_pipeline_config}"]} try: prefix, short_name = self.nf_config["manifest.name"].strip("\"'").split("/") except ValueError: log.warning( "Expected manifest.name to be in the format '<repo>/<pipeline>'. Will assume it is <pipeline> and default to repo 'nf-core'" ) short_name = self.nf_config["manifest.name"].strip("\"'") prefix = "nf-core" # NB: Should all be files, not directories # List of lists. Passes if any of the files in the sublist are found. files_exact = [ [".gitattributes"], [".prettierrc.yml"], ["CODE_OF_CONDUCT.md"], ["LICENSE", "LICENSE.md", "LICENCE", "LICENCE.md"], # NB: British / American spelling [os.path.join(".github", ".dockstore.yml")], [os.path.join(".github", "CONTRIBUTING.md")], [os.path.join(".github", "ISSUE_TEMPLATE", "bug_report.yml")], [os.path.join(".github", "ISSUE_TEMPLATE", "config.yml")], [os.path.join(".github", "ISSUE_TEMPLATE", "feature_request.yml")], [os.path.join(".github", "PULL_REQUEST_TEMPLATE.md")], [os.path.join(".github", "workflows", "branch.yml")], [os.path.join(".github", "workflows", "linting_comment.yml")], [os.path.join(".github", "workflows", "linting.yml")], [os.path.join("assets", "email_template.html")], [os.path.join("assets", "email_template.txt")], [os.path.join("assets", "sendmail_template.txt")], [os.path.join("assets", f"nf-core-{short_name}_logo_light.png")], [os.path.join("docs", "images", f"nf-core-{short_name}_logo_light.png")], [os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")], [os.path.join("docs", "README.md")], [os.path.join("lib", "nfcore_external_java_deps.jar")], [os.path.join("lib", "NfcoreTemplate.groovy")], ] files_partial = [ [".gitignore", ".prettierignore", "pyproject.toml"], ] # Only show error messages from pipeline creation logging.getLogger("nf_core.create").setLevel(logging.ERROR) # Generate a new pipeline with nf-core create that we can compare to tmp_dir = tempfile.mkdtemp() # Create a template.yaml file for the pipeline creation template_yaml = { "name": short_name, "description": self.nf_config["manifest.description"].strip("\"'"), "author": self.nf_config["manifest.author"].strip("\"'"), "prefix": prefix, } template_yaml_path = os.path.join(tmp_dir, "template.yaml") with open(template_yaml_path, "w") as fh: yaml.dump(template_yaml, fh, default_flow_style=False) test_pipeline_dir = os.path.join(tmp_dir, f"{prefix}-{short_name}") create_obj = nf_core.create.PipelineCreate( None, None, None, no_git=True, outdir=test_pipeline_dir, template_yaml_path=template_yaml_path ) create_obj.init_pipeline() # Helper functions for file paths def _pf(file_path): """Helper function - get file path for pipeline file""" return os.path.join(self.wf_path, file_path) def _tf(file_path): """Helper function - get file path for template file""" return os.path.join(test_pipeline_dir, file_path) # Files that must be completely unchanged from template for files in files_exact: # Ignore if file specified in linting config ignore_files = self.lint_config.get("files_unchanged", []) if any([f in ignore_files for f in files]): ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we 
can't find the file elif not any([os.path.isfile(_pf(f)) for f in files]): ignored.append(f"File does not exist: {self._wrap_quotes(files)}") # Check that the file has an identical match else: for f in files: try: if filecmp.cmp(_pf(f), _tf(f), shallow=True): passed.append(f"`{f}` matches the template") else: if "files_unchanged" in self.fix: # Try to fix the problem by overwriting the pipeline file shutil.copy(_tf(f), _pf(f)) passed.append(f"`{f}` matches the template") fixed.append(f"`{f}` overwritten with template file") else: failed.append(f"`{f}` does not match the template") could_fix = True except FileNotFoundError: pass # Files that can be added to, but that must contain the template contents for files in files_partial: # Ignore if file specified in linting config ignore_files = self.lint_config.get("files_unchanged", []) if any([f in ignore_files for f in files]): ignored.append(f"File ignored due to lint config: {self._wrap_quotes(files)}") # Ignore if we can't find the file elif not any([os.path.isfile(_pf(f)) for f in files]): ignored.append(f"File does not exist: {self._wrap_quotes(files)}") # Check that the file contains the template file contents else: for f in files: try: with open(_pf(f), "r") as fh: pipeline_file = fh.read() with open(_tf(f), "r") as fh: template_file = fh.read() if template_file in pipeline_file: passed.append(f"`{f}` matches the template") else: if "files_unchanged" in self.fix: # Try to fix the problem by overwriting the pipeline file with open(_tf(f), "r") as fh: template_file = fh.read() with open(_pf(f), "w") as fh: fh.write(template_file) passed.append(f"`{f}` matches the template") fixed.append(f"`{f}` overwritten with template file") else: failed.append(f"`{f}` does not match the template") could_fix = True except FileNotFoundError: pass # cleaning up temporary dir shutil.rmtree(tmp_dir) return {"passed": passed, "failed": failed, "ignored": ignored, "fixed": fixed, "could_fix": could_fix}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def files_exist(self):\n\n passed = []\n warned = []\n failed = []\n ignored = []\n\n # NB: Should all be files, not directories\n # List of lists. Passes if any of the files in the sublist are found.\n #: test autodoc\n try:\n _, short_name = self.nf_config[\"manifest.name\"].strip(\"\\\"'\").split(\"/\")\n except ValueError:\n log.warning(\"Expected manifest.name to be in the format '<repo>/<pipeline>'. Will assume it is '<pipeline>'.\")\n short_name = self.nf_config[\"manifest.name\"].strip(\"\\\"'\").split(\"/\")\n\n files_fail = [\n [\".gitattributes\"],\n [\".gitignore\"],\n [\".nf-core.yml\"],\n [\".editorconfig\"],\n [\".prettierignore\"],\n [\".prettierrc.yml\"],\n [\"CHANGELOG.md\"],\n [\"CITATIONS.md\"],\n [\"CODE_OF_CONDUCT.md\"],\n [\"CODE_OF_CONDUCT.md\"],\n [\"LICENSE\", \"LICENSE.md\", \"LICENCE\", \"LICENCE.md\"], # NB: British / American spelling\n [\"nextflow_schema.json\"],\n [\"nextflow.config\"],\n [\"README.md\"],\n [os.path.join(\".github\", \".dockstore.yml\")],\n [os.path.join(\".github\", \"CONTRIBUTING.md\")],\n [os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"bug_report.yml\")],\n [os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"config.yml\")],\n [os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"feature_request.yml\")],\n [os.path.join(\".github\", \"PULL_REQUEST_TEMPLATE.md\")],\n [os.path.join(\".github\", \"workflows\", \"branch.yml\")],\n [os.path.join(\".github\", \"workflows\", \"ci.yml\")],\n [os.path.join(\".github\", \"workflows\", \"linting_comment.yml\")],\n [os.path.join(\".github\", \"workflows\", \"linting.yml\")],\n [os.path.join(\"assets\", \"email_template.html\")],\n [os.path.join(\"assets\", \"email_template.txt\")],\n [os.path.join(\"assets\", \"sendmail_template.txt\")],\n [os.path.join(\"assets\", f\"nf-core-{short_name}_logo_light.png\")],\n [os.path.join(\"conf\", \"modules.config\")],\n [os.path.join(\"conf\", \"test.config\")],\n [os.path.join(\"conf\", \"test_full.config\")],\n [os.path.join(\"docs\", \"images\", f\"nf-core-{short_name}_logo_light.png\")],\n [os.path.join(\"docs\", \"images\", f\"nf-core-{short_name}_logo_dark.png\")],\n [os.path.join(\"docs\", \"output.md\")],\n [os.path.join(\"docs\", \"README.md\")],\n [os.path.join(\"docs\", \"README.md\")],\n [os.path.join(\"docs\", \"usage.md\")],\n [os.path.join(\"lib\", \"nfcore_external_java_deps.jar\")],\n [os.path.join(\"lib\", \"NfcoreTemplate.groovy\")],\n [os.path.join(\"lib\", \"Utils.groovy\")],\n [os.path.join(\"lib\", \"WorkflowMain.groovy\")],\n ]\n\n files_warn = [\n [\"main.nf\"],\n [os.path.join(\"assets\", \"multiqc_config.yml\")],\n [os.path.join(\"conf\", \"base.config\")],\n [os.path.join(\"conf\", \"igenomes.config\")],\n [os.path.join(\".github\", \"workflows\", \"awstest.yml\")],\n [os.path.join(\".github\", \"workflows\", \"awsfulltest.yml\")],\n [os.path.join(\"lib\", f\"Workflow{short_name[0].upper()}{short_name[1:]}.groovy\")],\n [\"modules.json\"],\n [\"pyproject.toml\"],\n ]\n\n # List of strings. 
Fails / warns if any of the strings exist.\n files_fail_ifexists = [\n \"Singularity\",\n \"parameters.settings.json\",\n \".nf-core.yaml\", # yml not yaml\n os.path.join(\"bin\", \"markdown_to_html.r\"),\n os.path.join(\"conf\", \"aws.config\"),\n os.path.join(\".github\", \"workflows\", \"push_dockerhub.yml\"),\n os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"bug_report.md\"),\n os.path.join(\".github\", \"ISSUE_TEMPLATE\", \"feature_request.md\"),\n os.path.join(\"docs\", \"images\", f\"nf-core-{short_name}_logo.png\"),\n \".markdownlint.yml\",\n \".yamllint.yml\",\n os.path.join(\"lib\", \"Checks.groovy\"),\n os.path.join(\"lib\", \"Completion.groovy\"),\n os.path.join(\"lib\", \"Workflow.groovy\"),\n ]\n files_warn_ifexists = [\".travis.yml\"]\n\n # Remove files that should be ignored according to the linting config\n ignore_files = self.lint_config.get(\"files_exist\", [])\n\n def pf(file_path):\n return os.path.join(self.wf_path, file_path)\n\n # First - critical files. Check that this is actually a Nextflow pipeline\n if not os.path.isfile(pf(\"nextflow.config\")) and not os.path.isfile(pf(\"main.nf\")):\n failed.append(\"File not found: nextflow.config or main.nf\")\n raise AssertionError(\"Neither nextflow.config or main.nf found! Is this a Nextflow pipeline?\")\n\n # Files that cause an error if they don't exist\n for files in files_fail:\n if any([f in ignore_files for f in files]):\n continue\n if any([os.path.isfile(pf(f)) for f in files]):\n passed.append(f\"File found: {self._wrap_quotes(files)}\")\n else:\n failed.append(f\"File not found: {self._wrap_quotes(files)}\")\n\n # Files that cause a warning if they don't exist\n for files in files_warn:\n if any([f in ignore_files for f in files]):\n continue\n if any([os.path.isfile(pf(f)) for f in files]):\n passed.append(f\"File found: {self._wrap_quotes(files)}\")\n else:\n warned.append(f\"File not found: {self._wrap_quotes(files)}\")\n\n # Files that cause an error if they exist\n for file in files_fail_ifexists:\n if file in ignore_files:\n continue\n if os.path.isfile(pf(file)):\n failed.append(f\"File must be removed: {self._wrap_quotes(file)}\")\n else:\n passed.append(f\"File not found check: {self._wrap_quotes(file)}\")\n\n # Files that cause a warning if they exist\n for file in files_warn_ifexists:\n if file in ignore_files:\n continue\n if os.path.isfile(pf(file)):\n warned.append(f\"File should be removed: {self._wrap_quotes(file)}\")\n else:\n passed.append(f\"File not found check: {self._wrap_quotes(file)}\")\n\n # Files that are ignoed\n for file in ignore_files:\n ignored.append(f\"File is ignored: {self._wrap_quotes(file)}\")\n\n return {\"passed\": passed, \"warned\": warned, \"failed\": failed, \"ignored\": ignored}", "def checkAllFilesGenerated(self):\n root = get_exhale_root(self)\n containmentFolder = self.getAbsContainmentFolder()\n for node in root.all_nodes:\n if node.kind in [\"enumvalue\", \"group\"]:\n continue\n gen_file_path = os.path.join(containmentFolder, node.file_name)\n self.assertTrue(\n os.path.isfile(gen_file_path),\n \"File for {kind} node with refid=[{refid}] not generated to [{gen_file_path}]!\".format(\n kind=node.kind, refid=node.refid, gen_file_path=gen_file_path\n )\n )", "def validate_files(dir, files_to_merge):\r\n for path in files_to_merge:\r\n pathname = dir.joinpath(path)\r\n if not pathname.exists():\r\n raise Exception(\"I18N: Cannot generate because file not found: {0}\".format(pathname))", "def check_generated_files(out_dir, output_list_file):\n xcpd_dir = 
os.path.join(out_dir, \"xcp_d\")\n found_files = sorted(glob(os.path.join(xcpd_dir, \"**/*\"), recursive=True))\n found_files = [os.path.relpath(f, out_dir) for f in found_files]\n\n # Ignore figures\n found_files = [f for f in found_files if \"figures\" not in f]\n\n with open(output_list_file, \"r\") as fo:\n expected_files = fo.readlines()\n expected_files = [f.rstrip() for f in expected_files]\n\n if sorted(found_files) != sorted(expected_files):\n expected_not_found = sorted(list(set(expected_files) - set(found_files)))\n found_not_expected = sorted(list(set(found_files) - set(expected_files)))\n\n msg = \"\"\n if expected_not_found:\n msg += \"\\nExpected but not found:\\n\\t\"\n msg += \"\\n\\t\".join(expected_not_found)\n\n if found_not_expected:\n msg += \"\\nFound but not expected:\\n\\t\"\n msg += \"\\n\\t\".join(found_not_expected)\n raise ValueError(msg)", "def test_yaml_file_watch(self):\n # Set initial data\n _setup_template_value('yaml_file_test_values.tmp.yml', 'yaml_file_test_values_1.yml')\n\n with TemplateRenderThread('yaml_file_test.t', 'yaml_file_test.tmp.out') as renderer:\n self.assertStringEqualToTemplateFileWithIterations(renderer.output_data_getter,\n 'yaml_file_test_values_expected_1.out')\n\n # Set updated data\n print('Updating file..')\n _setup_template_value('yaml_file_test_values.tmp.yml', 'yaml_file_test_values_2.yml')\n self.assertStringEqualToTemplateFileWithIterations(renderer.output_data_getter,\n 'yaml_file_test_values_expected_2.out')", "def test_duo_yaml_files_watch(self):\n # Set initial data\n _setup_template_value('yaml_file_test_values_first.tmp.yml', 'yaml_file_test_values_1.yml')\n _setup_template_value('yaml_file_test_values_second.tmp.yml', 'yaml_file_test_values_2.yml')\n\n with TemplateRenderThread('yaml_file_test_duo.t', 'yaml_file_test_duo.tmp.out') as renderer:\n self.assertStringEqualToTemplateFileWithIterations(renderer.output_data_getter,\n 'yaml_file_test_duo_expected_1.out')\n\n # Set updated data\n print('Updating first file..')\n _setup_template_value('yaml_file_test_values_first.tmp.yml', 'yaml_file_test_values_2.yml')\n self.assertStringEqualToTemplateFileWithIterations(renderer.output_data_getter,\n 'yaml_file_test_duo_expected_2.out')\n\n # Set updated data\n print('Updating second file..')\n _setup_template_value('yaml_file_test_values_second.tmp.yml', 'yaml_file_test_values_1.yml')\n self.assertStringEqualToTemplateFileWithIterations(renderer.output_data_getter,\n 'yaml_file_test_duo_expected_3.out')", "def test_files(self):\r\n\r\n for path in self.get_files():\r\n self.assertTrue(datetime.fromtimestamp(os.path.getmtime(path)) > self.start_time,\r\n msg='File not recently modified: %s' % os.path.basename(path))", "def check_out_files_exist(self):\n for filetype in self.filetypes:\n filename = self.out_filename(filetype)\n if not filename.is_file():\n log.error('MISSING: {}'.format(filename))\n return False\n\n return True", "def check_out_files_exist(self):\n for filetype in self.filetypes:\n filename = self.out_filename(filetype)\n if not filename.is_file():\n log.error('MISSING: {}'.format(filename))\n return False\n\n return True", "def check_out_files_exist(self):\n for filetype in self.filetypes:\n filename = self.out_filename(filetype)\n if not filename.is_file():\n log.error('MISSING: {}'.format(filename))\n return False\n\n return True", "def process_all_files():\n src_files = get_doc_files()\n\n for src_pathname in src_files:\n if src_pathname.suffix in MARKDOWN_EXTENSIONS:\n process_file_markdown(src_pathname)\n 
elif src_pathname.suffix in STATIC_ASSET_EXTENSIONS:\n process_file_copytodest(src_pathname)", "def output_files_exist(self):\n return all([split.exists() for split in self.split_files])", "def lint_every_rendered_component_has_a_fixture(files_to_lint):\n files_to_lint = lintutil.filter(files_to_lint, suffix='.html')\n\n for f in files_to_lint:\n contents_of_f = lintutil.file_contents(f)\n for m in RENDER_REACT_RE.finditer(contents_of_f):\n component_file = m.group(1)\n # To be server-side renderable, the fixture file has to be\n # a javascript file, not jsx or something else.\n fixture_file = component_file + '.fixture.js'\n if not os.path.exists(ka_root.join(fixture_file)):\n linenum = contents_of_f.count('\\n', 0, m.start()) + 1\n yield (f, linenum,\n '%s must have an associated fixture file %s'\n % (component_file, fixture_file))", "def file_checker():\n\n PATH_RELEASE1_IDEN = os.getcwd()+'/archive_all_2014-10/'\n PATH_RELEASE1_UNIDE = None\n #PATH_RELEASE1_UNIDE = os.getcwd()+'/archive_all_2014-10/'\n\n PATH_RELEASE2_IDEN = os.getcwd()+'/archive_all_2016-10/archive_identified_2016-10/'\n PATH_RELEASE2_UNIDE = os.getcwd() + '/archive_all_2016-10/archive_unidentified_2016-10/'\n\n\n #From here don't change anything.\n #This global function finds the .mgf files in paths\n list_of_files_release1_ide = glob.glob(PATH_RELEASE1_IDEN+'*.mgf')\n list_of_files_release1_unide = None #REMOVE THIS PART AND UNCOMMENT NEXT LINE IN NEXT RELEASES.\n\n #list_of_files_release1_unid = glob.glob(PATH_RELEASE1_UNID'+*.mgf')\n\n list_of_files_release2_ide = glob.glob(PATH_RELEASE2_IDEN+'*.mgf')\n list_of_files_release2_unide = glob.glob(PATH_RELEASE2_UNIDE+'*.mgf')\n\n\n #Check if exist cache folder. If not will make it. \n #RELEASE 1 \n if not os.path.exists(PATH_RELEASE1_IDEN+'cache'):\n os.makedirs(PATH_RELEASE1_IDEN+'cache')\n\n # if not os.path.exists(PATH_RELEASE1_UNIDE'+cache'):\n # os.makedirs(PATH_RELEASE1_UNIDE'+cache')\n\n #RELEASE2\n if not os.path.exists(PATH_RELEASE2_IDEN+'cache'):\n os.makedirs(PATH_RELEASE2_IDEN+'cache')\n\n if not os.path.exists(PATH_RELEASE2_UNIDE+'cache'):\n os.makedirs(PATH_RELEASE2_UNIDE+'cache')\n \n\n return PATH_RELEASE1_IDEN, \\\n PATH_RELEASE2_IDEN, \\\n PATH_RELEASE2_UNIDE, \\\n list_of_files_release1_ide, \\\n list_of_files_release2_ide, \\\n list_of_files_release2_unide", "def test_matched_pairs():\n template_filelist = listdir(RTEMPLATE_PATH)\n\n R_files = []\n json_files = []\n orphan_files = []\n for file in template_filelist:\n if '.r' in file:\n file = file.replace('.r', '')\n R_files.append(file)\n elif '.json' in file:\n file = file.replace('.json', '')\n json_files.append(file)\n else:\n orphan_files.append(file)\n\n ## make sure there are no non R/json files\n assert not bool(orphan_files) #file in path isn't .json or .R\n\n ## make sure every R file has a json pair\n assert not bool(\n set(R_files) - set(json_files)\n )", "def _assert_correct_files_are_present(outputdir: Path) -> None:\n for plane in PLANES:\n assert (outputdir / f\"{AMP_BETA_NAME}{plane.lower()}.tfs\").is_file()\n assert (outputdir / f\"{BETA_NAME}{plane.lower()}.tfs\").is_file()\n assert (outputdir / f\"{PHASE_NAME}{plane.lower()}.tfs\").is_file()\n assert (outputdir / f\"{TOTAL_PHASE_NAME}{plane.lower()}.tfs\").is_file()\n assert (outputdir / f\"{ORBIT_NAME}{plane.lower()}.tfs\").is_file()\n assert (outputdir / f\"{DISPERSION_NAME}x.tfs\").is_file()\n assert (outputdir / f\"{NORM_DISP_NAME}x.tfs\").is_file() # no norm disp in Y plane\n\n for rdt in [\"1001\", \"1010\"]:\n assert 
(outputdir / f\"f{rdt}.tfs\").is_file()", "def test_filecompare(self):\n cmp = filecmp.dircmp(self.root_gold, self.root_target, ignore=[])\n self.recursive_dircmp(cmp)", "def CheckFilesMatch(config):\n\n diff_errors = []\n\n file_pairs = _GetFilePairs(config)\n missing_files, stale_files = _GetMissingAndStaleFiles(file_pairs)\n\n for pair in missing_files:\n diff_errors.append(\"File %s does not exist\" % pair.target)\n continue\n\n for pair in stale_files:\n diff_errors.append(\"File %s is out of date\" % pair.target)\n\n if diff_errors:\n error_msg = \"Files out of date!\\n\\n\"\n error_msg += \"To fix run THIS command:\\n\"\n error_msg += \" bazel-bin/%s/%s --fix\\n\\n\" % (config.package_name,\n config.target_name)\n error_msg += \"Errors:\\n\"\n error_msg += \" \" + \"\\n \".join(diff_errors)\n return error_msg\n else:\n return None", "def __render_templates(files_to_render, dest_location, jinja_env):\n errors = []\n\n from jinja2.exceptions import TemplateNotFound\n\n for template_file in files_to_render:\n filename = os.path.abspath(os.path.join(dest_location, template_file))\n\n print(\"Pillar template_file: {} --> {}\".format(template_file, filename))\n\n if not os.path.isdir(os.path.dirname(filename)):\n os.makedirs(os.path.dirname(filename))\n\n try:\n print(\"Attempting to load template_file: {}\".format(template_file))\n template_rendered = jinja_env.get_template(template_file).render(env=env)\n print(green(\"Pillar template_file rendered: {} --> {}\".format(template_file, filename)))\n\n # Only write the template file if we can actually render it\n with open(os.path.join(dest_location, template_file), 'w') as f:\n f.write(template_rendered)\n\n except TemplateNotFound:\n errors.append(template_file)\n print(red(\"Pillar template_file not found: {} --> {}\".format(template_file, filename)))\n\n if not len(errors):\n print(green(\"Pillar was successfully rendered in: {}\".format(dest_location)))\n else:\n print(red(\"Pillar could not compile the following templates:\"))\n for error in errors:\n print(red(\" - {}\").format(error))\n\n return len(errors) == 0", "def build(self) -> None:\n def do_process(fname) -> bool:\n for sfx in skip_suffixes:\n if fname.endswith(sfx):\n return False\n return True\n\n for dirpath, _, fnames in os.walk(self.template_dir):\n for fname in fnames:\n if do_process(fname):\n self.process(dirpath, fname)", "def check_comps(root, comps):\n for key, comp in comps.items():\n\n filename = os.path.join(root, comp['filename'])\n if not os.path.isfile(filename):\n warnings.warn(\n 'The file {0} could not be found'.format(filename))", "def _check_file_not_used(self):\n module_files = set(self._get_module_files())\n referenced_files = set(self._get_manifest_referenced_files()).union(\n set(self._get_xml_referenced_files())\n )\n excluded_dirs = ['static', 'test', 'tests', 'migrations']\n no_referenced_files = [\n f for f in (module_files - referenced_files)\n if f.split(os.path.sep)[0] not in excluded_dirs\n ]\n self.msg_args = no_referenced_files\n return not no_referenced_files", "def comp_files(cfg, atom_id_dict, type_dicts):\n first_content, first_section_order = proc_data_file(cfg, cfg[DATA_FILE], atom_id_dict, type_dicts,)\n second_content, second_section_order = proc_data_file(cfg, cfg[DATA_COMP], atom_id_dict, type_dicts,)\n\n for section in second_section_order:\n if section not in first_section_order:\n warning(\"Skipping section '{}'; section found in the file: {}\\n\"\n \" but not in file: {}\".format(section, cfg[DATA_COMP], cfg[DATA_FILE]))\n\n 
diffs = [\"Differences in head section:\"]\n compare_heads(first_content[SEC_HEAD], second_content[SEC_HEAD], diffs)\n\n for section in first_section_order:\n if section not in second_section_order:\n warning(\"Skipping section '{}'; section found in the file: {}\\n\"\n \" but not in file: {}\".format(section, cfg[DATA_FILE], cfg[DATA_COMP]))\n elif section in [SEC_VELOS]:\n diffs.append(\"\\nSkipping section '{}'\".format(section))\n elif section in COMP_ORD_SEC_COL_DICT:\n diffs.append(\"\\nDifferences in section '{}':\".format(section))\n num_col_to_compare = COMP_ORD_SEC_COL_DICT[section]\n compare_lists(first_content[section], second_content[section], 0, num_col_to_compare, diffs,\n SEC_FORMAT_DICT[section][0], SEC_FORMAT_DICT[section][1])\n elif section in NUM_SEC_DICT:\n diffs.append(\"\\nDifferences in section '{}':\".format(section))\n num_col_to_compare = NUM_SEC_DICT[section][1]\n compare_lists(first_content[section], second_content[section], 1, num_col_to_compare, diffs,\n SEC_FORMAT_DICT[section][0], SEC_FORMAT_DICT[section][1])\n else:\n print(\"Encountered unexpected section '{}'\".format(section))\n\n f_name = create_out_fname(cfg[DATA_COMP], prefix='diffs_', ext='.txt')\n list_to_file(diffs, f_name)\n print('Completed writing {}'.format(f_name))", "def test_input_files(self):\n files = list_files_folder(data_dir + \"build-custom/files/\", ext=\"fna.gz\")\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_files\"\n params[\"input\"] = files\n params[\"input_extension\"] = \"\"\n cfg = Config(\"build-custom\", **params)\n self.assertTrue(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom run failed\")\n res = build_sanity_check_and_parse(vars(cfg))\n self.assertIsNotNone(res, \"ganon build-custom sanity check failed\")\n\n self.assertTrue(res[\"target\"][\"file\"].isin(files).all(), \"Files missing from target\")\n self.assertEqual(len(files), res[\"target\"].shape[0], \"Wrong number of files on target\")\n self.assertTrue(res[\"info\"][\"file\"].isin(files).all(), \"Files missing from info\")\n self.assertEqual(len(files), res[\"info\"].shape[0], \"Wrong number of files on info\")\n\n # All files are invalid\n files = [f+\".xxx\" for f in files]\n params = self.default_params.copy()\n params[\"db_prefix\"] = self.results_dir + \"test_input_files_invalid\"\n params[\"input\"] = files\n params[\"input_extension\"] = \"\"\n cfg = Config(\"build-custom\", **params)\n self.assertFalse(run_ganon(cfg, params[\"db_prefix\"]), \"ganon build-custom ran but it should fail\")", "def test_verify_corrupt_archive_compare_data(self):\n self.backup(u\"full\", u\"testfiles/various_file_types\", options=[])\n output_files = os.listdir(\"testfiles/output\")\n archives = [elem for elem in output_files if \"vol\" in elem]\n for archive in archives:\n # Edit source file\n with open(\"testfiles/output/\" + archive, 'r+') as f:\n f.write('This writes text into each archive file to corrupt it.')\n # Test verify for the file\n try:\n self.verify(u'testfiles/various_file_types/executable', file_to_verify=u'executable',\n options=[u\"--compare-data\"])\n except CmdError as e:\n # Should return a 21 error code for \"hash mismatch\"\n self.assertEqual(e.exit_status, 21, str(e))\n else:\n self.fail('Expected Hash Mismatch Error not thrown')", "def precheck(self):\n if (not dfs.exists(self.outputpath)):\n logger.debug(\"precheck(%s): outputpath %s does not exist, ready to run.\" \n % (self, self.outputpath))\n return 'ready'\n inTSs = [dfs.modtime(file) 
for file in self.inputpaths]\n outTS = dfs.modtime(self.outputpath)\n newer = reduce(lambda x,y: x or y, [(inTS>outTS) for inTS in inTSs])\n logger.debug(\"Input timestamps: %s\" % inTSs)\n logger.debug(\"Output timestamp: %s\" % outTS)\n if newer:\n logger.debug(\"At least one input file is newer than outputfile, ready to run.\")\n dfs.delete(self.outputpath)\n return 'ready'\n else:\n logger.debug(\"All input files are newer than outputfile, skipping.\")\n return 'skip'", "def check_all_files_and_dirs(self):\n err = 0\n err_m = ''\n warning = 0\n warning_m = ''\n # Check the pdb file for refinement\n if self.refine_pdb_in == None:\n err = 1\n err_m += '\\nPdb file should be supplied'\n else:\n if self.check_single_file(self.refine_pdb_in):\n self.refine_pdb_in = os.path.abspath(self.refine_pdb_in)\n else:\n err = 1\n err_m += '\\nFile not found: %s' %(self.refine_pdb_in)\n\n # Check the pdb file for distance analysis\n if self.check_single_file(self.X8_pdb_in):\n self.X8_pdb_in = os.path.abspath(self.X8_pdb_in)\n else:\n self.X8_pdb_in != None\n warning = 1\n warning_m += '\\nXtrapol8 pdb_in not found. No additional analysis will be applied'\n\n # Check additional files and append them to a string\n additional = \"\"\n for fle in self.additional:\n if len(fle)>0:\n if self.check_single_file(fle):\n new_add = os.path.abspath(fle)\n additional = additional + \"%s \" % (new_add)\n else:\n err = 1\n err_m += '\\nFile not found: %s' %(fle)\n self.additional = additional\n\n #Check the output directory\n if os.path.isdir(self.outdir):\n self.outdir = os.path.abspath(self.outdir)\n else:\n err = 1\n err_m += \"\\nXtrapol8 output directory cannot be found.\" \\\n \"Please run this from the same directory from which you ran Xtrapol8.\"\n\n #Check the phil file for reciprocal space refinement\n if self.check_single_file(self.reciprocal_space_phil):\n self.reciprocal_space_phil = os.path.abspath(self.reciprocal_space_phil)\n else:\n self.reciprocal_space_phil = ''\n warning = 1\n warning_m += '\\nPhil for reciprocal space refinement not found. Refinement will use default parameters.'\n\n\n #Check the phil file for real space refinement\n if self.check_single_file(self.real_space_phil):\n self.real_space_phil = os.path.abspath(self.real_space_phil)\n else:\n self.real_space_phil = ''\n warning = 1\n warning_m += '\\nPhil for real space refinement not found. Refinement will use default parameters.'\n\n #Check the residue list for distance analysis\n if self.check_single_file(self.residue_list):\n self.residue_list = os.path.abspath(self.residue_list)\n else:\n self.residue_list = None\n warning = 1\n warning_m += '\\nResidue list not found. 
Distance analysis (if required) will be performed without residue list.'\n\n return err, err_m, warning, warning_m", "def test_filter_file_exceptions_early_dupes():\n exceptions = Exceptions(os.path.join(os.path.dirname(__file__),\n 'early_exceptions.yaml'))\n\n package = Package('test', os.path.dirname(__file__))\n files = [os.path.join(os.path.dirname(__file__),\n 'unlikelystring'),\n os.path.join(os.path.dirname(__file__),\n 'unlikelystring')]\n\n filtered_files = exceptions.filter_file_exceptions_early(package, files)\n\n assert not filtered_files", "def should_run(self):\n # from IPython.html.tasks.py\n\n css_targets = [pjoin(static, 'css', 'style.min.css')]\n css_maps = [t + '.map' for t in css_targets]\n targets = css_targets + css_maps\n if not all(os.path.exists(t) for t in targets):\n # some generated files don't exist\n return True\n earliest_target = sorted(mtime(t) for t in targets)[0]\n\n # check if any .less files are newer than the generated targets\n for dirpath, dirnames, filenames in os.walk(static):\n for f in filenames:\n if f.endswith('.less'):\n path = pjoin(static, dirpath, f)\n timestamp = mtime(path)\n if timestamp > earliest_target:\n return True\n\n return False", "def affected_testfiles(files_changed: Iterable[Text],\n skip_dirs: Optional[Set[Text]] = None,\n manifest_path: Optional[Text] = None,\n manifest_update: bool = True\n ) -> Tuple[Set[Text], Set[Text]]:\n if skip_dirs is None:\n skip_dirs = {\"conformance-checkers\", \"docs\", \"tools\"}\n affected_testfiles = set()\n # Exclude files that are in the repo root, because\n # they are not part of any test.\n files_changed = [f for f in files_changed if not _in_repo_root(f)]\n nontests_changed = set(files_changed)\n wpt_manifest = load_manifest(manifest_path, manifest_update)\n\n test_types = [\"crashtest\", \"print-reftest\", \"reftest\", \"testharness\", \"wdspec\"]\n support_files = {os.path.join(wpt_root, path)\n for _, path, _ in wpt_manifest.itertypes(\"support\")}\n wdspec_test_files = {os.path.join(wpt_root, path)\n for _, path, _ in wpt_manifest.itertypes(\"wdspec\")}\n test_files = {os.path.join(wpt_root, path)\n for _, path, _ in wpt_manifest.itertypes(*test_types)}\n\n interface_dir = os.path.join(wpt_root, 'interfaces')\n interfaces_files = {os.path.join(wpt_root, 'interfaces', filename)\n for filename in os.listdir(interface_dir)}\n\n interfaces_changed = interfaces_files.intersection(nontests_changed)\n nontests_changed = nontests_changed.intersection(support_files)\n\n tests_changed = {item for item in files_changed if item in test_files}\n\n nontest_changed_paths = set()\n rewrites: Dict[Text, Text] = {\"/resources/webidl2/lib/webidl2.js\": \"/resources/WebIDLParser.js\"}\n for full_path in nontests_changed:\n rel_path = os.path.relpath(full_path, wpt_root)\n path_components = rel_path.split(os.sep)\n top_level_subdir = path_components[0]\n if top_level_subdir in skip_dirs:\n continue\n repo_path = \"/\" + os.path.relpath(full_path, wpt_root).replace(os.path.sep, \"/\")\n if repo_path in rewrites:\n repo_path = rewrites[repo_path]\n full_path = os.path.join(wpt_root, repo_path[1:].replace(\"/\", os.path.sep))\n nontest_changed_paths.add((full_path, repo_path))\n\n interfaces_changed_names = [os.path.splitext(os.path.basename(interface))[0]\n for interface in interfaces_changed]\n\n def affected_by_wdspec(test: Text) -> bool:\n affected = False\n if test in wdspec_test_files:\n for support_full_path, _ in nontest_changed_paths:\n # parent of support file or of \"support\" directory\n parent 
= os.path.dirname(support_full_path)\n if os.path.basename(parent) == \"support\":\n parent = os.path.dirname(parent)\n relpath = os.path.relpath(test, parent)\n if not relpath.startswith(os.pardir):\n # testfile is in subtree of support file\n affected = True\n break\n return affected\n\n def affected_by_interfaces(file_contents: Text) -> bool:\n if len(interfaces_changed_names) > 0:\n if 'idlharness.js' in file_contents:\n for interface in interfaces_changed_names:\n regex = '[\\'\"]' + interface + '(\\\\.idl)?[\\'\"]'\n if re.search(regex, file_contents):\n return True\n return False\n\n for root, dirs, fnames in os.walk(wpt_root):\n # Walk top_level_subdir looking for test files containing either the\n # relative filepath or absolute filepath to the changed files.\n if root == wpt_root:\n for dir_name in skip_dirs:\n dirs.remove(dir_name)\n for fname in fnames:\n test_full_path = os.path.join(root, fname)\n # Skip any file that's not a test file.\n if test_full_path not in test_files:\n continue\n if affected_by_wdspec(test_full_path):\n affected_testfiles.add(test_full_path)\n continue\n\n with open(test_full_path, \"rb\") as fh:\n raw_file_contents: bytes = fh.read()\n if raw_file_contents.startswith(b\"\\xfe\\xff\"):\n file_contents: Text = raw_file_contents.decode(\"utf-16be\", \"replace\")\n elif raw_file_contents.startswith(b\"\\xff\\xfe\"):\n file_contents = raw_file_contents.decode(\"utf-16le\", \"replace\")\n else:\n file_contents = raw_file_contents.decode(\"utf8\", \"replace\")\n for full_path, repo_path in nontest_changed_paths:\n rel_path = os.path.relpath(full_path, root).replace(os.path.sep, \"/\")\n if rel_path in file_contents or repo_path in file_contents or affected_by_interfaces(file_contents):\n affected_testfiles.add(test_full_path)\n continue\n\n return tests_changed, affected_testfiles", "def test_script_exists(self):\n get_files=os.listdir(\"../../taxonomy/src_files\")\n self.assertIn(\"validate_match_batch.py\", get_files)", "def test_with_files(self, files):\n files_to_rename = list(set(self.files) - set(files))\n files_to_skip = []\n\n # Generate a unique suffix to append to files we want to ignore.\n index = 0\n file_rename_suffix = '___%d' % index\n while any([f.endswith(file_rename_suffix) for f in files_to_rename]):\n index += 1\n file_rename_suffix = '___%d' % index\n\n # Rename all files in the test case's file list but not the specified one.\n for file_to_rename in files_to_rename:\n absolute_file_to_rename = os.path.join(self.input_directory,\n file_to_rename)\n try:\n os.rename(absolute_file_to_rename,\n '%s%s' % (absolute_file_to_rename, file_rename_suffix))\n except OSError:\n # This can happen if we have already renamed a directory with files\n # under it. 
In this case, make sure we don't try to change the name\n # back later.\n files_to_skip.append(file_to_rename)\n\n # Clean up any issues with modifications of resources in subdirectories.\n for file_to_skip in files_to_skip:\n files_to_rename.remove(file_to_skip)\n files_to_rename.reverse()\n\n result = self.run()\n\n # Restore previously renamed files to their original locations.\n for file_to_rename in files_to_rename:\n absolute_file_to_rename = os.path.join(self.input_directory,\n file_to_rename)\n os.rename('%s%s' % (absolute_file_to_rename, file_rename_suffix),\n absolute_file_to_rename)\n\n return self._handle_test_result(result)", "def _CheckNoOverlappingFileNamesInResourceDirsRule(input_api, output_api):\n res_dir_file_names_map = {}\n for f in input_api.AffectedFiles():\n local_path = input_api.os_path.relpath(\n f.AbsoluteLocalPath(),\n input_api.PresubmitLocalPath()).replace('\\\\', '/')\n for res_dir_local_path in RES_DIR_LOCAL_PATHS:\n if local_path.startswith(res_dir_local_path):\n file_name = input_api.os_path.basename(local_path)\n res_dir_file_names_map.setdefault(res_dir_local_path, set()).add(\n file_name)\n break\n\n if len(res_dir_file_names_map) == 0:\n return []\n\n overlapping_file_names = set()\n for res_dir, file_names in res_dir_file_names_map.items():\n for other_res_dir, other_file_names in res_dir_file_names_map.items():\n if res_dir == other_res_dir:\n continue\n\n # Check for affected files with identical name in |other_res_dir|.\n overlapping_file_names |= (file_names & other_file_names)\n\n # Check for existing files with identical name in |other_res_dir|.\n overlapping_file_names.update(\n _FindFileNamesInDirectory(input_api, other_res_dir, file_names))\n\n if len(overlapping_file_names) > 0:\n error_msg = ('Resources in different top level res/ directories {} should '\n 'have different names:').format(RES_DIR_LOCAL_PATHS)\n return [output_api.PresubmitError(error_msg,\n items=list(overlapping_file_names))]\n return []", "def test_provider_system_hook_file_shred(change_dir, clean_files):\n files = ['stuff', 'thing', 'foo']\n for f in files:\n file = open(f, \"w\")\n file.write(f)\n file.close()\n\n tackle('.', no_input=True, context_file='shred.yaml')\n\n for f in files:\n assert not os.path.isfile(f)", "def check_systtests_pickle_files(self):\n # Make sure that there have been no more new scan points run since this\n # last processing. 
To do this, get the number of output directories\n # Compare this to the number in the pickle files.\n self.num_systematics = {}\n for basename in nsort(os.listdir(self.logdir)):\n if 'pckl' in basename:\n continue\n basename_content = nsort(\n os.listdir(os.path.join(self.logdir, basename))\n )\n # This means it is a directory containing something useful\n if 'config_summary.json' in basename_content:\n bits = basename.split('toy_')[-1].split('_')\n toyname = None\n add_bit = True\n for bit in bits:\n if bit == '' or bit == 'inj':\n add_bit = False\n if add_bit:\n if toyname is None:\n toyname = bit\n else:\n toyname += '_%s'%bit\n if '_full_syst_baseline' in toyname:\n toyname = toyname.split('_full_syst_baseline')[0]\n toyname = 'toy_%s_asimov'%toyname\n if toyname not in self.num_systematics.keys():\n self.num_systematics[toyname] = 0\n if 'wrong' in basename:\n # Only want to include each systematic once, but\n # they will have two directions.\n if 'pve' in basename:\n self.num_systematics[toyname] += 1\n else:\n self.num_systematics[toyname] += 1\n data_sets = from_file(os.path.join(self.logdir,\n 'data_sets.pckl'))\n if sorted(data_sets.keys()) != sorted(self.num_systematics.keys()):\n logging.info(\n 'Found files I assume to be from a previous run of'\n ' this processing script containing these truths: %s. '\n 'However, based on the directories in the overall '\n 'output directory there should be these truths: %s, so '\n 'they will be regenerated.'%(\n sorted(data_sets.keys()),\n sorted(self.num_systematics.keys())\n )\n )\n pickle_there = True\n for toyname in sorted(self.num_systematics.keys()):\n if len(data_sets[toyname].keys()) != self.num_systematics[toyname]:\n pickle_there = False\n if pickle_there:\n logging.info(\n 'Found files I assume to be from a previous run of'\n ' this processing script containing %i sytematics. If '\n 'this seems incorrect please delete the files: '\n 'data_sets.pckl, all_params.pckl and labels.pckl '\n 'from the logdir you have provided.'%(\n self.num_systematics[self.num_systematics.keys()[0]])\n )\n else:\n logging.info(\n 'Found files I assume to be from a previous run of'\n ' this processing script containing %i systematics. 
'\n 'However, based on the number of directories in the overall '\n 'output directory there should be %i systematics in '\n 'these pickle files, so they will be regenerated.'%(\n len(data_sets[data_sets.keys()[0]].keys()),\n self.num_systematics[self.num_systematics.keys()[0]]\n )\n )\n pickle_there = False\n\n return pickle_there", "def checking_files(input_file , parser):\n if input_file == None:\n curr_dir= os.getcwd() \n number_files= (len([name for name in os.listdir(curr_dir) if name.endswith(\".pdb\")]))\n if number_files == 0:\n raise NoPDBFiles(number_files)\n if parser:\n s= \"%s PDB files found\\n\" %(number_files)\n sys.stdout.write(s)\n return create_dict_pdb(curr_dir) \n elif os.path.isdir(input_file) == True:\n number_files= (len([name for name in os.listdir(input_file) if name.endswith(\".pdb\")]))\n if number_files == 0:\n raise NoPDBFiles(number_files)\n if parser:\n s= \"%s PDB files found\\n\" %(number_files)\n sys.stdout.write(s) \n return create_dict_pdb(input_file)", "def walk_files():\n\n # TODO: not check twice the same dir or file\n for path in config.targets:\n abs_path = os.path.join(cwd, path)\n\n if not os.path.islink(abs_path) and os.path.isfile(abs_path):\n walked.append(abs_path)\n yield abs_path\n #process_file(abs_path)\n\n if os.path.isdir(abs_path):\n walked.append(abs_path)\n for root, dirs, files in os.walk(abs_path):\n for fname in files:\n if isbackup(fname):\n continue\n abs_path = os.path.join(root, fname)\n walked.append(abs_path)\n if not os.path.islink(abs_path) and\\\n os.path.isfile(abs_path):\n base, name = os.path.split(abs_path)\n XXX, ext = os.path.splitext(name)\n\n ignored = False\n for pattern in IGNORE_FILES:\n if pattern.search(fname):\n ignored = True\n break\n\n # maybe should be merged with IGNORE_FILES?\n for regexp in config.exclude_list:\n if regexp.search(fname):\n ignored = True\n break\n\n if not ignored:\n for test_ext in config.disallow_exts:\n if test_ext == ext:\n ignored = True\n break\n\n if not ignored:\n if config.allow_exts:\n ignored = True\n for test_ext in config.allow_exts:\n if test_ext == ext:\n ignored = False\n break\n\n if not ignored:\n yield abs_path\n #process_file(abs_path)\n\n for dir in dirs[:]:\n if dir in IGNORE_DIRS:\n dirs.remove(dir)\n if dir in dirs:\n dirs.remove(dir)\n # mayb be should be merged with IGNORE_DIRS?\n else:\n for regexp in config.exclude_list:\n if regexp.search(dir):\n # This check is required\n # because several different patterns\n # could match one file name\n if dir in dirs:\n dirs.remove(dir)\n\n for dir in dirs:\n abs_path = os.path.join(root, dir)\n walked.append(abs_path)", "def test_verify_corrupt_archive(self):\n self.backup(u\"full\", u\"testfiles/various_file_types\", options=[])\n output_files = os.listdir(\"testfiles/output\")\n archives = [elem for elem in output_files if \"vol\" in elem]\n for archive in archives:\n # Edit source file\n with open(\"testfiles/output/\" + archive, 'r+') as f:\n f.write('This writes text into each archive file to corrupt it.')\n # Test verify for the file\n try:\n self.verify(u'testfiles/various_file_types/executable', file_to_verify=u'executable', options=[])\n except CmdError as e:\n # Should return a 21 error code for \"hash mismatch\"\n self.assertEqual(e.exit_status, 21, str(e))\n else:\n self.fail('Expected Hash Mismatch Error not thrown')", "def _get_changes_not_staged_for_commit(wit_path):\n\n files = {os.path.relpath(file, wit_path):\n get_full_path(file, '.wit', 'staging_area')\n for file in 
_get_all_files_names(wit_path)}\n\n for file in _get_staging_area_files(wit_path):\n if os.path.relpath(file, wit_path) in files:\n yield {os.path.relpath(file, wit_path): _compare_file(file, files[os.path.relpath(file, wit_path)])}", "def main():\n for file_name in os.listdir(CONTENT_FOLDER):\n if file_name.endswith('.html'):\n try_generate_page(file_name)", "def output_out_of_date(self):\n if not os.path.exists(self.output_file):\n logging.info(\"will generate, missing binding output file\")\n return True\n output_mtime = os.path.getmtime(self.output_file)\n if self._any_files_newer(self.header_files, output_mtime):\n logging.info(\"will generate, header files newer\")\n return True\n if self._any_files_newer(self.interface_files, output_mtime):\n logging.info(\"will generate, interface files newer\")\n return True\n if self._file_newer(self.input_file, output_mtime):\n logging.info(\"will generate, swig input file newer\")\n return True\n if self._file_newer(self.extensions_file, output_mtime):\n logging.info(\"will generate, swig extensions file newer\")\n return True\n if self._file_newer(self.wrapper_file, output_mtime):\n logging.info(\"will generate, swig wrapper file newer\")\n return True\n if self._file_newer(self.typemaps_file, output_mtime):\n logging.info(\"will generate, swig typemaps file newer\")\n return True\n if self._file_newer(self.safecast_file, output_mtime):\n logging.info(\"will generate, swig safecast file newer\")\n return True\n\n # If we made it here, nothing is newer than the output file.\n # Thus, the output file is not out of date.\n return False", "def process_files(self):\n matcher = self.choose_algorithm()\n # process one file at the time for better memory management\n for i, element in enumerate(self.input):\n filepath, _ = element\n\n try:\n with open(filepath, \"r\", encoding=\"utf-8\") as readfile:\n for line in readfile:\n matcher.find_match(line, self.case_insensitive)\n\n # collect unreadeable files for error log\n except Exception:\n self.errors.append(str(filepath))\n\n # copy results and reset matcher for next file\n self.__results = matcher.results\n\n if self.counter:\n self.__results = matcher.counts\n\n matcher.reset()\n\n # output - print or json\n if self.results:\n self.output(element)\n\n # if json print progress bar\n if self.json:\n self.progress_bar(i+1, len(self.input), prefix=\"Matching:\",\n fixed_len=True, length=40)", "def test_provider_system_hook_file(change_dir, clean_files):\n tackle(no_input=True)\n assert 'thing.yaml' in os.listdir()\n assert 'stuff' in os.listdir()\n # If the file has been moved properly there should be only one file\n assert len(os.listdir('stuff')) == 3", "def check_for_inplace_data(file_list, file_name_list, job_sets, config):\n cache_path = config.get('global').get('data_cache_path')\n sim_end_year = int(config.get('global').get('simulation_end_year'))\n if not os.path.exists(cache_path):\n os.makedirs(cache_path)\n return\n\n patterns = config.get('global').get('output_patterns')\n input_dirs = [os.path.join(cache_path, key) for key, val in patterns.items()]\n for input_dir in input_dirs:\n file_type = input_dir.split(os.sep)[-1]\n for input_file in os.listdir(input_dir):\n input_file_path = os.path.join(input_dir, input_file)\n file_key = \"\"\n if file_type in ['ATM', 'MPAS_AM', 'MPAS_CICE', 'MPAS_RST']:\n file_key = filename_to_file_list_key(filename=input_file)\n index = file_key.find('-')\n year = int(file_key[:index])\n if year > sim_end_year:\n continue\n if not file_list[file_type][file_key] 
== SetStatus.IN_TRANSIT:\n file_list[file_type][file_key] = SetStatus.DATA_READY\n elif file_type == 'MPAS_CICE_IN':\n file_key = 'mpas-cice_in'\n if os.path.exists(os.path.join(input_dir, input_file)) and \\\n not file_list[file_type][file_key] == SetStatus.IN_TRANSIT:\n file_list[file_type][file_key] = SetStatus.DATA_READY\n elif file_type == 'MPAS_O_IN':\n file_key = 'mpas-o_in'\n if os.path.exists(os.path.join(input_dir, input_file)) and \\\n not file_list[file_type][file_key] == SetStatus.IN_TRANSIT:\n file_list[file_type][file_key] = SetStatus.DATA_READY\n elif file_type == 'STREAMS':\n for file_key in ['streams.cice', 'streams.ocean']:\n file_name_list[file_type][file_key] = input_file\n if os.path.exists(os.path.join(input_dir, input_file)) and \\\n not file_list[file_type][file_key] == SetStatus.IN_TRANSIT:\n file_list[file_type][file_key] = SetStatus.DATA_READY\n elif file_type == 'RPT':\n for file_key in ['rpointer.ocn', 'rpointer.atm']:\n file_name_list[file_type][file_key] = input_file\n if os.path.exists(os.path.join(input_dir, input_file)) and \\\n not file_list[file_type][file_key] == SetStatus.IN_TRANSIT:\n file_list[file_type][file_key] = SetStatus.DATA_READY\n file_name_list[file_type][file_key] = input_file\n\n for key, val in patterns.items():\n for file_key in file_list[key]:\n if file_list[key][file_key] != SetStatus.DATA_READY:\n # print 'file: {} {} is not ready'.format(key, file_key)\n # sys.exit()\n return False\n return True", "def test_pnictogen():\n for template in templates:\n template_prefix, extension = os.path.splitext(template)\n for xyz_file in example_xyz_files:\n input_prefix, xyz_file_extension = os.path.splitext(xyz_file)\n\n mol = Atoms(\n cclib.bridge.cclib2openbabel.readfile(xyz_file, xyz_file_extension[1:])\n )\n written_files = pnictogen(mol, input_prefix, template, extension[1:])\n\n assert_equals(type(written_files), list)\n for written_file in written_files:\n assert_equals(type(written_file), str)\n\n written_files2 = pnictogen(mol, input_prefix, template)\n assert_equals(written_files, written_files2)\n\n # Allow use of template in the parent directory\n with cd(\"pnictogen/repo\"):\n mol = Atoms(\n cclib.bridge.cclib2openbabel.readfile(\"../../data/water-dimer.xyz\", \"xyz\")\n )\n written_files = pnictogen(mol, \"../../data/water-dimer\", \"ADF.in\", \"in\")\n\n assert_equals(written_files, [\"../../data/water-dimer.in\"])\n\n main([\"-g\", \"/tmp/hello.world.ORCA.inp\"])\n mol = Atoms(cclib.bridge.cclib2openbabel.readfile(\"data/co.xyz\", \"xyz\"))\n written_files = pnictogen(mol, \"data/co\", \"/tmp/hello.world.ORCA.inp\", foo=\"bar\")\n\n assert_equals(written_files, [\"data/co.inp\"])", "def check_removed_files(store: dict[str, Any]) -> ValidationStepResult:\n labels: set[Label] = set()\n all_labels: dict[str, Label] = store[\"possible_labels\"]\n errors: dict[os.PathLike, list[str]] = {}\n deleted_files_in_hub_mirrored_dir: set[os.PathLike] = set()\n \n repository: Repository = store[\"repository\"]\n filtered_files: dict[PullRequestFileType, list[File]] = (\n store[\"filtered_files\"]\n )\n\n logger.info(\"Checking if the PR contains updates to existing forecasts/metadata...\")\n\n forecasts = filtered_files.get(PullRequestFileType.FORECAST, [])\n metadatas = filtered_files.get(PullRequestFileType.METADATA, [])\n removed_files: bool = False\n success: bool = True\n\n for forecast_file in forecasts:\n if forecast_file.status == \"removed\":\n existing_forecast_file = get_existing_forecast_file(\n repository,\n forecast_file,\n 
store[\"HUB_MIRRORED_DIRECTORY_ROOT\"]\n )\n if existing_forecast_file is not None:\n removed_files = True\n deleted_files_in_hub_mirrored_dir.add(existing_forecast_file)\n path = pathlib.Path(forecast_file.filename)\n errors[path] = [(\n \"The forecast CSV or metadata file is deleted.\"\n \"Please put the file back as we do not allow file deletion at the moment.\")]\n\n for metadata_file in metadatas:\n if metadata_file.status == \"removed\":\n existing_forecast_file = get_existing_forecast_file(\n repository,\n metadata_file,\n store[\"HUB_MIRRORED_DIRECTORY_ROOT\"]\n )\n if existing_forecast_file is not None:\n removed_files = True\n deleted_files_in_hub_mirrored_dir.add(existing_forecast_file)\n path = pathlib.Path(metadata_file.filename)\n errors[path] = [(\n \"The forecast CSV or metadata file is deleted. \"\n \"Please put the file back as we do not allow file deletion at the moment.\")]\n\n if removed_files:\n success = False\n logger.info(\"❌ PR deleted existing forecast/metadata file.\")\n labels.add(all_labels[\"file-deletion\"])\n\n else:\n logger.info(\"✔️ PR does not include file deletion.\")\n\n return ValidationStepResult(\n success=success,\n labels=labels,\n file_errors = errors,\n to_store={\n \"deleted_existing_files_paths\": deleted_files_in_hub_mirrored_dir\n }\n )", "def look_for_interesting_files(self, interesting_files):\n self.valid_interesting_files = []\n for i, f in interesting_files.items():\n if f.hash in self.files:\n self.logger.info(\"New interesting file : %s\", f.name)\n self.valid_interesting_files.append((f, self.files[f.hash]))", "def _validate_template_is_handled(self, filepath):\n # we're already sure we can open it ok\n zf = zipfile.ZipFile(str(filepath))\n\n tainted_filenames = []\n for name in zf.namelist():\n content = zf.read(name)\n if INIT_TEMPLATE_TOKEN in content:\n tainted_filenames.append(name)\n\n if tainted_filenames:\n raise CommandError(\n \"Cannot upload the charm as it include the following files with a leftover \"\n \"TEMPLATE-TODO token from when the project was created using the 'init' \"\n \"command: {}\".format(\", \".join(tainted_filenames))\n )", "def diff_bundle_contents():\n dir_package = os.listdir(ARCHIVE_TARGET)\n dir_setup = os.listdir(MODEL_TARGET)\n if dir_package != dir_setup:\n return True\n for bundle in dir_package:\n os.chdir(ARCHIVE_TARGET)\n subprocess.run([\"git\", \"clone\", bundle])\n os.chdir(\"..\")\n os.chdir(MODEL_TARGET)\n subprocess.run([\"git\", \"clone\", bundle])\n os.chdir(\"..\")\n dcmp = filecmp.dircmp(\n join(ARCHIVE_TARGET, bundle[: bundle.find(\".bundle\")]),\n join(MODEL_TARGET, bundle[: bundle.find(\".bundle\")]),\n )\n diff = Diff(dcmp)\n if diff.run():\n return True\n return False", "def _check_pofiles_content(self):\n\n # The list of invalid chars is specific to Catalan language\n invalid_chars = {u'á', u'ñ', u'ë', u'ù', u'â', u'ê', u'î', u'ô', u'û',\n u'ë', u'ÿ', u'ä', u'ö'}\n\n try:\n\n THRESHOLD_PERCENTAGE = 1\n findFiles = FindFiles()\n for filename in findFiles.find(self.temp_dir, \"*.po\"):\n poFile = pofile(filename)\n\n invalid = 0\n for entry in poFile:\n # Only localized segments. 
Skips developers names,\n # untranslated country names, etc\n if entry.msgid == entry.msgstr:\n continue\n\n for char in entry.msgstr.lower():\n if char in invalid_chars:\n invalid = invalid + 1\n\n if len(poFile) > 100 and invalid > 0:\n percentage = 100.0 * invalid / len(poFile)\n if percentage > THRESHOLD_PERCENTAGE:\n self.errors = self.errors + 1\n print \"Unsual number of invalid chars at {0} ({1}%)\".\\\n format(filename, str(percentage))\n\n except Exception as detail:\n print detail", "def main():\n\n parser = argparse.ArgumentParser(\n description=\"Compare the metadata content of two files\"\n )\n\n parser.add_argument(\n \"files\",\n nargs=2,\n metavar=\"FILE\",\n help=\"The names of two files to compare\",\n )\n\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n action=\"store_true\",\n help=\"print detailed output on screen\",\n )\n\n parser.add_argument(\n \"-s\",\n \"--ordered\",\n action=\"store_true\",\n help=\"When comparing lists, check the element order too.\",\n )\n\n parser.add_argument(\n \"-d\",\n \"--drop\",\n nargs=\"*\",\n default=None,\n metavar=\"KEY\",\n help=\"Keys to drop from metadata retrieved from file\",\n )\n\n parser.add_argument(\n \"-m\",\n \"--mode\",\n default=\"lite\",\n metavar=\"MODE\",\n type=str,\n choices=[\"tiny\", \"lite\", \"full\", \"peeker\"],\n help=\"\"\"\\\n This flag provides the user capability to select the amount of\n metadata retrieved. There three options:\n tiny (only those values used in PyJobTransforms),\n lite (same output as dump-athfile)\n and full ( all available data found)\n \"\"\",\n )\n\n parser.add_argument(\n \"-t\",\n \"--type\",\n default=None,\n metavar=\"TYPE\",\n type=str,\n choices=[\"POOL\", \"BS\"],\n help=\"\"\"\\\n The file type of the input filename. By default, it tries to\n determine itself the file type of the input.\n \"\"\",\n )\n\n parser.add_argument(\n \"-f\",\n \"--filter\",\n default=[],\n metavar=\"FILTER\",\n nargs=\"+\",\n type=str,\n help=\"Expression to select specific metadata fields to retrieve.\",\n )\n\n parser.add_argument(\n \"-x\",\n \"--diff-format\",\n default=\"simple\",\n type=str,\n choices=[\"simple\", \"diff\"],\n help=\"Switch between 'simple' or 'diff' style differences \",\n )\n\n parser.add_argument(\n \"--promote\",\n default=None,\n type=bool,\n help=\"Force promotion or not of the metadata keys \",\n )\n\n args = parser.parse_args()\n\n try:\n diff = meta_diff(\n args.files,\n verbose=args.verbose,\n ordered=args.ordered,\n drop=args.drop,\n mode=args.mode,\n meta_key_filter=args.filter,\n file_type=args.type,\n promote=args.promote,\n diff_format=args.diff_format,\n )\n except (ValueError, IndexError):\n print(\"you must supply two files to compare\")\n sys.exit(1)\n except ReferenceError:\n print(\"no such file\")\n sys.exit(1)\n\n if diff:\n print(\"\\n\".join(diff))\n sys.exit(1)\n\n sys.exit(0)", "def process_verified_files(self):\n\n # Scan s3 verified folder for files\n\n s3 = boto3.client('s3', aws_access_key_id=self.aws_access_key_id, aws_secret_access_key=self.aws_secret_access_key)\n response = s3.list_objects(Bucket=self.bucket, Prefix='UK_suppliers/Verified_Matches/')\n\n # Ignore first file entry in dict as is just the folder name. 
Returns a list of files\n files = response['Contents'][1:]\n\n # # For any files in /s3/verified/ - download them to local /verified_matches/\n for i in range(len(files)):\n verified_fp = os.path.join(self.directories['verified_matches_dir'].format(self.region_dir,self.proc_type),os.path.basename(files[i]['Key']))\n s3.download_file(self.bucket,\n files[i]['Key'],\n verified_fp)\n\n # Upload all files in verified_matches_dir to our database:\n if self.in_args.upload:\n self.runfile_mods.db_calls.DbCalls(self).addDataToTable()\n\n # Loop through retrieved verified matches files from S3 bucket\n\n for i in range(len(files)):\n try:\n # Delete from unverified folder (if hasn't been done by team already) so team know which haven't been\n # verified yet (located via date prefix of verified file incase of name change by team)\n if self.in_args.upload:\n s3.delete_object(Bucket=self.bucket, Key=os.path.join('UK_suppliers','Unverified_Matches', os.path.basename(files[i]['Key'])))\n except FileNotFoundError:\n pass\n\n # For each verified file, iterate over S3 zip files and download the corresponding zip.\n # Need to iterate over both files in /verified and /archive to make sure we aren't adding the wrong information\n # to multiple different files when theres >1 file in these folders.\n s3 = boto3.client('s3', aws_access_key_id=self.aws_access_key_id, aws_secret_access_key=self.aws_secret_access_key)\n response = s3.list_objects(Bucket=self.bucket, Prefix='UK_suppliers/Archive/')\n archive_files = response['Contents'][1:]\n\n # For each file in the archive folder, iterate over the names and find any matches to the date of the current\n # verified file in the iterator\n verified_zip_name = os.path.basename(files[i]['Key'])[:10] + '_files.zip'\n for a in range(len(archive_files)):\n # if they match, open that archive zip file and open the script performance stats file within it\n # and get the length/count of verified matches from the verified file\n archive_file = os.path.basename(archive_files[a]['Key'])\n if archive_file == verified_zip_name:\n # Download archive file to local verified folder\n dl_archive_fp = os.path.join(os.path.join(self.directories['verified_matches_dir']\n .format(self.region_dir, self.proc_type), archive_file))\n s3.download_file(self.bucket,\n os.path.join('UK_suppliers/Archive/', archive_file), dl_archive_fp)\n\n # Open archive file\n with ZipFile(dl_archive_fp, 'r') as z:\n\n # Open corresponding verified matches file\n verified_fp = os.path.join(\n self.directories['verified_matches_dir'].format(self.region_dir, self.proc_type),\n os.path.basename(files[i]['Key']))\n ver_file = pd.read_csv(verified_fp)\n with z.open('script_performance_stats.csv') as f:\n # Add additional stats to script performance stats csv\n stats_file = pd.read_csv(f)\n true_positives = len(ver_file[ver_file['Manual_Match_N'] == 'Y'])\n false_positives = len(ver_file[ver_file['Manual_Match_N'] == 'N'])\n unverified = len(ver_file) - true_positives - false_positives\n stats_file['true_positives'] = true_positives\n stats_file['false_positives'] = false_positives\n try:\n stats_file['script_precision'] = round((true_positives / (false_positives + true_positives)) * 100, 2)\n except ZeroDivisionError:\n stats_file['script_precision'] = 0\n stats_file['unverified'] = unverified\n stats_file.to_csv(self.directories['script_performance_stats_file'].format(self.region_dir, self.proc_type)\n ,index=False)\n\n stats_file_fp = self.directories['script_performance_stats_file'].format(self.region_dir,\n 
self.proc_type)\n with ZipFile(dl_archive_fp, 'a') as z:\n\n # Add/overwrite new stats file and verified matches file to zip file, then re-upload to S3 /Archive\n z.write(stats_file_fp, os.path.basename(stats_file_fp))\n z.write(verified_fp, os.path.basename(verified_fp))\n self.upload_file(dl_archive_fp, self.bucket, 'UK_suppliers/Archive/' + archive_file)\n\n # Delete matches csv from s3 verified folder (if 'upload' arg used)\n if self.in_args.upload:\n s3.delete_object(Bucket=self.bucket, Key=files[i]['Key'])", "def check_file_locations(store: dict[str, Any]) -> ValidationStepResult:\n success: bool = True\n filtered_files: dict[PullRequestFileType, list[File]] = (\n store[\"filtered_files\"]\n )\n all_labels: dict[str, Label] = store[\"possible_labels\"]\n labels: set[Label] = set()\n comments: list[str] = []\n errors: dict[os.PathLike, list[str]] = {}\n\n forecast_folder_name = store[\"FORECAST_FOLDER_NAME\"]\n logger.info(\n f\"Checking if the PR is updating outside the {forecast_folder_name}/ folder...\"\n )\n if (\n PullRequestFileType.OTHER_NONFS in filtered_files or\n PullRequestFileType.OTHER_FS in filtered_files\n ):\n logger.info((\n \"⚠️ PR contains file changes that are not part of a valid \"\n \"forecast submission (misnamed/misplaced forecast CSV, \"\n \"non CSV files, etc.)\"\n ))\n comments.append(\n \"⚠️ PR contains file changes that are not part of a valid \"\n \"forecast submission (misnamed/misplaced forecast CSV, \"\n \"non CSV files, etc.)\"\n )\n labels.add(all_labels[\"other-files-updated\"])\n\n if (\n PullRequestFileType.MODEL_OTHER_FS in filtered_files \n ):\n success = False\n logger.info((\n \"❌ PR contains files submitted in the model folder that are not part of a valid \"\n \"forecast submission\"\n ))\n comments.append(\n \"❌ PR contains files submitted in the model folder that are not part of a valid \"\n \"forecast submission\"\n )\n \n else:\n logger.info((\n \"✔️ PR does not contain file changes that are not part of a \"\n \"valid forecast submission (misnamed/misplaced forecast CSV, \"\n \"non CSV files, etc.)\"\n ))\n\n logger.info(\"Checking if the PR contains misplaced CSVs...\")\n submission_formatting_instruction = store[\"SUBMISSION_FORMATTING_INSTRUCTION\"]\n\n if (PullRequestFileType.FORECAST not in filtered_files and\n PullRequestFileType.OTHER_FS in filtered_files):\n success = False\n logger.info(\"❌ PR contains misplaced CSVs.\")\n for github_file in filtered_files[PullRequestFileType.OTHER_FS]:\n path = pathlib.Path(github_file.filename)\n errors[path] = [(\n \"The forecast CSV or metadata file is located in an \"\n \"incorrect location and/or is misnamed (see \"\n f\"[here]({submission_formatting_instruction})\"\n \" for submission instructions. 
Please correct the errors \"\n \"accordingly.\\n\"\n \"We will still check any misplaced CSV(s) for \"\n \"you, so that you can be sure that the CSVs are correct, \"\n \"or correct any actual file content validation errors if \"\n \"not.\"\n )]\n else:\n logger.info(\"✔️ PR does not contain misplaced forecasts\")\n\n logger.info(\"Checking if the PR contains metadata updates...\")\n if PullRequestFileType.METADATA in filtered_files:\n logger.info(\"💡 PR contains metadata updates\")\n comments.append(\"💡 PR contains metadata file changes.\")\n labels.add(all_labels[\"metadata-change\"])\n\n return ValidationStepResult(\n success=success,\n labels=labels,\n comments=comments,\n file_errors=errors\n )", "def check_files(filenames, fix, verboseout, summaryout):\n\tokmsg = \"OK\" if not fix else \"fixed\"\n\tbadmsg = \"non-conforming\"\n\tbad_files = 0\n\tfor fn in filenames:\n\t\tlines = read_file_and_maybe_fix_it(fn, fix)\n\t\tif check_content(fn, lines, verboseout):\n\t\t\tprint(\"{:s}: {}\".format(fn, okmsg), file=summaryout)\n\t\telse:\n\t\t\tbad_files += 1\n\t\t\tmsg = \"{:s}: {}\".format(fn, badmsg)\n\t\t\tprint(msg, file=summaryout)\n\treturn bad_files", "def run_checks(hooks_all, hooks_modified, modified, path):\n retcode = 0\n for command in hooks_all:\n if not isinstance(command, list):\n command = shlex.split(command)\n retcode |= subprocess.call(command, env={'PATH': path})\n\n for pattern, command in hooks_modified:\n if not isinstance(command, list):\n command = shlex.split(command)\n for filename in modified:\n if not fnmatch.fnmatch(filename, pattern):\n continue\n printed_filename = False\n proc = subprocess.Popen(command + [filename],\n env={'PATH': path},\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n output = proc.communicate()[0]\n if proc.returncode != 0:\n if not printed_filename:\n print(filename)\n print('=' * len(filename))\n printed_filename = True\n print(command[0])\n print('-' * len(command[0]))\n print(output)\n retcode |= proc.returncode\n\n return retcode", "def process(self, matches, tag):\n if isinstance(matches, str):\n matches = [matches]\n\n done = set()\n for match in matches:\n processed = False\n for path in self.dirs:\n for file in sorted(path.glob(match)):\n if file.name in done:\n continue\n self.__log.info('file %r matches %r' % (str(file), match))\n processed = True\n done.add(file.name)\n yield from str(file) >> tag >> self.out\n if not processed:\n raise ValueError('no matching files found for %r' % match)", "def compile(self):\n if self._outputs is None or len(self._outputs) == 0:\n print \"Error! No templates to compile! 
Did you specify a valid configuration file?\"\n else:\n # for each output file in output list \n for output in self._outputs:\n # Generate context\n context = output[\"context\"]\n # Generate template stream for that output\n tmpl = self._env.get_template(output[\"source_filename\"]).stream(context)\n # Verify output directory\n output_filename = output[\"output_filename\"]\n output_path = abspath(dirname(output_filename))\n if not exists(output_path):\n makedirs(output_path)\n # Render it to the output file\n tmpl.dump(output_filename)", "def process_cleanup(self, output_file=None, output_list=None):\n if output_file:\n self.check_output_file( output_file )\n elif output_list:\n for output_file in output_list:\n self.check_output_file( output_file )\n log.info('All expected output files found - process successful!\\n')", "def compare(self):\n self.success = True\n\n # evaluate if comparison should be made\n if not self.make_comparison:\n return\n\n msgall = \"\"\n msg = sfmt.format(\"Comparison test\", self.name)\n print(msg)\n\n if self.action is not None:\n cpth = os.path.join(self.simpath, self.action)\n files_cmp = None\n if self.action.lower() == \"compare\":\n files_cmp = []\n files = os.listdir(cpth)\n for file in files:\n files_cmp.append(file)\n elif \"mf6\" in self.action:\n fpth = os.path.join(cpth, \"mfsim.nam\")\n cinp, self.coutp = get_mf6_files(fpth)\n\n head_extensions = (\n \"hds\",\n \"hed\",\n \"bhd\",\n \"ahd\",\n \"bin\",\n )\n if \"mf6_regression\" in self.action:\n success, msgall = self._compare_heads(\n msgall,\n extensions=head_extensions,\n )\n if not success:\n self.success = False\n # non-regression runs - for new features\n else:\n files1 = []\n files2 = []\n exfiles = []\n ipos = 0\n for file1 in self.outp:\n ext = os.path.splitext(file1)[1][1:]\n\n if ext.lower() in head_extensions:\n\n # simulation file\n pth = os.path.join(self.simpath, file1)\n files1.append(pth)\n\n # look for an exclusion file\n pth = os.path.join(self.simpath, file1 + \".ex\")\n if os.path.isfile(pth):\n exfiles.append(pth)\n else:\n exfiles.append(None)\n\n # Check to see if there is a corresponding compare file\n if files_cmp is not None:\n\n if file1 + \".cmp\" in files_cmp:\n # compare file\n idx = files_cmp.index(file1 + \".cmp\")\n pth = os.path.join(cpth, files_cmp[idx])\n files2.append(pth)\n txt = sfmt.format(\n f\"Comparison file {ipos + 1}\",\n os.path.basename(pth),\n )\n print(txt)\n else:\n if self.coutp is not None:\n for file2 in self.coutp:\n ext = os.path.splitext(file2)[1][1:]\n\n if ext.lower() in head_extensions:\n # simulation file\n pth = os.path.join(cpth, file2)\n files2.append(pth)\n\n else:\n files2.append(None)\n\n if self.nam_cmp is None:\n pth = None\n else:\n pth = os.path.join(cpth, self.nam_cmp)\n\n for ipos in range(len(files1)):\n file1 = files1[ipos]\n ext = os.path.splitext(file1)[1][1:].lower()\n outfile = os.path.splitext(os.path.basename(file1))[0]\n outfile = os.path.join(\n self.simpath, outfile + \".\" + ext + \".cmp.out\"\n )\n if files2 is None:\n file2 = None\n else:\n file2 = files2[ipos]\n\n # set exfile\n exfile = None\n if file2 is None:\n if len(exfiles) > 0:\n exfile = exfiles[ipos]\n if exfile is not None:\n txt = sfmt.format(\n f\"Exclusion file {ipos + 1}\",\n os.path.basename(exfile),\n )\n print(txt)\n\n # make comparison\n success_tst = compare_heads(\n None,\n pth,\n precision=\"double\",\n text=extdict[ext],\n outfile=outfile,\n files1=file1,\n files2=file2,\n htol=self.htol,\n difftol=True,\n # Change to true to have list of 
all nodes exceeding htol\n verbose=self.cmp_verbose,\n exfile=exfile,\n )\n msg = sfmt.format(\n f\"{extdict[ext]} comparison {ipos + 1}\",\n self.name,\n )\n print(msg)\n\n if not success_tst:\n self.success = False\n msgall += msg + \" ... FAILED\\n\"\n\n # compare concentrations\n if \"mf6_regression\" in self.action:\n success, msgall = self._compare_concentrations(msgall)\n if not success:\n self.success = False\n\n # compare cbc files\n if \"mf6_regression\" in self.action:\n cbc_extensions = (\n \"cbc\",\n \"bud\",\n )\n success, msgall = self._compare_budgets(\n msgall, extensions=cbc_extensions\n )\n if not success:\n self.success = False\n\n assert self.success, msgall\n return", "def test_filter_files(self):\n expected = [\n (\"/subdir1/fichier1\", False),\n (\"/subdir1/fichier4\", False),\n (\"/subdir1/subsubdir1\", False),\n ]\n files = [\n (\"/subdir1/fichier1\", False),\n (\"/subdir2/fichier2\", False),\n (\"/subdir2/fichier3\", False),\n (\"/subdir1/fichier4\", False),\n (\"/subdir1/subsubdir1/fichier1\", False),\n (\"/subdir1/subsubdir1/\", False),\n ]\n self.assertEqual(\n list(self.path_translator.filter_files(files, \"/subdir1\")),\n expected)", "def checkfiles(args):\n\n from .query import Database\n db = Database()\n\n r = db.objects(\n protocol=args.protocol,\n support=args.support,\n groups=args.group,\n purposes=args.purposes,\n gender=args.gender,\n clients=args.client,\n )\n\n # go through all files, check if they are available on the filesystem\n good = []\n bad = []\n for f in r:\n if os.path.exists(f.make_path(args.directory, args.extension)):\n good.append(f)\n else:\n bad.append(f)\n\n # report\n output = sys.stdout\n if args.selftest:\n from bob.db.base.utils import null\n output = null()\n\n if bad:\n for f in bad:\n output.write('Cannot find file \"%s\"\\n' % (f.make_path(args.directory, args.extension),))\n output.write('%d files (out of %d) were not found at \"%s\"\\n' % \\\n (len(bad), len(r), args.directory))\n\n return 0", "def test_check():\n for f in cfg.required_files:\n assert os.path.isfile(f)", "def check(self):\n badCachePath = list()\n badCacheNode = list()\n cacheIn = getCacheInfoFromMaya()\n cacheInScene = cacheIn.getCacheFromScene()\n # get the templates\n\n if not TYPE == 'MULTI':\n cacheWorkTemplate = self.parent.app.get_template_by_name(\n 'fx_cacheseq_shot_work')\n cachePublishTemplate = self.parent.app.get_template_by_name(\n 'fx_cacheseq_shot_publish')\n mayaCachePublishTemplate = self.parent.app.get_template_by_name(\n 'maya_asset_publish_cache_multi')\n mayaCacheWorkTemplate = self.parent.app.get_template_by_name(\n 'maya_asset_work_cache_multi')\n else:\n cacheWorkTemplate = self.parent.app.get_template_by_name(\n 'fx_cacheseq_shot_work')\n cachePublishTemplate = self.parent.app.get_template_by_name(\n 'fx_cacheseq_shot_publish')\n mayaCachePublishTemplate = self.parent.app.get_template_by_name(\n 'maya_asset_publish_cache')\n mayaCacheWorkTemplate = self.parent.app.get_template_by_name(\n 'maya_asset_work_cache')\n\n for cacheFrom, cacheVal in cacheInScene.iteritems():\n\n fileNode = cacheVal\n for nodes, nodeVal in cacheVal.iteritems():\n for cacheNumber, cacheVal in nodeVal.iteritems():\n filePath = cacheVal['path']\n\n if cacheWorkTemplate.validate(filePath, skip_keys=[\"SEQ\"]):\n continue\n\n elif mayaCacheWorkTemplate.validate(filePath, skip_keys=[\"SEQ\"]):\n continue\n\n elif cachePublishTemplate.validate(filePath, skip_keys=[\"SEQ\"]):\n continue\n\n elif mayaCachePublishTemplate.validate(filePath, 
skip_keys=[\"SEQ\"]):\n continue\n\n else:\n badCachePath.append(pm.Path(filePath))\n badCacheNode.append(nodes)\n continue\n\n if not badCachePath:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = badCacheNode\n for node in badCachePath:\n self.addError(\"%s is not in the library\" % node)\n\n self.errorMessage = \"%s Cache not in library\" % (len(badCachePath))", "def test_verify_compare_data(self):\n self.backup(u\"full\", u\"testfiles/various_file_types\", options=[])\n\n # Test verify for the file with --compare-data\n self.verify(u'testfiles/various_file_types/executable', file_to_verify=u'executable',\n options=[u\"--compare-data\"])", "def compare_files(file1, file2):\n return filecmp.cmp(file1, file2)", "def test_resource_files():\n expected_files = os.listdir(EXPECTED)\n actual_files = os.listdir(TEMP_DIR)\n assert expected_files == actual_files", "def clean():\n possible_outputs = (\n '{}.html'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.epub'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.pdf'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.docx'.format(CONFIG['FULL_PROJECT_NAME']),\n '{}.odt'.format(CONFIG['FULL_PROJECT_NAME']),\n )\n\n for filename in possible_outputs:\n if os.path.exists(filename):\n os.remove(filename)\n print(\"Removed {}\".format(filename))", "def get_files(self):\r\n for filename in self.generated_files:\r\n path = os.path.join(CONFIGURATION.source_messages_dir, filename)\r\n exists = os.path.exists(path)\r\n self.assertTrue(exists, msg='Missing file: %s' % filename)\r\n if exists:\r\n yield path", "async def check_files(hass):\n # Verify that the user downloaded all files.\n base = f\"{hass.config.path()}/custom_components/{DOMAIN}/\"\n missing = []\n for file in REQUIRED_FILES:\n fullpath = \"{}{}\".format(base, file)\n if not os.path.exists(fullpath):\n missing.append(file)\n\n if missing:\n _LOGGER.critical(\"The following files are missing: %s\", str(missing))\n returnvalue = False\n else:\n returnvalue = True\n\n return returnvalue", "def verifyFileExists(self, fileDir, fileName):\n # check that file exists\n fpath = fileDir.child(fileName)\n self.assertTrue(fpath.exists())\n\n # check that the output files have some content\n fcontents = fpath.getContent()\n self.assertTrue(len(fcontents) > 0)\n\n # check that the html files are at least html-ish\n # this is not a terribly rigorous check\n if fpath.path.endswith(\".html\"):\n self.assertIn(b\"<body\", fcontents)", "def convertFiles():\n\n #### Get file lists\n tmp = os.path.join(remarkableBackupDirectory,remContent)\n files = [x for x in os.listdir(tmp) if \".\" not in x]\n\n for i in range(0, len(files)):\n # get file reference number\n refNrPath = os.path.join(remarkableBackupDirectory, remContent,\n files[i])\n # get meta Data\n meta = json.loads(open(refNrPath + \".metadata\").read())\n fname = meta[\"visibleName\"]\n fname = fname.replace(\" \", \"_\")\n # Does this lines file have an associated pdf?\n AnnotPDF = os.path.isfile(refNrPath + \".pdf\")\n # Get list of all rm files i.e. 
all pages\n npages = len(glob.glob(refNrPath + \"/*.rm\"))\n if npages != 0:\n if AnnotPDF:\n # we have found an annotated pdf\n # now make sure it has the right ending\n if meta[\"visibleName\"][-4:] != \".pdf\":\n syncFilePath = os.path.join(syncDirectory, \"*\",\n meta[\"visibleName\"] + \".pdf\")\n else:\n syncFilePath = os.path.join(syncDirectory, \"*\",\n meta[\"visibleName\"])\n\n # does the file exist in our system?\n inSyncFolder = glob.glob(syncFilePath) != []\n\n if inSyncFolder:\n # have we exported this thing before?\n local_annotExist = \\\n glob.glob(syncFilePath[:-4] + \"_annot.pdf\") != []\n # first, assume, it needs converting\n remoteChanged = True\n if local_annotExist:\n # if it already exists check when it was last updated\n local_annotPath = \\\n glob.glob(syncFilePath[:-4]+\"_annot.pdf\")[0]\n local_annot_mod_time = os.path.getmtime(local_annotPath)\n # rm time is in ms\n remote_annot_mod_time = int(meta[\"lastModified\"])/1000\n # has this version changed since we last exported it?\n remoteChanged = \\\n remote_annot_mod_time > local_annot_mod_time\n # update if the remote version has changed\n if remoteChanged:\n origPDF = glob.glob(syncFilePath)[0]\n #####\n convertAnnotatedPDF(fname, refNrPath, origPDF)\n #####\n else:\n print(fname + \"hasn't been modified\")\n else:\n print(fname + \" does not exist in the sync directory\")\n # TODO allow y/n input whether it should be copied there\n # anyway\n else:\n # we found a note\n print(\"exporting Notebook \" + fname)\n syncFilePath = os.path.join(syncDirectory, notesDirectory,\n fname + \".pdf\")\n inSyncFolder = glob.glob(syncFilePath) != []\n remoteChanged = True\n if inSyncFolder:\n local_annot_mod_time = os.path.getmtime(syncFilePath)\n remote_annot_mod_time = int(meta['lastModified'])/1000\n remoteChanged = remote_annot_mod_time > local_annot_mod_time\n if remoteChanged:\n #####\n convertNotebook(fname, refNrPath)\n #####\n else:\n print(fname + \"has not changed\")", "def collect():\n\n # Get database.\n with open(local_directory(path='file_diffs/packages.json'), 'r') as f:\n store = json.load(f)\n\n # UI.\n print('Checking files for differences...\\n')\n\n # Iterate database.\n for package_name in store:\n # Package variables.\n package_dir = os.path.join(package_directory, package_name)\n package = store[package_name]\n\n # Recursive (lazy) package searching.\n if type(package) == str:\n package = os.path.expanduser(package)\n for dirpath, dirnames, filenames in os.walk(package):\n for filename in filenames:\n sub_package_dir = package_dir + dirpath.replace(package, '')\n if not os.path.exists(sub_package_dir):\n os.makedirs(sub_package_dir)\n\n fp_local = os.path.join(dirpath, filename)\n fp_remote = os.path.join(sub_package_dir, filename)\n\n cs_local = file_checksum(fp=fp_local)\n cs_remote = file_checksum(fp=fp_remote)\n\n if cs_remote != cs_local:\n print('Found: {}/{}'.format(package_name, filename))\n shutil.copyfile(src=fp_local, dst=fp_remote)\n\n # Manual package searching.\n if type(package) == list:\n for fp in package:\n fn_local = fp['local']\n fn_remote = fp['remote']\n\n fp_local = os.path.expanduser(fn_local)\n fp_remote = os.path.join(package_dir, fn_remote)\n\n cs_local = file_checksum(fp=fp_local)\n cs_remote = file_checksum(fp=fp_remote)\n\n if cs_remote != cs_local:\n print('Found: {}/{}'.format(package_name, fn_remote))\n\n remote_dir_path = '/'.join(fp_remote.split('/')[:-1])\n if not os.path.exists(remote_dir_path):\n os.makedirs(remote_dir_path)\n shutil.copyfile(src=fp_local, 
dst=fp_remote)", "def processed_file_names(self):\n if self.force_reprocess == True:\n self.force_reprocess = False\n return 'reprocess.pt'\n \n ''' HR 01/06/22 Workaround to avoid FileNotFoundError '''\n print('self.processed_dir:', self.processed_dir)\n # folder,file = os.path.split(self.processed_dir)\n folder = self.processed_dir\n if not os.path.isdir(folder):\n print(' Making folder', folder)\n os.makedirs(folder)\n \n processedfiles = [f for f in os.listdir(self.processed_dir) if os.path.isfile(\n os.path.join(self.processed_dir, f))]\n if 'pre_filter.pt' in processedfiles:\n processedfiles.remove('pre_filter.pt')\n if 'pre_transform.pt' in processedfiles:\n processedfiles.remove('pre_transform.pt')\n # 'not_implimented.pt' #[f'data_{i}.pt' for i in list(self.data.index)]\n return processedfiles", "def testFilesExist(self):\n \n for year in range(2007,2013):\n self.assertTrue(os.path.exists(\"./IncomeHistogram_\"+ str(year)+\".pdf\"), \"A histogram didn't save to output.\")\n self.assertTrue(os.path.exists(\"./LogIncomeHistogram_\"+ str(year)+\".pdf\"), \"A histogram didn't save to output.\")\n self.assertTrue(os.path.exists(\"./IncomeBoxplot(log)_\"+ str(year)+\".pdf\"), \"A boxplot didn't save to output.\") \n self.assertTrue(os.path.exists(\"./results.txt\"), \"Results file doesn't exist.\")", "def check_pickle_files(self, logdir_content):\n if np.all(np.array(\n [s in logdir_content for s in self.expected_pickles])):\n # Processed output files are there\n if self.test_type == 'analysis':\n pickle_there = self.check_analysis_pickle_files()\n elif self.test_type == 'injparamscan':\n pickle_there = self.check_injparamscan_pickle_files()\n elif self.test_type == 'systtests':\n pickle_there = self.check_systtests_pickle_files()\n else:\n logging.info(\n 'Did not find all of the files - %s - expected to indicate '\n 'this data has already been extracted.'%self.expected_pickles\n )\n pickle_there = False\n\n return pickle_there", "def render_templates(self):\n\n # dockerfile\n try:\n t = self.templates.get_template(\n 'docker/dockerfiles/{}.dockerfile.template'.format(self.repo)\n )\n except TemplateNotFound:\n t = self.templates.get_template(\n 'docker/dockerfiles/default.dockerfile.template'\n )\n\n self.files.append({\n 'name': 'Dockerfile',\n 'content': t.render(commit=self.commit),\n })\n\n # gunicorn\n t = self.templates.get_template(\n 'docker/gunicorn/gunicorn.conf.py'\n )\n self.files.append({\n 'name': 'gunicorn.conf.py',\n 'content': t.render(),\n })\n\n t = self.templates.get_template(\n 'docker/gunicorn/gunicorn.sh'\n )\n self.files.append({\n 'name': 'gunicorn.sh',\n 'content': t.render(),\n 'mode': 0555,\n })\n\n # nginx\n t = self.templates.get_template(\n 'docker/nginx/app.nginx.conf'\n )\n self.files.append({\n 'name': 'app.nginx.conf',\n 'content': t.render(),\n })\n\n t = self.templates.get_template(\n 'docker/nginx/nginx.sh'\n )\n self.files.append({\n 'name': 'nginx.sh',\n 'content': t.render(),\n 'mode': 0555,\n })\n\n # cron/, etc/ iif there exists a `self.repo` directory\n def _filter(p):\n return (\"cron/\" in p or \"etc/\" in p) and (self.repo in p) and \\\n (not os.path.basename(p).startswith('.'))\n\n for t in self.templates.list_templates(\n filter_func=_filter):\n\n self.files.append({\n 'name': os.path.basename(t),\n 'content': self.templates.get_template(t).render(),\n })", "def CheckTemplates(self, base_dir, version):\n major_minor = \".\".join(version.split(\".\")[0:2])\n templates = glob.glob(\n os.path.join(base_dir, \"templates/*%s*.zip\" % 
major_minor))\n required_templates = set(\n [x.replace(\"maj.minor\", major_minor) for x in self.REQUIRED_TEMPLATES])\n\n # Client templates have an extra version digit, e.g. 3.1.0.0\n templates_present = set([\n re.sub(r\"_%s[^_]+_\" % major_minor, \"_%s_\" % major_minor,\n os.path.basename(x)) for x in templates\n ])\n\n difference = required_templates - templates_present\n if difference:\n raise RuntimeError(\"Missing templates %s\" % difference)", "def verify_files(folder_path):\n for dataset_file in listdir(folder_path):\n verify_file(folder_path + dataset_file)", "def prefilter_json_files_then_compare(args):\n\n logging.info(\"prefilter_json_files_then_compare: starting!\")\n with open(args.initialFile) as f:\n json_initial = file.read(f)\n with open(args.finalFile) as f2:\n json_final = file.read(f2)\n\n patch = jsonpatch.JsonPatch.from_diff(json_initial, json_final)\n logging.info(\n \"prefilter_json_files_then_compare:differences before patching: %d\",\n len(list(patch)),\n )\n\n json_initial_filtered = prefilter(json_initial, args.initial_prefilter)\n json_final_filtered = prefilter(json_final, args.finalPreFilter)\n\n patch_after_filtering = jsonpatch.JsonPatch.from_diff(\n json_initial_filtered, json_final_filtered\n )\n differences_after_patching = list(patch_after_filtering)\n logging.info(\n \"prefilter_json_files_then_compare: differences after patching: %d\",\n len(differences_after_patching),\n )\n\n if args.printDifferences:\n for patchline in differences_after_patching:\n print(json.dumps(patchline))\n\n print(len(differences_after_patching))\n return len(differences_after_patching)", "def _update_files():\n configuration_settings = get_configuration()\n\n # Need to find all of the files that are stored in the input_files directories in order to start building the\n # reports that will be used to generate the static log files.\n for input_path in configuration_settings.processing.inputs:\n search_path = pathlib.Path(input_path)\n\n # Currently going to make the assumption that everyone is using the path naming convention that I'm dictating\n # which is YYYY/MM/DD/file.ext\n for file_component in search_path.glob('*/*/*/*'):\n # Store all of the files into a dictionary containing the keys and a list of the files that are associated\n # with that day\n updaters.update_files(search_path, file_component)", "def test_check(self):\n\n self.assertTrue(DirExclude().check(self.file_gitignore))\n self.assertTrue(DirExclude().check(self.file_perceval))\n self.assertTrue(DirExclude().check(self.file_authors))\n\n self.assertFalse(DirExclude().check(self.file_tests))\n self.assertFalse(DirExclude().check(self.file_bin))", "def checkMissingFiles(inDir, jsonUrl):\n\n file_list = []\n remote = False\n try:\n file_list = os.listdir(inDir)\n except OSError:\n remote = True\n file_list = eos_ls(inDir)\n\n if file_list == []:\n print \"Directory does not exist or is empty!\"\n return []\n\n total_expected = 0\n missing_files = []\n suspicious_files = []\n recovered_files = []\n\n print 'Found %d files in input directory' % len(file_list)\n print 20*'-'\n\n jsonFile = open(jsonUrl,'r')\n procList = json.load(jsonFile,encoding = 'utf-8').items()\n\n for proc in procList:\n for desc in proc[1]:\n data = desc['data']\n isData = desc.get('isdata',False)\n mctruthmode = desc.get('mctruthmode')\n for d in data:\n dtag = d.get('dtag','')\n split = d.get('split',1)\n\n for segment in range(0,split):\n eventsFile = dtag\n if split > 1:\n eventsFile = dtag + '_' + str(segment)\n if mctruthmode:\n 
eventsFile += '_filt%d' % mctruthmode\n filename = eventsFile+'.root'\n\n sys.stdout.write('... checking %s' % filename)\n sys.stdout.flush()\n\n total_expected += 1\n\n if not filename in file_list:\n missing_files.append(filename)\n sys.stdout.write('\\033[91m MISSING \\033[0m \\n')\n # sys.stdout.flush()\n continue\n\n rootFileUrl = os.path.join(inDir, filename)\n if remote:\n rootFileUrl = ('root://eoscms//eos/cms/store' +\n rootFileUrl.split('store',1)[1])\n\n recovered, suspicious = False, False\n tfile = TFile.Open(rootFileUrl)\n try:\n if tfile.TestBit(TFile.kRecovered):\n recovered = True\n if tfile.IsZombie():\n suspicious = True\n tfile.Close()\n except AttributeError, ReferenceError:\n suspicious = True\n\n if recovered:\n sys.stdout.write('\\033[93m Recovered \\033[0m \\n')\n recovered_files.append(filename)\n if suspicious:\n sys.stdout.write('\\033[93m Failed to open \\033[0m \\n')\n suspicious_files.append(filename)\n\n sys.stdout.write('\\033[92m OK \\033[0m \\n')\n sys.stdout.flush()\n\n print 20*'-'\n if len(missing_files):\n print \"Missing the following files:\"\n print \"(%d out of %d expected)\"% (len(missing_files), total_expected)\n for filename in missing_files:\n print filename\n else:\n print \"NO MISSING FILES!\"\n print 20*'-'\n if len(suspicious_files):\n print \"Failed to open the following files:\"\n print \"(%d out of %d expected)\"% (len(suspicious_files), total_expected)\n for filename in suspicious_files:\n print filename\n print 20*'-'\n if len(recovered_files):\n print \"The following files are recovered:\"\n print \"(%d out of %d expected)\"% (len(recovered_files), total_expected)\n for filename in recovered_files:\n print filename\n print 20*'-'\n\n return missing_files+suspicious_files+recovered_files", "def HandleFiles(variables):\n\n # The template file is the html file into which we will write the\n # data from the stats file, formatted correctly for the gviz_api.\n template_file = open(variables[1], \"r\")\n page_template = template_file.read()\n template_file.close()\n\n # This is the path match pattern for finding stats files amongst\n # all the other files it could be. 
eg: *.stt\n file_pattern = variables[2]\n\n # This is the directory with files that we will use to do the comparison\n # against.\n baseline_dir = variables[3]\n snrs = ''\n filestable = {}\n filestable['dsnr'] = ''\n filestable['drate'] = ''\n filestable['avg'] = ''\n\n # Go through each metric in the list.\n for column in range(1,2):\n\n # Dirs is directories after the baseline to compare to the base.\n dirs = variables[4:len(variables)]\n\n # Find the metric files in the baseline directory.\n dir_list = sorted(fnmatch.filter(os.listdir(baseline_dir), file_pattern))\n\n for metric in ['avg','dsnr','drate']:\n description = {\"file\": (\"string\", \"File\")}\n\n # Go through each directory and add a column header to our description.\n countoverall = {}\n sumoverall = {}\n\n for directory in dirs:\n description[directory] = (\"number\", directory)\n countoverall[directory] = 0\n sumoverall[directory] = 0\n\n # Data holds the data for the visualization, name given comes from\n # gviz_api sample code.\n data = []\n for filename in dir_list:\n row = {'file': splitext(basename(filename))[0] }\n baseline_file_name = baseline_dir + \"/\" + filename\n\n # Read the metric file from each of the directories in our list.\n for directory in dirs:\n metric_file_name = directory + \"/\" + filename\n\n # If there is a metric file in the current directory, open it\n # and calculate its overall difference between it and the baseline\n # directory's metric file.\n if os.path.isfile(metric_file_name):\n overall = FileBetter(baseline_file_name, metric_file_name,\n column, metric)\n row[directory] = overall\n\n sumoverall[directory] += overall\n countoverall[directory] += 1\n\n data.append(row)\n\n # Add the overall numbers.\n row = {\"file\": \"OVERALL\" }\n if countoverall[directory]:\n for directory in dirs:\n row[directory] = sumoverall[directory] / countoverall[directory]\n data.append(row)\n\n # write the tables out\n data_table = gviz_api.DataTable(description)\n data_table.LoadData(data)\n\n filestable[metric] = ( filestable[metric] + \"filestable_\" + metric +\n \"[\" + str(column) + \"]=\" + data_table.ToJSon()\n + \"\\n\" )\n\n filestable_avg = filestable['avg']\n filestable_dpsnr = filestable['dsnr']\n filestable_drate = filestable['drate']\n\n # Now we collect all the data for all the graphs. 
First the column\n # headers which will be Datarate and then each directory.\n columns = (\"datarate\",baseline_dir)\n description = {\"datarate\":(\"number\", \"Datarate\")}\n for directory in dirs:\n description[directory] = (\"number\", directory)\n\n description[baseline_dir] = (\"number\", baseline_dir)\n\n snrs = snrs + \"snrs[\" + str(column) + \"] = [\"\n\n # Now collect the data for the graphs, file by file.\n for filename in dir_list:\n\n data = []\n\n # Collect the file in each directory and store all of its metrics\n # in the associated gviz metrics table.\n all_dirs = dirs + [baseline_dir]\n for directory in all_dirs:\n\n metric_file_name = directory + \"/\" + filename\n if not os.path.isfile(metric_file_name):\n continue\n\n # Read and parse the metrics file storing it to the data we'll\n # use for the gviz_api.Datatable.\n metrics = ParseMetricFile(metric_file_name, column)\n for bitrate, metric in metrics:\n data.append({\"datarate\": bitrate, directory: metric})\n\n data_table = gviz_api.DataTable(description)\n data_table.LoadData(data)\n snrs = snrs + \"'\" + data_table.ToJSon(\n columns_order=tuple([\"datarate\",baseline_dir]+dirs)) + \"',\"\n\n snrs = snrs + \"]\\n\"\n\n formatters = \"\"\n for i in range(len(dirs)):\n formatters = \"%s formatter.format(better, %d);\" % (formatters, i+1)\n\n print FillForm(page_template, vars())\n return", "def test_file_collection():\n with tempfile.TemporaryDirectory() as STATUS_DIR:\n Status.make_job_file(STATUS_DIR, 'generation', 'test1', TEST_1_ATTRS_1)\n Status.make_job_file(STATUS_DIR, 'generation', 'test2', TEST_2_ATTRS_1)\n\n Status.update(STATUS_DIR)\n with open(os.path.join(STATUS_DIR, 'rev_status.json'), 'r') as f:\n data = json.load(f)\n assert str(TEST_1_ATTRS_1) in str(data)\n assert str(TEST_2_ATTRS_1) in str(data)", "def validate_po_files(root, report_empty=False):\r\n\r\n for dirpath, __, filenames in os.walk(root):\r\n for name in filenames:\r\n __, ext = os.path.splitext(name)\r\n if ext.lower() == '.po':\r\n filename = os.path.join(dirpath, name)\r\n # First validate the format of this file\r\n msgfmt_check_po_file(filename)\r\n # Now, check that the translated strings are valid, and optionally check for empty translations\r\n check_messages(filename, report_empty)", "def test_filter_file_exceptions_early():\n exceptions = Exceptions(os.path.join(os.path.dirname(__file__),\n 'early_exceptions.yaml'))\n\n package = Package('test', os.path.dirname(__file__))\n files = [os.path.join(os.path.dirname(__file__),\n 'unlikelystring')]\n\n filtered_files = exceptions.filter_file_exceptions_early(package, files)\n\n assert not filtered_files", "def check_files(self):\n print('checking files')\n for f in tqdm(self.filenames):\n img = cv2.imread(f, int(self.color))\n if img is None:\n os.remove(f)", "def __is_complete__(self,configs,*args,**kwargs):\n current_dir = self.output_dir\n if GenericProcess.__is_complete__(self,*args,**kwargs):\n return True\n elif not os.path.isfile(self.complete_file):\n if hasattr(self,\"upload_dir\"):\n current_dir = self.upload_dir\n if not os.path.isfile(self.complete_file.replace(self.output_dir,self.upload_dir)): #If the output directory has already been cleaned, check the upload dir.\n return False\n else: \n return False\n if hasattr(self, \"snp_path\") and not self.snp_path is None and hasattr(self,\"analysis_ready_bam_path\") and not self.analysis_ready_bam_path is None:\n if not os.path.isdir(os.path.dirname(self.snp_path)) or not 
os.path.dirname(os.path.isfile(self.analysis_ready_bam_path)):\n return False\n if not os.path.isfile(self.snp_path) or not os.path.isfile(self.analysis_ready_bam_path):\n snp_file = False\n bam_file = False\n return False\n if not self.upload_dir is None:\n for file in os.listdir(os.path.join(self.upload_dir,self.description)):\n if file.endswith('.vcf'):\n snp_file = True \n if file.endswith('.bam'):\n bam_file = True \n if not snp_file or not bam_file:\n if configs[\"system\"].get(\"Logging\",\"debug\") is \"True\":\n print \"At least one of the output files is missing for sample \" + str(self.sample_key) + \":\"\n if not os.path.isfile(self.snp_path):\n print \"Missing \"+ self.snp_path\n if not os.path.isfile(self.analysis_ready_bam_path):\n print \"Missing \"+ self.analysis_ready_bam_path\n #os.remove(self.complete_file)\n #template_dir = configs['system'].get('Common_directories','template')\n #qsub_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','bcbio_no_postprocess'))\n #self.__fill_template__(qsub_template,os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n #self.__launch__(configs['system'],os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n return False\n else:\n check_file = os.path.join(current_dir,'project-summary.csv')\n #If the process is complete, check to make sure that the check file is created. If not, send email once.\n if not os.path.isfile(check_file) and configs['pipeline'].has_option('Template_files','bcbio_no_postprocess') and current_dir==self.output_dir:\n #subject, body = self.__generate_general_error_text__(config)\n #send_email(subject,body)\n #self.fail_reported = True\n os.remove(self.complete_file)\n template_dir = configs['system'].get('Common_directories','template')\n qsub_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','bcbio_no_postprocess'))\n self.__fill_template__(qsub_template,os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n self.__launch__(configs['system'],os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n return False\n #store_stats_in_db(self)\n self.__finish__(*args,**kwargs)\n return True", "def _get_target_files(self) -> List[Path]:\n repo = get_git_repo()\n submodules = repo.submodules # type: ignore\n submodule_paths = [\n self._fname_to_path(repo, submodule.path) for submodule in submodules\n ]\n\n # resolve given paths relative to current working directory\n paths = [p.resolve() for p in self._paths]\n if self._base_commit is not None:\n paths = [\n a\n for a in (self._status.added + self._status.modified)\n # diff_path is a subpath of some element of input_paths\n if any((a == path or path in a.parents) for path in paths)\n ]\n changed_count = len(paths)\n click.echo(f\"| looking at {unit_len(paths, 'changed path')}\", err=True)\n paths = [\n path\n for path in paths\n if all(\n submodule_path not in path.parents\n for submodule_path in submodule_paths\n )\n ]\n if len(paths) != changed_count:\n click.echo(\n f\"| skipping files in {unit_len(submodule_paths, 'submodule')}: \"\n + \", \".join(str(path) for path in submodule_paths),\n err=True,\n )\n\n # Filter out ignore rules, expand directories\n self._ignore_rules_file.seek(0)\n patterns = Parser(self._base_path).parse(self._ignore_rules_file)\n\n file_ignore = FileIgnore(\n base_path=self._base_path, patterns=patterns, target_paths=paths\n )\n\n walked_entries = list(file_ignore.entries())\n click.echo(\n f\"| found {unit_len(walked_entries, 'file')} in the paths to be scanned\",\n 
err=True,\n )\n filtered: List[Path] = []\n for elem in walked_entries:\n if elem.survives:\n filtered.append(elem.path)\n\n skipped_count = len(walked_entries) - len(filtered)\n if skipped_count:\n click.echo(\n f\"| skipping {unit_len(range(skipped_count), 'file')} based on path ignore rules\",\n err=True,\n )\n\n relative_paths = [path.relative_to(self._base_path) for path in filtered]\n\n return relative_paths", "def check_files(self, data_path):\n files = os.listdir(data_path)\n\n if 'test_batch' not in files:\n return False\n\n if 'batches.meta' not in files:\n return False\n\n for i in range(1, 6):\n if 'data_batch_{}'.format(i) not in files:\n return False\n\n return True", "def _need_generate(paths):\r\n if not os.path.exists(paths.generated_dir):\r\n return True\r\n\r\n if not os.path.exists(paths.index_file):\r\n return True\r\n\r\n # Use the index file to determine if regeneration is necessary\r\n with open(paths.index_file, 'r',newline='\\n') as index_file:\r\n indexed = [item for item in\r\n index_file.read().split('\\n') if len(item) != 0 and\r\n not item.startswith(\"#\")]\r\n return indexed != paths.resource_files", "def template_staging_directory(staging_directory, problem):\n\n # prepend the staging directory to all\n dont_template = copy(problem.dont_template) + [\n \"app/templates\", \"problem.json\", \"challenge.py\", \"templates\", \"__pre_templated\"\n ]\n\n dont_template_files = list(filter(isfile, dont_template))\n dont_template_directories = list(filter(isdir, dont_template))\n dont_template_directories = [\n join(staging_directory, directory)\n for directory in dont_template_directories\n ]\n\n for root, dirnames, filenames in os.walk(staging_directory):\n if any(\n os.path.commonprefix([root, path]) == path\n for path in dont_template_directories):\n logger.debug(\n \"....Not templating anything in the directory '{}'\".format(\n root))\n continue\n for filename in filenames:\n if filename in dont_template_files:\n logger.debug(\n \"....Not templating the file '{}'\".format(filename))\n continue\n fullpath = join(root, filename)\n try:\n template_file(fullpath, fullpath, **get_attributes(problem))\n except UnicodeDecodeError as e:\n # tried templating binary file\n pass", "def sprite_files(self) -> Iterable[Tuple[LazyFile, Optional[LazyFile], str]]:\n\n missed_patterns = []\n\n # For each graphic directory we want recursively all png file\n for mod, pattern in self.patterns:\n pattern_used = False\n\n for sprite_path in mod.files(pattern):\n # But they should not match any of the excludes\n if any(\n fnmatch(sprite_path, f\"*{exclude}*\") for exclude in self.excludes\n ):\n continue\n\n if len(self.includes) > 0:\n # They must contain an includes\n if all(\n not fnmatch(sprite_path, f\"*{include}*\")\n for include in self.includes\n ):\n continue\n\n full_sprite_path = f\"__{mod.name}__/{sprite_path}\"\n\n replaced_mod, replaced_sprite_path = self.replace_path(full_sprite_path)\n\n pattern_used = True\n\n lazy_match_size_file = None\n if replaced_mod != mod or replaced_sprite_path != sprite_path:\n lazy_match_size_file = mod.lazy_file(sprite_path)\n\n # Double check the excludes still don't match\n if any(\n fnmatch(replaced_sprite_path, f\"*{exclude}*\")\n for exclude in self.excludes\n ):\n continue\n\n try:\n lazy_source_file = replaced_mod.lazy_file(replaced_sprite_path)\n except:\n click.secho(f\"Failed to find replacement sprite for:\", fg=\"red\")\n click.secho(f\" {self.source}: {full_sprite_path}\", fg=\"red\")\n raise\n\n yield (\n lazy_source_file,\n 
lazy_match_size_file,\n full_sprite_path,\n )\n\n if not pattern_used:\n missed_patterns.append(f\"__{mod.name}__/{pattern}\")\n\n if len(missed_patterns) > 0:\n click.secho(\n f\"Warning: Resources with no match in file {self.source}:\\n \"\n + \"\\n \".join(missed_patterns),\n fg=\"yellow\",\n )\n\n for asset in self.forced_assets:\n # Since a forced asset won't actually exist, we must replace\n # it out to an actual sprite\n mod, sprite_path = self.replace_path(asset)\n\n try:\n lazy_source_file = mod.lazy_file(sprite_path)\n except:\n click.secho(\n f\"Failed to find replacement sprite for forced asset:\", fg=\"red\"\n )\n click.secho(f\" {self.source}: {asset}\", fg=\"red\")\n raise\n\n yield (lazy_source_file, None, asset)", "def updateTestFiles(self):\n for filename, filetype in self._get_test_files():\n lines = open(filename).readlines()\n found_version_line = False\n\n if self.Verbose:\n print 'Reading %s' % filename\n\n if filetype is 'Python':\n lines, write_out = self._update_python_file(lines, filename)\n else:\n raise TypeError, \"Unknown test file type %s\" % filetype\n\n if write_out:\n self._file_writer(lines, filename)", "def check_prerequisites(self, env):\n super(PopLog, self).check_prerequisites(env)\n print(\" Checking prerequisites for : {0}\".format(self.__class__.__name__))\n \n for inFile in self._expectedInFiles:\n rc, err_msg = cesmEnvLib.checkFile('{0}/{1}'.format(env['WORKDIR'],inFile), 'read')\n if not rc:\n print('{0}... continuing with additional plots.'.format(err_msg))", "def check_files(self):\n print('checking files')\n for f in self.filenames:\n img = cv2.imread(f, int(self.color))\n if img is None:\n os.remove(f)", "def testCheckChangeOnUploadWithEmptyAffectedFileList(self, _):\n diff_file_chromium1_h = ['some diff']\n diff_file_chromium2_h = ['another diff']\n diff_file_layout_test_html = ['more diff']\n mock_input_api = MockInputApi()\n mock_input_api.files = []\n # Access to a protected member _CheckStyle\n # pylint: disable=W0212\n PRESUBMIT._CheckStyle(mock_input_api, MockOutputApi())\n self.assertEqual(0, subprocess.Popen.call_count)", "def parse_files_and_set_flags(self):\n change_requires_product_plus1 = False\n sensitive_file_touched = {}\n try:\n files_contents = self.github.get_files()\n LOG.info(\"**** Reading files ****\")\n for item in files_contents:\n file_path = item[\"filename\"]\n if any(x in str(file_path) for x in self.pr.config.sensitiveFiles):\n sensitive_file_touched[\"is_found\"] = True\n sensitive_file_touched[\"file_name\"] = str(file_path)\n if item[\"filename\"].find(self.pr.config.productPlusRequiredDirPattern) != -1:\n LOG.info(\"product change found marking ui_change to True\")\n change_requires_product_plus1 = True\n # break\n except PRFilesNotFoundException, e:\n LOG.exception(e)\n return sensitive_file_touched, change_requires_product_plus1", "def should_build(target_platform, changed_files):\n return any(_should_file_trigger_build(target_platform, file) for file in changed_files)", "def check_init_files_and_folders():\n\t#['cascade_wimb_bus_front_100_stages_1000_pos_3000_neg.xml', 'cascade_wimb_bus_front_33_stages_1000_pos_3000_neg_wrong.xml', 'color_detect_2.py', 'dedupe.py', 'detect_image_group_ku.py', 'detect_shape_5.py', 'get_cam_id_2.py', 'get_image_8.py', 'gui_hsv.py', 'knaps.py', 'knapsack_2.py', 'maps.html', 'program_detect_rectangle.zip', 'start_capture.py']\n\tfile_list=[\n\t#'cascade_wimb_bus_front_100_stages_1000_pos_3000_neg.xml', 
\n\t'models/cascade_wimb_bus_front_33_stages_1000_pos_3000_neg_wrong.xml', \n\t#'color_detect_2.py', \n\t#'dedupe.py', \n\t'detect_bus_haar_group.py', \n\t#'detect_shape_5.py', \n\t'get_cam_detail.py', \n\t'get_image.py', \n\t#'gui_hsv.py', \n\t#'knaps.py', \n\t#'knapsack_2.py', \n\t#'maps.html', \n\t#'program_detect_rectangle.zip', \n\t'start_wimb.py',\n\t'g.php',\n\t]\n\tdirectory_list=[\n\t'images',\n\t'images_bgs',\n\t'images_bgs_mask',\n\t#'images_bgs_result',\n\t'images_color',\n\t'images_haar',\n\t'images_haar_result',\n\t'images_number',\n\t'images_number_result',\n\t'models',\n\t'images_old',\n\t'text_number',\n\t]\n\t\n\tfor file_name in file_list: print 'file '+file_name+' existed: '+str(os.path.isfile(file_name))\n\tfor directory_name in directory_list: \n\t\tprint 'directory '+directory_name+' existed: '+str(os.path.isdir(directory_name))\n\t\tif not os.path.isdir(directory_name): \n\t\t\tos.makedirs(directory_name)\n\t\tif \"images\" in directory_name: shutil.copy(path+'/g.php',path+'/'+directory_name+'/g.php')" ]
[ "0.6298096", "0.5958212", "0.5917722", "0.5899919", "0.5821627", "0.58071446", "0.57569844", "0.5753009", "0.5753009", "0.5753009", "0.57331693", "0.5726181", "0.56889504", "0.5665329", "0.5652075", "0.56505686", "0.5650157", "0.561327", "0.56093055", "0.5601862", "0.5555765", "0.55554396", "0.5544284", "0.55367583", "0.55271", "0.5514008", "0.55031884", "0.5490609", "0.5489855", "0.5482246", "0.5464058", "0.5420397", "0.5419601", "0.54187113", "0.5404949", "0.54029137", "0.538868", "0.5384603", "0.53834873", "0.53789335", "0.5377978", "0.5376577", "0.53733146", "0.53609115", "0.53546435", "0.5350409", "0.5350331", "0.5348746", "0.53452873", "0.53449875", "0.53391373", "0.5332695", "0.5329432", "0.5319471", "0.53173465", "0.53066546", "0.5302081", "0.53002197", "0.52864003", "0.5277939", "0.5269498", "0.526898", "0.52648264", "0.5256673", "0.5252024", "0.52478784", "0.5244709", "0.5243098", "0.5240047", "0.5235061", "0.52333164", "0.5233248", "0.5231731", "0.5231235", "0.52300763", "0.52300555", "0.5227665", "0.5226207", "0.52254486", "0.52251214", "0.52102864", "0.5209631", "0.520832", "0.5208179", "0.52044266", "0.5204359", "0.5202902", "0.5200326", "0.5195121", "0.51920813", "0.51832795", "0.51825136", "0.518104", "0.51807207", "0.51800776", "0.5179607", "0.51693237", "0.5166967", "0.5164915", "0.5157513" ]
0.68884873
0
Helper function get file path for pipeline file
def _pf(file_path): return os.path.join(self.wf_path, file_path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def file_path(self) -> global___Expression:", "def pipe_path(name: str, extension=\".txt\") -> str:\n return \"\\\\\".join(sys.argv[0].split(\"\\\\\")[:-3]) + f\"\\\\pipeline\\\\{name}{extension}\"", "def _get_path(): # THIS IS JUST FOR GETTING THE FILE\n return os.path.dirname(os.path.abspath(__file__)) + '/'", "def file_path(self) -> Path:\n return self._input_file", "def filenameAsPath(self, app):\n return app.recordingsPath.child(self.filename).path", "def path(self) -> str:\n return self.src + \"/\"", "def input_path(self, filename):\n\n return self.filename_path_join(self.input_dir, filename)", "def path(self, f):\n\t\treturn os.path.join(self.directory, f)", "def path(self):\n return self.file_path()", "def file_path(self):\n return self.lib.file_path", "def get_path(self):\n try:\n return self._file.path\n except AttributeError:\n return os.path.abspath(self._file.name)", "def _file_path(self, file: str) -> str:\n return os.path.abspath(f\"tests/resources/{file}\")", "def get_path(self):\n\n if not self.path:\n Settings.err_print(\"missing file path\")\n return \"\"\n return self.path", "def path(self, args):\n dir_path = self.dir_path_(*args)\n return os.path.join(dir_path, self.file_name)", "def get_file_path(self):\n return self._file_path", "def file_path(self):\n return posixpath.dirname(self.file_name)", "def get_file_path(self):\n if self.file_path is None:\n return None\n if self.file_path.endswith('.pyc'):\n return self.file_path[:-1]\n return self.file_path", "def input_path(self, filename):\n\n return os.path.abspath(os.path.expanduser(os.path.join(self.input_dir, filename)))", "def path(filename: str) -> str:\n path = os.path.dirname(sys.argv[0])\n if not path:\n path = '.'\n return path + '/' + filename", "def path(self):\n\n if self.file_func:\n path = self.file_func(self.lookup_obj, **self.pattern_params)\n return FilePath(path=path)\n return FilePath(path=\"\")", "def _get_resource_path(filename, path=Path.TEST):\n return os.path.normpath(os.path.join(path.value, filename))", "def filepath(self):\n return self.file.path", "def _get_file_path(filename=\"\"):\n\n return os.path.join(data_path, \"cifar-10-batches-py/\", filename)", "def filepath(self):\n return self._filepath.path", "def file_path() -> str:\n stack_t = inspect.stack()\n ins = inspect.getframeinfo(stack_t[1][0])\n return os.path.abspath(ins.filename)", "def FilePath(self) -> str:", "def file_path(self):\n return self._obs_file()", "def path(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"path\")", "def _filepath(self, filename):\n return os.path.join(self.root, self.version, filename)", "def get_file_path(filename, path='Data/'):\n path= os.path.abspath(os.path.dirname(path))\n return os.path.join(path, filename)", "def get_file_path(self,filename):\n return Path(self.resource_path,filename)", "def output_path():\n folder = path.join(path.curdir, \"stages\")\n folder = path.abspath(folder)\n return ensure_path(folder)", "def step_file_path(self, string):\n if not self.has_step_field(string):\n return None\n # TODO handle url\n root_dir = self.root_dir()\n if root_dir:\n path = os.path.join(root_dir, self.step_field(string))\n return os.path.realpath(path)\n return os.path.realpath(self.step_field(string))", "def __get_path(self):\n return self.path", "def get_relative_pathname(self):\n return os.path.join(Exam.EXAM_FILES_LOCATION,\n str(self.unique_id)[0:2],\n str(self.unique_id) + self.file_ext)", "def real_path(self):\n\t\treturn self.args[0]", "def _get_filepath(self) -> str:\n return 
os.path.join(\n os.sep.join(\n [\n self.period.value,\n 'activities',\n f'activities_{self._dt_string}.json'\n ]\n )\n )", "def path(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"path\")", "def get_path() -> str:\n return os.path.dirname(os.path.realpath(__file__))", "def compute_path(file: mesonlib.FileOrString) -> str:\n if isinstance(file, File):\n return file.absolute_path(self.source_dir, self.build_dir)\n return os.path.normpath(os.path.join(self.build_dir, file))", "def _getfilename(self):\n pass", "def get_path(data_path):\n\treturn os.path.dirname(os.path.realpath(__file__)) + os.sep + data_path", "def getPublishPath(self, filename):\n \n #recognize first\n if not self.isRecognized(filename): return None\n else:\n filename = Template(xpath(self.currentDataset,\n './/_:fileTemplate/text()', self.currentDatasetNs)).substitute(\\\n self.groupDict, hostname=self.hostname, SCIFLO_ROOT=self.scifloRoot)\n publishAtTpls = xpath(self.currentDataset,\n './/_:publishAt/_:location/_:data/text()',\n self.currentDatasetNs)\n if isinstance(publishAtTpls, (types.ListType, types.TupleType)):\n publishTpl = publishAtTpls[0]\n else: publishTpl = publishAtTpls\n publishAt = Template(publishTpl).substitute(self.groupDict,\n hostname=self.hostname, SCIFLO_ROOT=self.scifloRoot)\n return os.path.join(publishAt, filename)", "def get_path(filename):\n\tif filename != \"\":\n\t\treturn filename\n\telse:\n\t\tfilename = \".\"", "def get_path():\n return path.abspath(path.dirname(path.dirname(__file__)))", "def get_file(self):\n return self.dir + self.file_name + self.extension", "def filepath(self):\n return self.filepath_", "def get_file_path(self):\n if self.path[0] in self._simulation_data.mfpath.model_relative_path:\n return os.path.join(\n self._simulation_data.mfpath.get_model_path(self.path[0]),\n self._filename,\n )\n else:\n return os.path.join(\n self._simulation_data.mfpath.get_sim_path(), self._filename\n )", "def path(self):\n return os.path.join(FLOWJS_PATH, self.filename)", "def get_processed_path(self):\n\n return self.processed_img_path", "def get_filename(cls):\n return osp.join(cls.dir_location, *cls.file_path)", "def pathToFileName(self, path):\n\t\t# Find the path, and strip the leading slash.\n\t\tpath =urlparse.urlparse(self.path)[2].lstrip(\"/\")\n\t\t# Process url escape codes, and normalize the path.\n\t\tpath = os.path.normpath(urllib2.unquote(path))\n\t\t# normpath strips the last slash\n\t\tif os.path.isdir(path):\n\t\t\treturn path + '/'\n\t\telse:\n\t\t\treturn path", "def path(self):\n return os.path.dirname(os.path.abspath(self._filename))", "def _getAbsolutePath(self, filename):\n\n # find the correct path, in the experiment file they are either\n # relative to the experiment file, or an absolute path\n if filename != os.path.abspath(filename):\n return os.path.join(self._path, filename)\n else:\n return filename", "def path(self):\n if self.filename:\n return os.path.join(self.season.path, self.filename)", "def path(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"path\")", "def _get_params_filepath(self):\n\t\treturn os.path.join(self.workdir, \"params.txt\")", "def getPath(filename):\n\n if os.path.isabs(filename):\n pathfile = filename\n else:\n filename = filename.lstrip('/\\.')\n filename = filename.replace('/', '\\\\')\n pathfile = os.path.join(os.getcwd(), filename)\n \n return pathfile", "def file_path(file_name, path):\n return path.rstrip('\\/') + 
\"/{0}\".format(file_name) if path else os.getcwd() + \"/{0}\".format(file_name)", "def getfilename(path):\r\n return path.split('\\\\').pop().split('/').pop().rsplit('.', 1)[0]", "def source_file_path(self) -> str:\n return self._source_file_path", "def fpath(self):\n return os.path.join(self.path, self.name)", "def _get_source_path(self, docmeta: DocMetadata) -> Optional[str]:\n identifier = docmeta.arxiv_identifier\n version = docmeta.version\n file_noex = identifier.filename\n if not docmeta.is_latest:\n parent_path = self._get_parent_path(identifier, version)\n file_noex = f'{file_noex}v{version}'\n else:\n parent_path = self._get_parent_path(identifier)\n\n for extension in VALID_SOURCE_EXTENSIONS:\n possible_path = os.path.join(\n parent_path,\n f'{file_noex}{extension[0]}')\n if os.path.isfile(possible_path):\n return possible_path\n return None", "def output_file_path(self):\n return self.__output_file_path", "def get_path(f=sys.argv[0]):\n\n return os.path.split(f)", "def dag_file_path(self, string):\n if not self.has_dag_field(string):\n return None\n # TODO handle url\n root_dir = self.root_dir()\n if root_dir:\n path = os.path.join(root_dir, self.dag_field(string))\n return os.path.realpath(path)\n return os.path.realpath(self.dag_field(string))", "def _get_filename():\n dirname = os.path.dirname(__file__)\n return os.path.join(dirname, 'occulttraining.txt')", "def get_file_path(filename):\n here_dir = os.path.dirname(os.path.abspath(__file__))\n file_dir = os.path.join(here_dir, \"../data/\", filename)\n\n return file_dir", "def get_compose_path(compose_path: str, base_path: str) -> str:\n return get_path(compose_path, base_path)", "def __make_path(self, filename):\n return self.__path() + os.sep + filename", "def filepath(filename, data, root='/home/cyneo/Work/Scans/Processed Data/',\r\n filetype='.csv'):\r\n path = os.path.abspath(root + data + '/' + filename +\r\n ' ' + data + filetype)\r\n return path", "def get_validation_file_path(self):\n validation_file_name = self.get_validation_file_name()\n if self.helper_decoders_one_class:\n validation_file_name = validation_file_name + \"_1\"\n\n return self.base_folder_path + \"/outputs/\" + validation_file_name + \".txt\"", "def getCurrentFilePath(self):\n return os.path.abspath(self.filePath)", "def get_path(self):\n raise NotImplementedError(\"This asset does not support absolute paths\")", "def filename(self):\n # Just the name of the file\n filename = self.use_name\n if self.extension:\n filename = \"{0}.{1}\".format(self.use_name, self.extension)\n # Architecture sub-folder\n arch_folder_conf = spack.config.get(\"modules:%s:arch_folder\" % self.conf.name, True)\n if arch_folder_conf:\n # include an arch specific folder between root and filename\n arch_folder = str(self.spec.architecture)\n filename = os.path.join(arch_folder, filename)\n # Return the absolute path\n return os.path.join(self.dirname(), filename)", "def get_filename(filepath):\n return filepath.replace(\"{}\\\\\".format(RES_DIR), \"\")", "def _get_conv_filepath(self):\n\t\treturn os.path.join(self.workdir, \"conv.txt\")", "def getFilePathInBackend(self, hostPath):\n return hostPath", "def get_file_save_path(self):\n return self.out", "def output_path(self):\n return self._event.output_path", "def get_pathname(self):\n return self.image_data.path", "def _tf(file_path):\n return os.path.join(test_pipeline_dir, file_path)", "def path(x):\n return os.path.abspath(os.path.join(os.path.dirname(__file__), x))", "def get_path(self, project_file=None):\n root = 
os.path.abspath(\n os.path.join(os.path.dirname(__file__), '..', '..')\n )\n if project_file:\n return os.path.join(root, project_file)\n else:\n return root", "def path(self) -> str:\n return pulumi.get(self, \"path\")", "def path(self) -> str:\n return pulumi.get(self, \"path\")", "def get_file_path_in_project_directory(filename): \n DIR = os.path.dirname(os.path.abspath(\"__file__\")) \n path = os.path.join(DIR, filename)\n return path", "def get_shp_file(self):\n files = os.listdir(self.targetpath)\n file = files[0].split('.')[0]\n return self.targetpath + '/' + file", "def get_file_path(filename):\n if 'http' in filename:\n parsed_uri = urlparse(filename)\n f = '/' + parsed_uri.path[1:]\n f = '/'.join(f.split('/')[3:]) # split the xxx dir, remove the leading /\n else:\n filename = ('/' + filename) if filename[0] != '/' else filename # make sure starts with /\n # split local img path from path\n f = filename.replace(settings.FILE_PATH, '/')\n f = f.replace(settings.IMAGE_PATH, '/')\n f = f.replace(settings.DERIVED_PATH, '/')\n f = '/'.join(f.split('/')[2:]) # split the xxx dir, remove the leading /\n\n return f", "def get_test_filepath(filename):\n parent_dir = Path(__file__).parent\n return parent_dir / filename", "def outpath(self):\n return None", "def path(self):\n return self._data_file", "def execution_path(self, filename):\n return os.path.join(os.path.dirname(inspect.getfile(sys._getframe(0))), filename)", "def path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"path\")" ]
[ "0.7225395", "0.7101079", "0.7039319", "0.6956954", "0.6794682", "0.67932165", "0.67596304", "0.674209", "0.6732054", "0.6722096", "0.6676987", "0.66186446", "0.6618615", "0.66175014", "0.66000587", "0.65910906", "0.65839905", "0.6580201", "0.657943", "0.6575496", "0.6546741", "0.65405315", "0.6539037", "0.6504681", "0.6481606", "0.6478917", "0.64736515", "0.6466348", "0.64564294", "0.64538014", "0.6453147", "0.6449517", "0.6448336", "0.6445759", "0.6417693", "0.6412714", "0.6411506", "0.6405716", "0.6404932", "0.64013225", "0.6400317", "0.6369427", "0.6352771", "0.634569", "0.63441145", "0.63422364", "0.6333462", "0.6332373", "0.63153905", "0.6312344", "0.6309052", "0.6290749", "0.62902105", "0.6279835", "0.6277842", "0.6276765", "0.6276765", "0.6271324", "0.6270566", "0.6266283", "0.62595475", "0.6256488", "0.6249513", "0.6239331", "0.62371707", "0.6228322", "0.62116116", "0.62098604", "0.62097967", "0.6198269", "0.6198266", "0.6197394", "0.6185099", "0.61701214", "0.61652285", "0.61598426", "0.614062", "0.61391217", "0.6137595", "0.6131209", "0.61250216", "0.6123906", "0.6123393", "0.6115254", "0.61143565", "0.6113265", "0.6113265", "0.6111519", "0.61057484", "0.6099898", "0.6098336", "0.6098329", "0.6097179", "0.60933447", "0.6092532", "0.6092532", "0.6092532", "0.6092532", "0.6092532", "0.6092532" ]
0.681078
4
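
For context, the `_pf` helper documented in the row above is an instance of a common nested path-helper pattern: a small closure that resolves file names against a fixed base directory. The sketch below is a minimal, self-contained illustration of that pattern only; the base directory and example filename are assumptions for illustration, not values taken from the dataset row.

```python
import os

def make_path_helper(base_dir):
    # Return a closure that joins a relative file name onto a fixed base
    # directory, mirroring the _pf(file_path) helper documented above.
    def _pf(file_path):
        return os.path.join(base_dir, file_path)
    return _pf

# Illustrative usage with an assumed workflow directory.
pf = make_path_helper("/tmp/example_workflow")
print(pf("main.nf"))  # -> /tmp/example_workflow/main.nf
```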
Helper function get file path for template file
def _tf(file_path): return os.path.join(test_pipeline_dir, file_path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getTemplateFile(fname):\n return os.path.join(Configurations.getTemplateDir(), fname)", "def get_template_path(self):\n raise NotImplementedError()", "def _get_template_filename(self):\n _format = self.cfg.get('mutations', 'format')\n if _format == 'pdf':\n tf = 'PDFTemplate.bt'\n elif _format == 'png':\n tf = 'PNG12Template.bt'\n\n module_dir = os.path.dirname(os.path.abspath(__file__))\n\n return os.path.join(module_dir, templates_dir, tf)", "def get_template_filename(template):\n config = read_config(SETTINGS_PATH)\n #String templates\n if (template in STRING_TEMPLATES):\n options = config.options(STRING_TEMPLATES_SECTION) \n for option in options:\n if (option==template):\n #Get root path for the templates\n root_path = config.get(TEMPLATES_SECTION,TEMPLATES_ROOT_PATH)\n #Get the strings path templates\n strings_path = config.get(STRING_TEMPLATES_SECTION,STRING_TEMPLATES_PATH)\n return join(root_path,strings_path),config.get(STRING_TEMPLATES_SECTION,option)", "def get_template_path(relative_path, **kwargs): # lint-amnesty, pylint: disable=unused-argument\n return relative_path", "def template_path(self) -> str:\n return self._values.get(\"template_path\")", "def tfile_path(filename):\n here = os.path.dirname(__file__)\n return '{0}/static/files/{1}'.format(here, filename)", "def template_path(self):\n return self.get_config(\"templates\")", "def getTmpTemplateFile(fname):\n return os.path.join(Configurations.getTmpTemplateDir(), fname)", "def _get_template_fname(self):\n template_fname = self._context.get('template_fname', False)\n return template_fname", "def template_path(name):\n template_dir = os.path.join(os.path.dirname(__file__), 'templates')\n return os.path.join(template_dir, (name + \".html\"))", "def get_template_from_path(path: str) -> str:\r\n path = path.replace(\"\\\\\", \"/\")\r\n return path", "def destPath(file, package, type='files'):\n\treturn tmpDir(package)+'/etc/univention/templates/'+type+'/'+file", "def _file_path(self, file: str) -> str:\n return os.path.abspath(f\"tests/resources/{file}\")", "def _get_config_template(self, key):\n tmp_path = self._get_config_value('templates', 'path') + key\n return tmp_path", "def _get_path(): # THIS IS JUST FOR GETTING THE FILE\n return os.path.dirname(os.path.abspath(__file__)) + '/'", "def get_template(self, template):\n\n template_path = aj.config.data['email']['templates'].get(template, 'default')\n\n if template_path == 'default' or not os.path.isfile(template_path):\n template_path = DEFAULT_TEMPLATES[template]\n\n return template_path", "def save_path(self):\n return self.template.manager.render_template_txt(self.path, self.template)", "def _get_source_filepath(self, template_name):\n # FIXME: we are using the Django file system loader here, this API\n # might change in the future. 
Should we isolate this into a\n # functional programming style section instead of deep in this\n # class?\n try:\n source, filepath = \\\n filesystem._loader.load_template_source(template_name)\n except TemplateDoesNotExist:\n source, filepath = \\\n app_directories._loader.load_template_source(template_name)\n return source, filepath", "def _get_template_filename(self):\n file_name = ReportMeta.reports[self._report_key]['fileName']\n return '{}.html'.format(file_name)", "def get_template_path(relative_path):\r\n\r\n if not is_request_in_microsite():\r\n return relative_path\r\n\r\n microsite_template_path = str(get_value('template_dir'))\r\n\r\n if microsite_template_path:\r\n search_path = os.path.join(microsite_template_path, relative_path)\r\n\r\n if os.path.isfile(search_path):\r\n path = '{0}/templates/{1}'.format(\r\n get_value('microsite_name'),\r\n relative_path\r\n )\r\n return path\r\n\r\n return relative_path", "def _find_relative(self, spec):\n if spec.template_rel_path is not None:\n return os.path.split(spec.template_rel_path)\n # Otherwise, determine the file name separately.\n\n locator = self.loader._make_locator()\n\n # We do not use the ternary operator for Python 2.4 support.\n if spec.template_name is not None:\n template_name = spec.template_name\n else:\n template_name = locator.make_template_name(spec)\n\n file_name = locator.make_file_name(template_name, spec.template_extension)\n\n return (spec.template_rel_directory, file_name)", "def _FindTemplateFile(self, topdir):\n if topdir.endswith('..'):\n topdir = '/'.join(topdir.split('/')[:-2])\n fnames = os.listdir(topdir)\n for fname in fnames:\n filename = '%s/%s' % (topdir, fname)\n if filename.endswith('.yaml') and not os.path.isdir(filename) and \\\n os.path.exists(filename):\n f = open(filename, 'r')\n magic_code = f.read(22)\n f.close()\n if '#!fmri_file_template' in magic_code:\n return filename\n return None", "def get_file_name(replay_dir, template_name):\n suffix = '.json' if not template_name.endswith('.json') else ''\n file_name = f'{template_name}{suffix}'\n return os.path.join(replay_dir, file_name)", "def _find_base_path(self):\n paths = [path for path, content in self._templates]\n if len(paths) == 1:\n return os.path.dirname(paths[0])\n return common_path_prefix(paths)", "def getTemplateDir():\n return os.path.join(Configurations.getProjectRootDir(), TEMPLATE_DIR_NAME)", "def file_path(self) -> global___Expression:", "def get_source(self, environment, template):\n if \"/\" not in template:\n path = template.split(\".\")\n\n # The first part of the path must be a bundle name\n # The other parts are the hierarchy of directory after 'views'\n bundle = path[0]\n sub_hierarchy = \"/\".join(path[1:])\n path = \"bundles/\" + bundle + \"/views/\" + sub_hierarchy + \".jj2\"\n else:\n path = template\n\n path = join(self.server.user_directory, path)\n if not exists(path):\n raise TemplateNotFound(template)\n\n mtime = getmtime(path)\n with open(path, 'r', encoding=\"utf-8\") as file:\n source = file.read()\n\n return source, path, lambda: mtime == getmtime(path)", "def find_template_filename(self, template_name):\n\n def next_file():\n filename = self.path / template_name\n yield filename\n try:\n exts = self.default_file_extensions\n except AttributeError:\n return\n\n strfilename = str(filename)\n for ext in exts:\n yield Path(strfilename + ext)\n\n for filename in next_file():\n if filename.is_file():\n return filename", "def _filepath(self, filename):\n return os.path.join(self.root, self.version, 
filename)", "def _get_cfn_template_file_name(self, cfn_template_path: str) -> str:\n base_name = os.path.basename(cfn_template_path)\n (file_name, ext) = os.path.splitext(base_name)\n return file_name", "def _get_resource_path(filename, path=Path.TEST):\n return os.path.normpath(os.path.join(path.value, filename))", "def file_path(self):\n return self.lib.file_path", "def _get_pubchem_template_path(self, het_id):\n path = os.path.join(self.pubchem_templates, f\"{het_id}.sdf\")\n\n return path if os.path.isfile(path) else \"\"", "def get_file_path(self,filename):\n return Path(self.resource_path,filename)", "def path(self, f):\n\t\treturn os.path.join(self.directory, f)", "def _get_template(self):\n try:\n template_path = current_app.config.get('REPORT_TEMPLATE_PATH')\n template_code = Path(f'{template_path}/{self._get_template_filename()}').read_text()\n # substitute template parts\n template_code = self._substitute_template_parts(template_code)\n except Exception as err: # noqa: B902; just logging\n current_app.logger.error(err)\n raise err\n return template_code", "def template_dir(self):\n return self.cm.get(YAML_CONFIG_TEMPLATE_DIR)", "def template():\n return ENVIVIRTUALIZABLEURI('DEFile')", "def get_path(self):\n\n if not self.path:\n Settings.err_print(\"missing file path\")\n return \"\"\n return self.path", "def file_path() -> str:\n stack_t = inspect.stack()\n ins = inspect.getframeinfo(stack_t[1][0])\n return os.path.abspath(ins.filename)", "def get_file_path(filename: str):\n return TEMP_DIR.joinpath(filename)", "def path(self):\n return self.file_path()", "def sed_template_filename(sedtype):\n path = datapath.sed_template_path()\n filename = 'SEDtemplate_'+sedtype.lower()+'.fits'\n return join(path, filename)", "def get_resource(filename: str, path: str | None = None) -> str:\n root = Path(__file__).parent\n full_path = root if path is None else root / Path(path)\n return str(full_path / filename)", "def get_file_path(self):\n return self._file_path", "def path(self) -> str:\n return self.src + \"/\"", "def get_file_path(filename, path='Data/'):\n path= os.path.abspath(os.path.dirname(path))\n return os.path.join(path, filename)", "def path(self):\n\n if self.file_func:\n path = self.file_func(self.lookup_obj, **self.pattern_params)\n return FilePath(path=path)\n return FilePath(path=\"\")", "def get_file(self):\n return self.dir + self.file_name + self.extension", "def determine_template_by_path(path):\n path = path.lstrip('/')\n\n path_chunks = re.split('\\/', path)\n if len(path_chunks) <= 1:\n return path\n else:\n \"\"\"\n For now be ignorant and just return the\n first entry of the list as the possible template\n name, so in fact we only have a 1 level deep structure\n \"\"\"\n return '_%s.html' % path_chunks[0]", "def structure_file_path(self):\n return os.path.join(\n self.base_path,\n self.structure_dir,\n self.content_path,\n self.structure_filename\n )", "def generate_filename_template_path(output_dir, filename_template):\n if output_dir:\n os.makedirs(output_dir, exist_ok=True)\n return os.path.join(output_dir, filename_template)\n return None", "def config_dir(template_file_path=None):\n if template_file_path:\n return os.path.dirname(template_file_path)\n\n return os.getcwd()", "def html_template_file(self):\n pass", "def htmlpath(path):\n filelocation = f\"public/pages/{path}\"\n print(filelocation)\n\n return filelocation", "def file_path(file_name, path):\n return path.rstrip('\\/') + \"/{0}\".format(file_name) if path else os.getcwd() + 
\"/{0}\".format(file_name)", "def _localfile(name):\n return os.path.abspath(resource_filename(__name__, name))", "def get_filename(self, path, params, type_=None):\n phase = self.phase\n\n if type_:\n phase += ('_' + type_)\n\n filename = self.FILENAME_TEMPLATES[phase].format(**params)\n\n return os.path.join(path, filename)", "def get_templates_dir(self):\n return self.templates_dir", "def get_file_path(self):\n if self.file_path is None:\n return None\n if self.file_path.endswith('.pyc'):\n return self.file_path[:-1]\n return self.file_path", "def filepath(self):\n return self.file.path", "def get_path(self):\n try:\n return self._file.path\n except AttributeError:\n return os.path.abspath(self._file.name)", "def get_content_path(content):", "def get_filename(filepath):\n return filepath.replace(\"{}\\\\\".format(RES_DIR), \"\")", "def file_path(self):\n if not self._has_tmp_file_path():\n return None\n return self._get_tmp_file_path()", "def get_template(self, name):\n with open(name, 'r+') as open_f:\n template_content = open_f.read()\n return template_content", "def tex_base_path(self, file_path):\n file_path = os.path.normpath(file_path)\n try:\n base_path = self._import_base_paths[file_path]\n except KeyError:\n base_path, _ = os.path.split(self._tex_root)\n return base_path", "def templates_folder(self):\n return os.path.join(\n os.path.dirname(__file__), \"default_config\", \"divvy_templates\"\n )", "def temporary_file_path(self):\n return self.file.name", "def FilePath(self) -> str:", "def include_abs_path_in_templates(file_path):\n template_path = get_abs_path(file_path, 'views')\n TEMPLATE_PATH.insert(0, template_path)", "def get_template_path_with_theme(relative_path):\n relative_path = os.path.normpath(relative_path)\n\n theme = get_current_theme()\n\n if not theme:\n return relative_path\n\n # strip `/` if present at the start of relative_path\n template_name = re.sub(r'^/+', '', relative_path)\n\n template_path = theme.template_path / template_name\n absolute_path = theme.path / \"templates\" / template_name\n if absolute_path.exists():\n return str(template_path)\n else:\n return relative_path", "def recipe_path(file_path, prefix=\"compute_\"):\n dir_path = os.path.dirname(os.path.abspath(file_path))\n filename = os.path.basename(file_path).replace(\"test_\", prefix)\n return F\"{dir_path}/../../recipes/{filename}\" # noqa F501", "def get_temp_file_path(self, filename, root=None):\n root = root or self.get_default_temp_dir()\n return root.join(filename)", "def _get_template(specified_template, default_template):\n template_file_path = specified_template\n if template_file_path:\n if not (os.path.exists(template_file_path) and os.path.isfile(template_file_path)):\n LOG.error(u\"Template file: %s doesn't exist, using default template\",\n template_file_path)\n template_file_path = None\n\n if not template_file_path:\n # using default template\n template_file_path = os.path.join(\n os.path.dirname(os.path.realpath(__file__)),\n default_template\n )\n\n LOG.debug(u\"template file used: %s\", template_file_path)\n with open(template_file_path, \"r\") as definition:\n return definition.read()", "def get_file_from_path(file_path):\n return Utils.get_real_file_path(file_path)", "def get_path_from_template(path_template: str, path_type: PathType = PathType.AUTO) -> str:\r\n # automatically select path type depending on running OS\r\n if path_type == PathType.AUTO:\r\n if platform.system() == \"Windows\":\r\n path_type = PathType.WINDOWS\r\n elif platform.system() == \"Linux\":\r\n 
path_type = PathType.LINUX\r\n else:\r\n raise RuntimeError(\"Unknown platform\")\r\n\r\n path_template = path_template.replace(\"<USERNAME>\", get_user_name())\r\n\r\n # return correctly formatted path\r\n if path_type == PathType.WINDOWS:\r\n return str(pathlib.PureWindowsPath(path_template))\r\n elif path_type == PathType.LINUX:\r\n return str(pathlib.PurePosixPath(path_template))\r\n else:\r\n raise RuntimeError(\"Unknown platform\")", "def getFile(self):\n #try to redetect the filetype\n vim.command(\"filetype detect\")\n #return the filetype\n filetype = vim.eval(\"&ft\")\n #filetype = vim.command(\"&ft\")\n if filetype:\n for file in self.template_files:\n if filetype.lower() in file.lower():\n self.hasTemplate = True\n return open(self.template_folder + \"/\" + file, 'r')\n return None", "def _template_file_default(self):\n return \"index\"", "def get_upload_path(instance, filename):\n \n userpath = \"{name}/{file}\".format(name=instance.user.username, file=filename)\n mainpath = os.path.join(\"infocomp\",userpath)\n return mainpath", "def file_root(self):\n return os.path.join(CredentialApplication.FILE_ROOT, self.slug)", "def get_template(self, template):\n\n\n env = Environment(\n loader=FileSystemLoader('templates')\n )\n return env.get_template(template)", "def join_path(self, template, parent):\n if (template.startswith('./')):\n return os.path.join(os.path.dirname(parent), template)\n return template", "def __default_pptx_path(self):\n thisdir = os.path.split(__file__)[0]\n return os.path.join(thisdir, 'templates', 'default.pptx')", "def template_dir(self):\n return os.path.join(Config().template_dir(), 'platform')", "def get_test_file_path(filename):\n return os.path.join('parsers', 'tests', 'fixtures', 'table-widget', filename)", "def get_file_path_in_project_directory(filename): \n DIR = os.path.dirname(os.path.abspath(\"__file__\")) \n path = os.path.join(DIR, filename)\n return path", "def template(self):\n return self.conf.get(\"template\", None)", "def _get_tmp_file_path(self):\n return os.path.join(self.tmp_dir, self.hash)", "def get_aug_path(file_path: str) -> str:\n return \"/files%s\" % file_path", "def get_path(data_path):\n\treturn os.path.dirname(os.path.realpath(__file__)) + os.sep + data_path", "def make_file_path_component(self, filepath) -> TestPathComponent:\n if base_path:\n filepath = os.path.relpath(filepath, start=base_path)\n return {\"type\": \"file\", \"name\": filepath}", "def _pf(file_path):\n return os.path.join(self.wf_path, file_path)", "def source_file_path(self) -> str:\n return self._source_file_path", "def make_path(self, filename):\n return os.path.join(self.root_path, filename)", "def get_template_name(request, base_template_name):\n template_base_dir = get_template_base_directory(request)\n return f\"cast/{template_base_dir}/{base_template_name}\"", "def get_path(self, project_file=None):\n root = os.path.abspath(\n os.path.join(os.path.dirname(__file__), '..', '..')\n )\n if project_file:\n return os.path.join(root, project_file)\n else:\n return root", "def get_static_data(self, settings):\n fpath = settings['file_loc']\n return fpath", "def get_template_name(self):\n if self.template_name:\n return self.template_name\n\n if Path('_templates/global/WaitPage.html').exists():\n return 'global/WaitPage.html'\n return 'otree/WaitPage.html'", "def find_custom_template(args):\n for arg in args:\n if os.path.isdir(arg):\n dirlist = os.listdir(arg)\n if \"custom.html\" in dirlist:\n return os.path.join(arg, \"custom.html\")\n elif 
\"custom.jinja\" in dirlist:\n return os.path.join(arg, \"custom.jinja\")" ]
[ "0.8357311", "0.7725359", "0.7723076", "0.76889884", "0.76159966", "0.76117176", "0.7574724", "0.75500005", "0.7501025", "0.7403312", "0.7396856", "0.72851056", "0.7204504", "0.7142604", "0.7068567", "0.7013538", "0.6973091", "0.69499636", "0.69488835", "0.6947856", "0.6941594", "0.69366705", "0.68817866", "0.68316656", "0.68169194", "0.67917234", "0.6784126", "0.67006415", "0.66893625", "0.6680339", "0.66792977", "0.66726065", "0.6647778", "0.6627993", "0.6607229", "0.6596431", "0.6591503", "0.65896314", "0.6584746", "0.65018636", "0.6470834", "0.6467302", "0.6461222", "0.646028", "0.64475304", "0.6444526", "0.6440252", "0.6433495", "0.64155734", "0.64149886", "0.6414895", "0.6413544", "0.64113194", "0.6406805", "0.6381555", "0.6376796", "0.6374752", "0.63745946", "0.6355177", "0.63503546", "0.6349852", "0.6347623", "0.634447", "0.6334723", "0.63289934", "0.632653", "0.63221604", "0.6313259", "0.628712", "0.6283809", "0.6277515", "0.6275258", "0.62751853", "0.6274596", "0.626857", "0.6264805", "0.6261832", "0.62613606", "0.62600255", "0.62593484", "0.6256387", "0.6253408", "0.62493354", "0.6244601", "0.62306625", "0.6227065", "0.6205738", "0.6199897", "0.6195191", "0.6177367", "0.6172251", "0.6167253", "0.616446", "0.6158646", "0.6158187", "0.61552435", "0.615453", "0.6149633", "0.6148921", "0.61380726", "0.6126921" ]
0.0
-1
source_address argument used only for python2.7 compatibility
def create_connection_nodelay(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                              source_address=None):
    msg = "getaddrinfo returns an empty list"
    host, port = address
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            sock.connect(sa)
            return sock
        except socket.error as msg:
            if sock is not None:
                sock.close()
    raise socket.error(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getSource():", "def add_source_address(self, srcAddr):\n self.source.address = srcAddr", "def add_source_address(self, srcAddr):\n self.source.address = srcAddr", "def getAddressSourceInfo(self, address: ghidra.program.model.address.Address) -> ghidra.program.database.mem.AddressSourceInfo:\n ...", "def get_source(self):", "def test_get_source_ip(self):\n pass", "def test_script_p2_s_address(self):\n pass", "def get_source(cls, *args, **kwargs): # real signature unknown\n pass", "def get_source(cls, *args, **kwargs): # real signature unknown\n pass", "def get_source(cls, *args, **kwargs): # real signature unknown\n pass", "def do_source(self, args):\n self.source = int(args)", "def main(source):\n pass", "def main(ctx, debug, address):\n ctx.obj['address'] = address", "def setAddressSource(self, address_source):\n # type: (str)->None\n\n self._validator.validate_one(\n 'source', VALID_OPTS['source'], address_source)\n self._ifAttributes['source'] = address_source", "def address(self):\n ...", "def do_inspect_with_source(self, arg):\n self._do_inspect(arg, with_source=True)", "def test_script_p2_sh_address(self):\n pass", "def source(something):\n # How to source? It's impossible. It's like: we fork, run bash, run\n # ourselves in the environment set up by Bash.\n raise NotImplementedError('Impossible.')", "def src(self) -> 'Data_Address':\n return Data_Address(self._info.sha, self._info.spa)", "def test_source_location_construction():\n _ir.SourceLocation(\"foo\", \"bar.py\", 10)", "def preprocess_python_source(self, module, source):\n\n return source", "def _tell_source(self) -> int:\n raise NotImplementedError() # pragma: no cover", "def __init__(self, source):\r\n self.source = source", "def _remoteScript(self, source_script):", "def add_source_achors():\n pass", "def magic_psource(self, parameter_s=''):\n self._inspect('psource',parameter_s)", "def __init__(self, source):\n self.source = source", "def __init__(self, source):\n self.source = source", "def source_url(self, target_url):\n raise NotImplementedError()", "def LocalAddress(self) -> _n_5_t_0:", "def get_tool_source_path(self, uri_like):", "def addSource(self, source):\n self.tprint('source ' + source)", "def source(self):\n return some.dap.source(py.path.local(self.co_filename))", "def __init__(self, source):\n self._source = source", "def getSourceStamp():\n pass", "def getSourceStamp():\n pass", "def source_interfaces(self):", "def source_interfaces(self):", "def _get_address(self):\n return self.__address", "def importaddress(self, *args, **kwargs):\n return True", "def __init__(self, source, *args, **kwargs):\n super(self.__class__, self).__init__()\n self._source = source\n self.provides = source.provides", "def source(self):\n return some.dap.source(\"<string>\")", "def __init__(self, name: unicode, entry: ghidra.program.model.address.Address, body: ghidra.program.model.address.AddressSetView, source: ghidra.program.model.symbol.SourceType, findEntryPoint: bool, recreateFunction: bool):\n ...", "def use_source(self, src_node):\n self._bld.Addreference(self.get_sobj(), src_node.get_sobj())", "def check_for_source_in_parent(elf, addr):\n result = subprocess.run(\n (DWARFDUMP, \"--lookup=0x\" + addr, \"-p\", elf), capture_output=True, text=True\n )\n dwarfdump = result.stdout\n matches = re.findall(dw_at_file_re, dwarfdump)\n\n def getFile(line):\n return line.strip().split('\"')[1]\n\n source_files = list(map(getFile, matches))\n for (i, f) in enumerate(source_files[::-1]):\n if \"/core/\" not in f:\n line_matches = 
re.findall(dw_at_line_re, dwarfdump)\n\n def getLine(line):\n return line.strip().split(\"(\")[1].split(\")\")[0]\n\n source_lines = list(map(getLine, line_matches))\n source_line = source_lines[::-1][i]\n return (f, source_line)\n return (\"\", \"\")", "def activate_source(self):\n pass", "def set_source(self, source_name):\n self.source = source_name", "def source(self) -> str | Path:\n ...", "def setSourceFile(filename):", "def __init__(self, source, *args, **kwargs):\n super(self.__class__, self).__init__()", "def link(address):", "def source(self):\n return self._source_code", "def get_src_to_inst(self) -> int:\n\n # get the Qt document\n doc: QCodeDocument = self.document()\n\n # get the current position of the cursor\n cursor = self.textCursor()\n pos = cursor.position()\n\n # get the node at the associated cursor position\n current_node = doc.get_stmt_node_at_position(pos)\n\n if (\n current_node is not None\n and hasattr(current_node, \"tags\")\n and current_node.tags is not None\n and \"ins_addr\" in current_node.tags\n ):\n asm_ins_addr = current_node.tags[\"ins_addr\"]\n\n else:\n # the top of the function decompiled\n asm_ins_addr = self._code_view.function.addr\n\n return asm_ins_addr", "def resolve(self, address):", "def _source_type(self):\n pass", "def give_source(self):\n has_src, src_sobj = self.get_sobj().ReferencedObject()\n if has_src:\n return self.__class__(self._std, self._bld, src_sobj.GetID())", "def source():\n\n source = models.Source(name=u\"Joe's Funerals.com\", url=u\"http://www.joesfunerals.com\")\n return source", "def get_binary_start_address(target_binary):\n obj_dump = subprocess.Popen([\"objdump\", \"-f\", target_binary],stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n results = obj_dump.stdout.read().decode()\n start_address = results.strip()[-10:]\n return start_address", "def add_source(self, name, position):#)*args, **kwargs):\n return self._add_object(name, Source, position)#*args, **kwargs)", "def source_ip(self) -> Optional[str]:\n return pulumi.get(self, \"source_ip\")", "def getSource(self):\n return urllib2.urlopen(Parser.SOURCE_URL)", "def usrp_source_make(*args):\n return _uhd_swig.usrp_source_make(*args)", "def SphinxDummySourceClass(source: Any, *args: Any, **kwargs: Any) -> Any:\n return source", "def _getscriptsource(self):\n\t\tscriptname = misc.sysinfo.script_name.rstrip(\"c\")\n\t\ttry:\n\t\t\tencoding = tokenize.detect_encoding(open(scriptname, \"rb\").readline)[0]\n\t\t\twith open(scriptname, \"r\", encoding=encoding, errors=\"replace\") as f:\n\t\t\t\tself.source = f.read()\n\t\texcept IOError: # Script might have called ``os.chdir()`` before\n\t\t\tself.source = None", "def getsource(object):\r\n lines, lnum = getsourcelines(object)\r\n return string.join(lines, '')", "def source(self, irc, msg, args):\n irc.reply('My source is at http://supybot.com/')", "def sources(self):\n raise NotImplementedError()", "def _get_source_address(course_id, course_title):\r\n course_title_no_quotes = re.sub(r'\"', '', course_title)\r\n\r\n # For the email address, get the course. 
Then make sure that it can be used\r\n # in an email address, by substituting a '_' anywhere a non-(ascii, period, or dash)\r\n # character appears.\r\n from_addr = u'\"{0}\" Course Staff <{1}-{2}>'.format(\r\n course_title_no_quotes,\r\n re.sub(r\"[^\\w.-]\", '_', course_id.course),\r\n settings.BULK_EMAIL_DEFAULT_FROM_EMAIL\r\n )\r\n return from_addr", "def source_code(obj):\n print(inspect.getsource(obj))", "def setBytes(self, addr: ghidra.program.model.address.Address, source: List[int]) -> None:\n ...", "def __init__(self, source_code, bytecode=None, address=None, abi=SOPHIA, client=None):\n if client is None:\n client = epoch.EpochClient()\n self.client = client\n self.abi = abi\n self.source_code = source_code\n self.bytecode = bytecode\n self.address = address\n if self.bytecode is None:\n self.bytecode = self.compile(self.source_code)", "async def _botsource(self, ctx):\r\n source_link = \"https://github.com/Simalary/SimsVIP.Servo\"\r\n await self.bot.say('{0.message.author.mention}, my source code is available at <{1}>.'.format(ctx, source_link))", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def source(self, source):\n\n self._source = source", "def Address(self) -> _n_5_t_0:", "def source(self) -> str:\n return self._source", "def get_address(machine: Machine) -> str:\n default_route, _ = machine.run(\"ip route get 8.8.8.8\")\n return re.search(\" src ([0-9.]+) \", default_route).group(1)", "def _resolveSourcePath(self, sources, source):\n source = copy.deepcopy(source)\n if source['path'] != '__none__':\n sourcePath = Path(source['path'])\n source['path'] = self._basePath / sourcePath\n if not source['path'].is_file():\n altpath = self._basePath.parent / sourcePath / sourcePath.name\n if altpath.is_file():\n source['path'] = altpath\n if not source['path'].is_file():\n raise TileSourceFileNotFoundError(str(source['path']))\n sources.append(source)", "def get_source_unicode(obj):\n return inspect.getsource(obj)", "def _get_source(self, fullmodname):\n submodname, is_package, relpath = self._get_info(fullmodname)\n fullpath = self.path_entry + relpath\n source = self.datablocks[relpath]\n if hasattr(source, \"decode\"):\n source = source.decode(\"UTF-8\")\n source = source.replace('\\r\\n', '\\n')\n source = source.replace('\\r', '\\n')\n return submodname, is_package, fullpath, source", "def get_source(self):\n\t\treturn self.source.get_source()", "def getsource(object):\n lines, lnum = getsourcelines(object)\n return string.join(lines, '')", "def get_source_for_option(self, section, option):\n raise NotImplementedError", "def __init__(self, name: unicode, entries: ghidra.program.model.address.AddressSetView, body: ghidra.program.model.address.AddressSetView, source: ghidra.program.model.symbol.SourceType, findEntryPoint: bool, recreateFunction: bool):\n ...", "def SYS_addr(self, addr):\n\t\tpass", "def get_source(self) -> Optional[str]:\n return self._source", "def source(self):\n return self.__source", "def set_source(self, source):\n self.data['source'] = source", "def test_get_source_log(self):\n pass", "def nameToAddress(self, name):\n pass", "def set_source_file(self, source_file):\n self.set_attribute(\"source_file\", source_file)", "def getSourceStamp():\n # TODO: it should be possible to 
expire the patch but still remember\n # that the build was r123+something.", "def findsource(obj):\n filename = inspect.getsourcefile(obj)\n if filename:\n linecache.checkcache(filename)\n return inspect.findsource(obj)", "def _read_sourced_path(self, line):\n # type: (str)->tp.Optional[str]\n if line.startswith('source '):\n sline = [x.strip() for x in line.split()]\n sline.pop(0)\n path = ' '.join(sline)\n if not os.path.isabs(path):\n current_root = self._root_interfaces_path\n if os.path.isfile(current_root):\n current_root = os.path.dirname(current_root)\n path = os.path.join(current_root, path)\n return path\n return None", "def Sourceify(path):\n return path", "def getAbsoluteSourceStamp(self, got_revision):" ]
[ "0.6894374", "0.667033", "0.667033", "0.63841516", "0.62165433", "0.613326", "0.6117469", "0.61146295", "0.61146295", "0.61146295", "0.60979617", "0.6066222", "0.6025801", "0.59759766", "0.59759283", "0.58986866", "0.586193", "0.58330524", "0.5787249", "0.5779645", "0.5777858", "0.57544225", "0.5753867", "0.57499427", "0.5722155", "0.57221365", "0.57096535", "0.57096535", "0.570579", "0.5705618", "0.5704884", "0.5704246", "0.56879663", "0.5685912", "0.56535506", "0.56535506", "0.55796415", "0.55796415", "0.55773443", "0.5546957", "0.5528929", "0.5490674", "0.5490408", "0.5483763", "0.54772884", "0.5471698", "0.54480463", "0.54450524", "0.5442056", "0.54415685", "0.5440905", "0.5384398", "0.53603077", "0.5359627", "0.53574705", "0.5352641", "0.5352263", "0.53519434", "0.5346787", "0.5345824", "0.53227645", "0.531546", "0.5285991", "0.5281948", "0.5276952", "0.5268106", "0.5266898", "0.5265598", "0.52616304", "0.52507395", "0.52445626", "0.5244185", "0.5243522", "0.5243522", "0.5243522", "0.5243522", "0.5243522", "0.5243522", "0.5243522", "0.52407104", "0.52352846", "0.522839", "0.5222656", "0.5201532", "0.5200866", "0.5199037", "0.51962924", "0.5193304", "0.5184699", "0.5172838", "0.51593363", "0.51590866", "0.5151639", "0.51448625", "0.51399124", "0.5134052", "0.51335007", "0.5129635", "0.5123817", "0.51162755", "0.5113427" ]
0.0
-1
sample from the MACKRL tree
def select_actions(self, inputs, avail_actions, tformat, info, hidden_states=None, test_mode=False, **kwargs): noise_params = kwargs.get("noise_params", None) T_env = info["T_env"] test_suffix = "" if not test_mode else "_test" if self.args.agent_level1_share_params: # --------------------- LEVEL 1 if self.is_obs_noise(test_mode): inputs_level1, inputs_level1_tformat = _build_model_inputs(self.input_columns_level1_noisy, inputs, to_variable=True, inputs_tformat=tformat) inputs_level1_tformat = "a*bs*t*v" else: inputs_level1, inputs_level1_tformat = _build_model_inputs(self.input_columns_level1, inputs, to_variable=True, inputs_tformat=tformat) if self.args.debug_mode: _check_nan(inputs_level1) out_level1, hidden_states_level1, losses_level1, tformat_level1 = self.model.model_level1(inputs_level1["agent_input_level1"], hidden_states=hidden_states["level1"], loss_fn=None, tformat=inputs_level1_tformat, n_agents=self.n_agents, test_mode=test_mode, **kwargs) if self.args.debug_mode: _check_nan(inputs_level1) if self.is_obs_noise(test_mode): # have to do correlated sampling of what pair id everyone agrees on bs = out_level1.shape[_bsdim(inputs_level1_tformat)] ftype = th.FloatTensor if not out_level1.is_cuda else th.cuda.FloatTensor sampled_pair_ids = ftype(*out_level1.shape[:-1], 1) for _b in range(bs): ps = out_level1[:, _b] rn = np.random.random() for _a in range(ps.shape[0]): act = 0 s = ps[_a, 0, act] while s <= rn: act += 1 s += ps[_a, 0, act] sampled_pair_ids[_a, _b, 0, :] = act modified_inputs_level1 = inputs_level1 selected_actions_format_level1 = "a*bs*t*v" else: # TODO: This is the pair-product encoded ID of both selected pairs. sampled_pair_ids, modified_inputs_level1, selected_actions_format_level1 = self.action_selector.select_action({"policies":out_level1}, avail_actions=None, tformat=tformat_level1, test_mode=test_mode) _check_nan(sampled_pair_ids) if self.args.debug_mode in ["level2_actions_fixed_pair"]: """ DEBUG MODE: LEVEL2 ACTIONS FIXED PAIR Here we pick level2 actions from a fixed agent pair (0,1) and the third action from IQL """ assert self.n_agents == 3, "only makes sense in n_agents=3 scenario" sampled_pair_ids.fill_(0.0) # sample which pairs should be selected # TODO: HAVE TO ADAPT THIS FOR NOISY OBS! if self.is_obs_noise(test_mode): self.selected_actions_format = selected_actions_format_level1 else: self.actions_level1 = sampled_pair_ids.clone() self.selected_actions_format = selected_actions_format_level1 self.policies_level1 = modified_inputs_level1.squeeze(0).clone() if self.is_obs_noise(test_mode): inputs_level2, inputs_level2_tformat = _build_model_inputs(self.input_columns_level2_noisy, inputs, to_variable=True, inputs_tformat=tformat, ) else: inputs_level2, inputs_level2_tformat = _build_model_inputs(self.input_columns_level2, inputs, to_variable=True, inputs_tformat=tformat, ) assert self.args.agent_level2_share_params, "not implemented!" if "avail_actions_pair" in inputs_level2["agent_input_level2"]: pairwise_avail_actions = inputs_level2["agent_input_level2"]["avail_actions_pair"] else: assert False, "NOT SUPPORTED CURRENTLY." 
avail_actions1, params_aa1, tformat_aa1 = _to_batch(inputs_level2["agent_input_level2"]["avail_actions_id1"], inputs_level2_tformat) avail_actions2, params_aa2, _ = _to_batch(inputs_level2["agent_input_level2"]["avail_actions_id2"], inputs_level2_tformat) pairwise_avail_actions = th.bmm(avail_actions1.unsqueeze(2), avail_actions2.unsqueeze(1)) pairwise_avail_actions = _from_batch(pairwise_avail_actions, params_aa2, tformat_aa1) ttype = th.cuda.FloatTensor if pairwise_avail_actions.is_cuda else th.FloatTensor delegation_avails = Variable(ttype(pairwise_avail_actions.shape[0], pairwise_avail_actions.shape[1], pairwise_avail_actions.shape[2], 1).fill_(1.0), requires_grad=False) pairwise_avail_actions = th.cat([delegation_avails, pairwise_avail_actions], dim=_vdim(tformat)) out_level2, hidden_states_level2, losses_level2, tformat_level2 \ = self.model.models["level2_{}".format(0)](inputs_level2["agent_input_level2"], hidden_states=hidden_states["level2"], loss_fn=None, tformat=inputs_level2_tformat, # sampled_pair_ids=sampled_pair_ids, # UNUSED? pairwise_avail_actions=pairwise_avail_actions, test_mode=test_mode, seq_lens=inputs["agent_input_level2__agent0"].seq_lens, **kwargs) if self.is_obs_noise(test_mode): # have to do correlated sampling of what pair id everyone agrees on bs = out_level2.shape[_bsdim(inputs_level2_tformat)] ftype = th.FloatTensor if not out_level2.is_cuda else th.cuda.FloatTensor pair_sampled_actions = ftype(*out_level2.shape[:-1], 1).view(int(out_level2.shape[0]/2), 2, *out_level2.shape[1:-1], 1) for _b in range(bs): ps = out_level2.view(int(out_level2.shape[0]/2), 2, *out_level2.shape[1:])[:, :, _b] avail_actions = pairwise_avail_actions.view(int(out_level2.shape[0]/2), 2, *out_level2.shape[1:])[:, :, _b] _sum0 = th.sum(ps[:, 0] * avail_actions[:, 0], dim=-1, keepdim=True) _sum0_mask = (_sum0 == 0.0) _sum0.masked_fill_(_sum0_mask, 1.0) ps[:, 0] = ps[:, 0] * avail_actions[:, 0] / _sum0 _sum1 = th.sum(ps[:, 1] * avail_actions[:, 1], dim=-1, keepdim=True) _sum1_mask = (_sum1 == 0.0) _sum1.masked_fill_(_sum1_mask, 1.0) ps[:, 1] = ps[:, 1] * avail_actions[:, 1] / _sum1 rns = np.random.random(ps.shape[0]) #one seed for each pair / batch for _a in range(ps.shape[0]): for _j in range(2): act = 0 s = ps[_a, _j, 0, act] while s <= rns[_a]: act += 1 s += ps[_a, _j, 0, act] if act == 122: # DEBUG a = 5 pass pair_sampled_actions[_a, _j, _b, 0, :] = act # TODO: Fix the return values so I can debug in episode buffer!!! modified_inputs_level2 = inputs_level2 selected_actions_format_level2 = "a*bs*t*v" else: # TODO: Implement for noisy obs!! 
# Need again correlated sampling pair_sampled_actions, \ modified_inputs_level2, \ selected_actions_format_level2 = self.action_selector.select_action({"policies":out_level2}, avail_actions=pairwise_avail_actions.data, tformat=tformat_level2, test_mode=test_mode) # if th.sum(pair_sampled_actions == 26.0) > 0.0: # a = 5 if sampled_pair_ids.shape[_tdim(tformat_level1)] > 1: # only used for mackrl sampling sampled_pairs = th.cat([ self.magic_map[sampled_pair_ids[:,:,_t:_t+1,:].long()].squeeze(2) for _t in range(sampled_pair_ids.shape[_tdim(tformat_level1)]) ], dim=_tdim(tformat_level1)) else: sampled_pairs = self.magic_map[sampled_pair_ids.long()].squeeze(2) self.actions_level2 = pair_sampled_actions.clone() if self.is_obs_noise(test_mode): self.actions_level2_sampled = [] for _aid in range(self.n_agents): self.actions_level2_sampled.append([]) for i in range(sampled_pairs.shape[-1]): self.actions_level2_sampled[_aid].append( pair_sampled_actions[:, i].gather(0, sampled_pairs[_aid:_aid+1, :, :, i:i + 1].long())) self.actions_level2_sampled[_aid] = th.cat(self.actions_level2_sampled[_aid], 0) else: # ToDO: Gather across all selected pairs!! self.actions_level2_sampled = [] for i in range(sampled_pairs.shape[-1]): self.actions_level2_sampled.append(pair_sampled_actions.gather(0, sampled_pairs[:,:,:,i:i+1].long())) self.actions_level2_sampled = th.cat(self.actions_level2_sampled, 0) self.selected_actions_format_level2 = selected_actions_format_level2 self.policies_level2 = modified_inputs_level2.clone() inputs_level3, inputs_level3_tformat = _build_model_inputs(self.input_columns_level3, inputs, to_variable=True, inputs_tformat=tformat, ) action_tensor = None if self.is_obs_noise(test_mode): action_tensor = ttype(self.n_agents, sampled_pairs.shape[_bsdim(tformat)], sampled_pairs.shape[_tdim(tformat)], 1).fill_(float("nan")) for _bid in range(sampled_pairs.shape[_bsdim(tformat)]): # each agent has it's own assumptions about what pair-wise actions were sampled! for _aid in range(self.n_agents): # work out which pair id agent _aid is in (if any) and whether at first or second position partid = None posid = None #for _partid, _part in enumerate(_ordered_2_agent_pairings(self.n_agents)): combid = int(sampled_pair_ids[_aid, _bid, 0, 0].item()) part = list(_ordered_2_agent_pairings(self.n_agents))[combid] for pid, p in enumerate(part): agentids = _pairing_id_2_agent_ids(p, self.n_agents) if agentids[0] == _aid: partid = pid posid = 0 break if agentids[1] == _aid: partid = pid posid = 1 break pass if partid is not None: # ok so what actions did agent _aid finally select? joint_act = self.actions_level2_sampled[_aid][partid,_bid,0,0].item() joint_act_dec = _joint_actions_2_action_pair(int(joint_act), self.n_actions) if joint_act_dec == 11: # DEBUG a = 5 if joint_act_dec != 0: # else delegate action_tensor[_aid,_bid,0,:] = joint_act_dec[posid] else: # decentralized anyway! 
pass else: action_tensor = ttype(self.n_agents, pair_sampled_actions.shape[_bsdim(tformat)], pair_sampled_actions.shape[_tdim(tformat)], 1).fill_(float("nan")) for i in range(sampled_pairs.shape[-1]): sampled_pair = sampled_pairs[:,:,:,i:i+1] pair_id1, pair_id2 = _pairing_id_2_agent_ids__tensor(sampled_pair, self.n_agents, "a*bs*t*v") # sampled_pair_ids.squeeze(0).squeeze(2).view(-1), self.n_agents) avail_actions1 = inputs_level3["agent_input_level3"]["avail_actions"].gather( _adim(inputs_level3_tformat), Variable(pair_id1.repeat(1, 1, 1, inputs_level3["agent_input_level3"][ "avail_actions"].shape[_vdim(inputs_level3_tformat)]))) avail_actions2 = inputs_level3["agent_input_level3"]["avail_actions"].gather( _adim(inputs_level3_tformat), Variable(pair_id2.repeat(1, 1, 1, inputs_level3["agent_input_level3"][ "avail_actions"].shape[_vdim(inputs_level3_tformat)]))) # selected_level_2_actions = pair_sampled_actions.gather(0, sampled_pair_ids.long()) this_pair_sampled_actions = pair_sampled_actions.gather(0, sampled_pair.long()) actions1, actions2 = _joint_actions_2_action_pair_aa(this_pair_sampled_actions.clone(), self.n_actions, avail_actions1, avail_actions2) # count how often level2 actions are un-available at level 3 # TODO: Verify that 'this_pair_sampled_actions != 0' is the right thing to do!! pair_action_unavail_rate = (th.mean(((actions1 != actions1) & (this_pair_sampled_actions != 0)).float()).item() + th.mean(((actions2 != actions2) & (this_pair_sampled_actions != 0)).float()).item()) / 2.0 if pair_action_unavail_rate != 0.0 and hasattr(self.args, "mackrl_delegate_if_zero_ck") and self.args.mackrl_delegate_if_zero_ck: #assert False, "pair action unavail HAS to be zero in mackrl_delegate_if_zero_ck setting!" self.logging_struct.py_logger.warning("ERROR: pair action unavail HAS to be zero in mackrl_delegate_if_zero_ck setting!") self._add_stat("pair_action_unavail_rate__runner", pair_action_unavail_rate, T_env=T_env, suffix=test_suffix, to_sacred=False) # Now check whether any of the pair_sampled_actions violate individual agent constraints on avail_actions ttype = th.cuda.FloatTensor if self.args.use_cuda else th.FloatTensor action_tensor.scatter_(0, pair_id1, actions1) action_tensor.scatter_(0, pair_id2, actions2) avail_actions_level3 = inputs_level3["agent_input_level3"]["avail_actions"].clone().data self.avail_actions = avail_actions_level3.clone() inputs_level3["agent_input_level3"]["avail_actions"] = Variable(avail_actions_level3, requires_grad=False) out_level3, hidden_states_level3, losses_level3, tformat_level3 = self.model.models["level3_{}".format(0)](inputs_level3["agent_input_level3"], hidden_states=hidden_states["level3"], loss_fn=None, tformat=inputs_level3_tformat, test_mode=test_mode, seq_lens=inputs["agent_input_level3__agent0"].seq_lens, **kwargs) # extract available actions avail_actions_level3 = inputs_level3["agent_input_level3"]["avail_actions"] individual_actions, \ modified_inputs_level3, \ selected_actions_format_level3 = self.action_selector.select_action({"policies":out_level3}, avail_actions=avail_actions_level3.data, tformat=tformat_level3, test_mode=test_mode) self.actions_level3 = individual_actions action_tensor[action_tensor != action_tensor] = individual_actions[action_tensor != action_tensor] # set states beyond episode termination to NaN if self.is_obs_noise(test_mode): action_tensor = _pad_nan(action_tensor, tformat=tformat_level3, seq_lens=inputs["agent_input_level1__agent0"].seq_lens) # DEBUG else: action_tensor = _pad_nan(action_tensor, 
tformat=tformat_level3, seq_lens=inputs["agent_input_level1"].seq_lens) # DEBUG # l2 = action_tensor.squeeze() # DEBUG if self.args.debug_mode in ["level3_actions_only"]: """ DEBUG MODE: LEVEL3 ACTIONS ONLY Here we just pick actions from level3 - should therefore just correspond to vanilla COMA! """ action_tensor = individual_actions self.final_actions = action_tensor.clone() if th.sum(self.final_actions == 11).item() > 0: # DEBUG a = 5 pass if self.is_obs_noise(test_mode): selected_actions_list = [] selected_actions_list += [dict(name="actions", select_agent_ids=list(range(self.n_agents)), data=self.final_actions)] modified_inputs_list = [] else: #self.actions_level3 = individual_actions.clone() self.selected_actions_format_level3 = selected_actions_format_level3 self.policies_level3 = modified_inputs_level3.clone() self.avail_actions_active = avail_actions_level3.data selected_actions_list = [] for _i in range(_n_agent_pair_samples(self.n_agents) if self.args.n_pair_samples is None else self.args.n_pair_samples): #_n_agent_pair_samples(self.n_agents)): selected_actions_list += [dict(name="actions_level1__sample{}".format(_i), data=self.actions_level1[_i])] for _i in range(_n_agent_pair_samples(self.n_agents)): selected_actions_list += [dict(name="actions_level2__sample{}".format(_i), data=self.actions_level2_sampled[_i])] # TODO: BUG!? selected_actions_list += [dict(name="actions_level2", select_agent_ids=list(range(_n_agent_pairings(self.n_agents))), data=self.actions_level2)] selected_actions_list += [dict(name="actions_level3", select_agent_ids=list(range(self.n_agents)), data=self.actions_level3)] selected_actions_list += [dict(name="actions", select_agent_ids=list(range(self.n_agents)), data=self.final_actions)] modified_inputs_list = [] modified_inputs_list += [dict(name="policies_level1", data=self.policies_level1)] for _i in range(_n_agent_pair_samples(self.n_agents)): modified_inputs_list += [dict(name="policies_level2__sample{}".format(_i), data=self.policies_level2[_i])] modified_inputs_list += [dict(name="policies_level3", select_agent_ids=list(range(self.n_agents)), data=self.policies_level3)] modified_inputs_list += [dict(name="avail_actions_active", select_agent_ids=list(range(self.n_agents)), data=self.avail_actions_active)] modified_inputs_list += [dict(name="avail_actions", select_agent_ids=list(range(self.n_agents)), data=self.avail_actions)] #modified_inputs_list += [dict(name="avail_actions", # select_agent_ids=list(range(self.n_agents)), # data=self.avail_actions)] selected_actions_list += [dict(name="actions_onehot", select_agent_ids=list(range(self.n_agents)), data=_onehot(self.final_actions, rng=(0, self.n_actions)))] hidden_states = dict(level1=hidden_states_level1, level2=hidden_states_level2, level3=hidden_states_level3) return hidden_states, selected_actions_list, modified_inputs_list, self.selected_actions_format pass else: assert False, "Not implemented"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sample(self):\n return self._root.sample()", "def sample(self):", "def sample(tree, i, alpha=0.5, beta=0.5, only_tree=True):\n # for n in tree.nodes():\n # lab = tuple(n)\n # if len(n) == 1:\n # lab = \"(\" + str(list(n)[0]) + \")\"\n # tree.node[n] = {\"color\": \"black\", \"label\": lab}\n # print tree.nodes()\n\n if only_tree is True:\n tree_new = tree # Alter the input tree\n else:\n #tree_new = tree.subgraph(tree.nodes()) # nx < 2.0\n tree_new = tree.copy() # nx < 2.0\n\n #print(nocopy)\n #old_G = trilearn.graph.junction_tree.get_graph(tree)\n #(subtree, old_separators, probtree) = glib.random_subtree(tree, alpha, beta)\n\n # plotGraph(subtree, directory+\"subtree_\"+str(i)+\".eps\")\n # for n in subtree.nodes():\n # tree_old.node[n] = {\"color\": \"blue\", \"label\": tuple(n)}\n # if n in tree.nodes():\n # tree.node[n] = {\"color\": \"blue\", \"label\": tuple(n)}\n\n # plotGraph(tree_old.subgraph(tree_old.nodes()),\n # directory + \"tree(\" + str(i-1) + \")p.eps\")\n\n (_, subtree_nodes, subtree_edges, subtree_adjlist,\n old_separators, prob_subtree) = ss.random_subtree(tree, alpha, beta, i)\n\n (old_cliques,\n new_cliques,\n new_separators,\n P,\n neig) = sample_cond_on_subtree_nodes(i, tree_new, subtree_nodes, subtree_edges, subtree_adjlist)\n\n if only_tree is True:\n return tree_new\n #conn_nodes = set()\n #for clique in new_cliques:\n # conn_nodes |= clique\n\n # for n in tree.nodes():\n # lab = tuple(n)\n # if len(n) == 1:\n # lab = \"(\"+str(list(n)[0])+\")\"\n # if n in new_cliques:\n # tree.node[n] = {\"color\": \"red\", \"label\": lab}\n # plotGraph(tree.subgraph(tree.nodes()), directory+\"tree(\"+str(i)+\").eps\")\n\n #G = trilearn.graph.junction_tree.get_graph(tree)\n # G.node[i] = {\"color\": \"red\"}\n # for n in old_G:\n # if n in conn_nodes:\n # old_G.node[n] = {\"color\": \"blue\"}\n # G.node[n] = {\"color\": \"blue\"}\n\n # plotGraph(G, directory+\"G\"+str(i)+\".eps\")\n # plotGraph(old_G, directory+\"G\"+str(i-1)+\"p.eps\")\n\n # Proposal kernel\n K_st = None\n if len(subtree_nodes) == 1:\n # There might be two possible subtrees so\n # we calculate the probabilities for these explicitly\n K_st = pdf(tree, tree_new, alpha, beta, i)\n else:\n K_st = prob_subtree\n for c in P:\n K_st *= P[c] * neig[c]\n return tree_new, K_st, old_cliques, old_separators, new_cliques, new_separators", "def get_next_sample(self):", "def sample(self):\n raise NotImplementedError", "def sample(self):\n raise NotImplementedError", "def sample(self):\r\n raise NotImplementedError", "def sample(self, root, tree, sample_num, for_d):\n\n # all_score = self.sess.run(self.generator.all_score)\n # all_score is a matrix with shape [n_node, n_node]\n all_score = self.generator.all_score\n samples = []\n paths = []\n n = 0\n\n while len(samples) < sample_num:\n current_node = root\n previous_node = -1\n paths.append([])\n is_root = True\n paths[n].append(current_node)\n while True:\n node_neighbor = tree[current_node][1:] if is_root else tree[current_node]\n # print(\"////\", tree[current_node])\n is_root = False\n if len(node_neighbor) == 0: # the tree only has a root\n return None, None\n if for_d: # skip 1-hop nodes (positive samples)\n if node_neighbor == [root]:\n # in current version, None is returned for simplicity\n return None, None\n if root in node_neighbor:\n node_neighbor.remove(root)\n\n # we retrieve embeddings corresponding to current node's neighbors\n # the multiply of g_v with shape (1, 50) and g_vi with shape(1, 50) is a scala\n # to calculate the multiply of g_v and g_vi: 
we calculate the \"multiplication\" (inner product) between embedding_matrix with shape(n_node, 50) and its transpose\n # then saved the result in self.score with shape (n_node, n_node) in dis_torch.py\n # all_score has the shape = (5254, 5254), each row is a list of scala, each scala is the \"multiplication\" (inner product) between a particular node to an other node in the graph\n # due to for each current_node, we have a list of its neighbors, saved in [node_neighbor]\n # we can retrieve a list of scalas that equal to the \"multiplications\" (inner product) between g_v(current node) to its neighbor g_vi\n # to do that, we have:\n relevance_probability = all_score[current_node][node_neighbor]\n\n # convert tensor to numpy array\n relevance_probability = relevance_probability.cpu().detach().numpy()\n\n # finally, applying softmax function, we get the relevance probability of current_node and its neighbors, as formed in the paper\n relevance_probability = utils.softmax(relevance_probability)\n \n # pick a random node from its neighbors based on relevance_probability\n next_node = np.random.choice(node_neighbor, size=1, p=relevance_probability)[0] # select next node\n # print(\"???\", next_node)\n paths[n].append(next_node)\n if next_node == previous_node: # terminating condition\n samples.append(current_node)\n break\n previous_node = current_node\n current_node = next_node\n n = n + 1 # n equal to sample_num\n return samples, paths # for each sample, we get one path from root to that sample", "def sample(self, seg_logit, seg_label):", "def _choose_sample(self):\n\n \t #periodically generate a new reconstruction for the purposes of sampling", "def sample(self, x):", "def sample(self, bqm, **parameters):\n return self.child.sample(bqm, **parameters)", "def sample_from_prior(self):\n raise NotImplementedError", "def sample_tree(self):\n logger.info('TreeCatTrainer.sample_tree given %d rows',\n len(self._added_rows))\n SERIES.sample_tree_num_rows.append(len(self._added_rows))\n complete_grid = self._tree.complete_grid\n edge_logits = self.compute_edge_logits()\n assert edge_logits.shape[0] == complete_grid.shape[1]\n assert edge_logits.dtype == np.float32\n edges = self.get_edges()\n edges = sample_tree(complete_grid, edge_logits, edges)\n return edges, edge_logits", "def sample(self, n):\n raise NotImplementedError", "def bst_100_rand():\n from bbst import Bst\n from random import shuffle\n rando = [num for num in range(100)]\n shuffle(rando)\n tree = Bst(rando)\n return tree", "def prior_sample(self):\n pass", "def _sample(self, rnn_output, temperature):\n pass", "def sample(self):\n raise NotImplementedError(\"Override me!\")", "def test_sample(system_generator):\n\n name, test = system_generator()\n print(name)\n\n w_F, w_R, N_k = test.sample([10, 8], mode=\"wFwR\")\n w_F, w_R, N_k = test.sample([1, 1], mode=\"wFwR\")\n w_F, w_R, N_k = test.sample([10, 0], mode=\"wFwR\")\n w_F, w_R, N_k = test.sample([0, 5], mode=\"wFwR\")", "def sample(self, n=1):\n raise NotImplementedError", "def generate_samples(self, n_samples):", "def generate_samples(self, n_samples):", "def __call__(self, params):\r\n return self.sample(params)", "def posterior_sample(self):\n pass", "def recursion_tree(self,node):\n if node.clades: # for non-leaf node\n tmp = 0\n flag = 0\n for clade in node.clades:\n if flag == 0:\n tmp = copy.copy(self.recursion_tree(clade).sample_series)\n else:\n tmp += self.recursion_tree(clade).sample_series \n flag = 1\n node.sample_series = tmp\n else: # leaf node which has been init 
above.\n try:\n a = node.sample_series\n #print(node.name +' is a leaf')\n except:\n print('please initialize the tree leaves by otu table.')\n return node", "def sample_from_prior(self, *args, **kwargs):\n pass", "def search_tree_sample(variables, formula, samples):\n\n to_use = list(variables)\n used_vars = []\n solns = [None]\n\n while to_use != []:\n next_var = to_use[0]\n to_use = to_use[1:]\n\n solns = black_box_sample(formula, solns, samples, used_vars, next_var)\n used_vars.append(next_var)\n\n return uniform_select(solns)", "def sample(self, size=1):\n pass", "def sample(self, rng, query_value=None):\n nodes = jnp.array(self.nodes)\n query_value = (\n jax.random.uniform(rng) if query_value is None else query_value)\n query_value *= self._total_priority()\n\n _, index, _ = jax.lax.fori_loop(0, self.depth, step,\n (query_value, 0, nodes))\n\n return np.minimum(index - self.low_idx, self.highest_set)", "def sample(self, seed=None):\n raise NotImplementedError()", "def test_sample(self):\n\n p = g.Point([0, 0]).buffer(1.0)\n count = 100\n\n s = g.trimesh.path.polygons.sample(p, count=count)\n assert len(s) <= count\n assert s.shape[1] == 2\n\n radius = (s ** 2).sum(axis=1).max()\n assert radius < (1.0 + 1e-8)\n\n # test Path2D sample wiring\n path = g.trimesh.load_path(p)\n s = path.sample(count=count)\n assert len(s) <= count\n assert s.shape[1] == 2\n radius = (s ** 2).sum(axis=1).max()\n assert radius < (1.0 + 1e-8)\n\n # try getting OBB of samples\n T, extents = g.trimesh.path.polygons.polygon_obb(s)\n # OBB of samples should be less than diameter of circle\n diameter = g.np.reshape(p.bounds, (2, 2)).ptp(axis=0).max()\n assert (extents <= diameter).all()\n\n # test sampling with multiple bodies\n for i in range(3):\n assert g.np.isclose(path.area, p.area * (i + 1))\n path = path + g.trimesh.load_path(\n g.Point([(i + 2) * 2, 0]).buffer(1.0))\n s = path.sample(count=count)\n assert s.shape[1] == 2", "def sample(self):\n L = e ** (-self.lamb)\n k, p = 1, rand()\n while p > L:\n k += 1\n p *= rand()\n return k - 1", "def sample(self):\n return self.items[self.np_random.choice(len(self.items))]", "def get_random_depth_sample(n=8, depths=list(range(2,26,2)), num_samples=100):\n\n def get_states(start):\n frontier = [start]\n frontier_set = {start}\n explored = set()\n\n states = [False for _ in range(len(depths))]\n while not all(states):\n node = frontier.pop(0)\n frontier_set.remove(node)\n explored.add(node)\n\n children = node.get_children()\n\n # It's necessary to shuffle children to get a truly random sample; otherwise, the first child (always\n # produced from the parent by the same action) produced at a certain depth will always be selected,\n # and children produced by other actions will never be selected\n shuffle(children)\n\n for child in children:\n if child not in frontier_set and child not in explored:\n frontier_set.add(child)\n frontier.append(child)\n child.path_cost = node.path_cost+1\n index = depths.index(child.path_cost) if child.path_cost in depths else None\n if index is not None and not states[index]:\n states[index] = {'start': start.sequence, 'end': child.sequence}\n\n return states\n\n depth_sample = [[] for depth in range(len(depths))]\n\n for _ in range(num_samples):\n start = list(range(1,n+2))\n shuffle(start)\n start = PuzzleState(start, path_cost=0)\n\n states = get_states(start)\n print('\\rSet ' + str(_+1) + ' of ' + str(num_samples) + ' complete', end='', flush=True)\n list(map(list.append, depth_sample, states))\n\n return depth_sample", "def 
sampled(self):\n for name in self._nodes:\n node = self._nodes[name]\n if isinstance(node, RandomVariable) and not node.observed:\n yield name", "def sample(self, batch_size):\n\n if self.tree.filled_size() < batch_size:\n return None, None, None\n\n out = []\n indices = []\n weights = []\n priorities = []\n i = 0\n while i < batch_size:\n r = random.random()\n data, priority, index = self.tree.find(r)\n if not data:\n continue\n priorities.append(priority)\n weights.append((1. / self.capacity / priority) ** self.beta if priority > 1e-16 else 0)\n indices.append(index)\n out.append(data)\n self.priority_update([index], [0]) # To avoid duplicating\n i += 1\n\n self.priority_update(indices, priorities) # Revert priorities\n\n weights = [w / max(weights) for w in weights] # Normalize for stability\n\n return out, weights, indices", "def samples(self, u=None):\n roots = [u]\n if u is None:\n roots = self.roots\n for root in roots:\n yield from self._sample_generator(root)", "def sample(self, pkg):\n return next(self.dist[pkg])", "def samples(self):\n pass", "def testSampleRichness(self):\n self.tree.calculate_richness()\n self.assertEqual(1167, self.tree.get_species_richness(1))\n self.assertEqual(1171, self.tree.get_species_richness(2))\n self.assertEqual(self.tree.get_species_richness(1), self.tree.get_species_richness(1))\n self.assertEqual(self.tree.get_species_richness(2), self.tree.get_species_richness(2))\n self.assertEqual(self.tree.get_species_richness(3), self.tree.get_species_richness(3))", "def sample(self):\n return gc.rand_state.choice(self.domain)", "def sample(self, batch_size):\n # get the sum of priorities\n priority_sum = self.sum_tree.get_sum_priority()\n # sample priorities \n priorities_to_sample = np.random.uniform(0, priority_sum, batch_size)\n # get the indexes of replays\n sample_idxes = [self.sum_tree.get(x) for x in priorities_to_sample]\n # fetch the transitions and prepare the batch for training\n random_sample = [self.queue[x] for x in sample_idxes]\n # zip\n zipped = [ torch.from_numpy( np.asarray(arr).astype(np.float32) ).float() for arr in zip(*random_sample) ]\n sample = Transition( zipped[0], zipped[1].unsqueeze_(-1).long(), zipped[2].unsqueeze_(-1), zipped[3], zipped[4].unsqueeze_(-1).byte() )\n return sample, sample_idxes", "def prepare_data_for_g(self):\n\n paths = []\n for i in self.root_nodes:\n if np.random.rand() < config.update_ratio:\n sample, paths_from_i = self.sample(i, self.trees[i], config.n_sample_gen, for_d=False)\n if paths_from_i is not None:\n paths.extend(paths_from_i)\n # for each root, we generate 20 samples, each sample is equal to one path from root to that sample\n # So, we will get maximum (num_root x 20) paths\n # path is a list with length = (N x num_sample), with num_sample = 20\n # paths =[[path_root1_to_sample1],[path_root1_to_sample2],....,[path_root1_to_sample20],\n # [path_root2_to_sample1],[path_root2_to_sample2],....,[path_root2_to sample20]\n # .\n # .\n # [path_rootN_to_sample1],[path_rootN_to_sample2],....,[path_rootN_to_sample20]]\n # get_node_pairs_from_path\n\n node_pairs = list(map(self.get_node_pairs_from_path, paths))\n # node_pairs = [[node pairs for path_root1_to_sample1],[node pairs for path_root1_to_sample2],....,[node pairs for path_root1_to_sample20],\n # [node_pairs for path_root2_to_sample1],[node pairs for path_root2_to_sample2],....,[node pairs for path_root2_to sample20],\n # .\n # .\n # [node pairs for path_rootN_to_sample1],[node pairs for path_rootN_to_sample2],....,[node pairs for 
path_rootN_to_sample20]]\n\n node_1 = []\n node_2 = []\n for i in range(len(node_pairs)):\n for pair in node_pairs[i]:\n node_1.append(pair[0])\n node_2.append(pair[1])\n # reward = self.sess.run(self.discriminator.reward,\n # feed_dict={self.discriminator.node_id: np.array(node_1),\n # self.discriminator.node_neighbor_id: np.array(node_2)})\n reward = self.discriminator.forward(node_1, node_2)\n return node_1, node_2, reward", "def easy_sample(self, num, **kwargs):\n return self.preprocess(self.sample(num, **kwargs), **kwargs)", "def sample(self):\n sampleIndices = self.random_state.choice(len(self.X), int(len(self.X)*self.sample_ratio), replace=False)\n\n return self.X[sampleIndices]\n pass", "def sample(self, M):\n ran = self.mdl.sample(M, replace=True)\n return ' '.join(ran.index)", "def sample_from_prior(self, n_samples):\n pass", "def sample(self):\n x = self.state\n# dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n dx = self.theta * (self.mu - x) + self.sigma * np.random.standard_normal(self.size)\n self.state = x + dx\n return self.state", "def sample(self):\n return self._action_out(self._env.action_space.sample())", "def sample_tree(grid, edge_logits, edges, steps=1):\n logger.debug('sample_tree sampling a random spanning tree')\n COUNTERS.sample_tree_calls += 1\n if len(edges) <= 1:\n return edges\n tree = MutableTree(grid, edges)\n V, E, K = tree.VEK\n\n for step in range(steps):\n for e in range(E):\n e = np.random.randint(E) # Sequential scanning doesn't work.\n k1 = tree.remove_edge(e)\n valid_edges = np.where(\n tree.components[grid[1, :]] != tree.components[grid[2, :]])[0]\n valid_probs = edge_logits[valid_edges]\n valid_probs -= valid_probs.max()\n np.exp(valid_probs, out=valid_probs)\n total_prob = valid_probs.sum()\n if total_prob > 0:\n valid_probs *= 0.9999995 / total_prob # Avoid np.binom errors.\n k2 = valid_edges[sample_from_probs(valid_probs)]\n else:\n k2 = k1\n COUNTERS.sample_tree_infeasible += 1\n tree.add_edge(e, k2)\n\n COUNTERS.sample_tree_propose += 1\n COUNTERS.sample_tree_accept += (k1 != k2)\n HISTOGRAMS.sample_tree_log2_choices.update(\n [len(valid_edges).bit_length()])\n\n edges = sorted((grid[1, k], grid[2, k]) for k in tree.e2k.values())\n assert len(edges) == E\n return edges", "def getRondomNode_1(t):\n treelist = []\n traverse(t, treelist)\n random_num = random.randint(0, len(treelist) - 1)\n return treelist[random_num]", "def get_subsample_of_nodes(g, sampl=1):\n return sample(g.nodes(), int(len(g.nodes())*sampl))", "def sample(self,p0=None,nsamp=None): \r\n raise NotImplementedError('Need to implement sample function')", "def stratified_sample(self, batch_size, rng):\n if self._total_priority() == 0.0:\n raise Exception('Cannot sample from an empty sum tree.')\n\n indices = parallel_stratified_sample(rng, self.nodes, np.arange(batch_size),\n batch_size, self.depth)\n return np.minimum(indices - self.low_idx, self.highest_set)", "def sample_one(self):\n # x = self.mean + self.sigma * np.random.normal()\n x = self.dist.sample(1)\n return x", "def __init__(self, size = 1000, discard_sample = False, method = 'first'):\r\n super(SampleNode, self).__init__()\r\n self.size = size\r\n self.discard_sample = discard_sample\r\n self.method = method\r\n # random nodes need a stack to hold intermediate records\r\n if method == \"random\":\r\n self.stack = Stack(size)\r\n else:\r\n self.stack = None\r\n if method == \"percent\" and ((size>100) or (size<0)):\r\n raise ValueError, \"Sample size must be between 
0 and 100 with 'percent' method.\"", "def tree_query(self, pta_root):\n self.sul.pre()\n curr_node = pta_root\n\n inputs = []\n outputs = []\n\n while True:\n\n if curr_node.children:\n frequency_sum = sum(curr_node.input_frequencies.values())\n if frequency_sum == 0:\n # uniform sampling in case we have no information\n inp = choice(list(curr_node.children.keys()))\n else:\n # use float random rather than integers to be able to work with non-integer frequency information\n selection_value = random() * frequency_sum\n inp = None\n for i in curr_node.input_frequencies.keys():\n inp = i\n selection_value -= curr_node.input_frequencies[i]\n if selection_value <= 0:\n break\n # curr_node.input_frequencies[inp] -= 1\n\n inputs.append(inp)\n out = self.sul.step(inp)\n new_node = curr_node.get_child(inp, out)\n\n if new_node:\n outputs.append(out)\n curr_node = new_node\n else:\n self.sul.post()\n return\n else:\n curr_node = pta_root\n for i, o in zip(inputs, outputs):\n self.curr_node.input_frequencies[i] -= 1\n curr_node = curr_node.get_child(i, o)\n self.sul.post()\n return", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.random.rand(*x.shape) \n self.state = x + dx\n return self.state", "def sample(self, observation):\n raise NotImplementedError", "def sample(self):\n return self._sample_func", "def node2vec_sample(succ, prev_succ, prev_node, p, q):\n print(\"succ\", succ, \"prev_succ\", prev_succ, \"prev_node\", prev_node)\n succ_len = len(succ)\n prev_succ_len = len(prev_succ)\n\n probs = list()\n prob_sum = 0\n\n prev_succ_set = list()\n for i in range(prev_succ_len):\n prev_succ_set.insert(0, prev_succ[i])\n\n for i in range(succ_len):\n if succ[i] == prev_node:\n prob = 1. / p\n elif len(prev_succ_set) > 0 and succ[i] != prev_succ_set[-1]:\n prob = 1.\n else:\n prob = 1. 
/ q\n probs.append(prob)\n prob_sum += prob\n\n rand_num = random.uniform(0, 1) * prob_sum\n\n for i in range(succ_len):\n rand_num -= probs[i]\n if rand_num <= 0:\n sample_succ = succ[i]\n return sample_succ", "def totem_random():\n random_head()\n random_head()\n random_head()", "def sample(self):\n # return [v.sample() for v in self.variables]\n return self.domain[gc.rand_state.choice(len(self.domain))]", "def sample_pagerank(corpus, damping_factor, n):\n first_page = random.choice(list(corpus))\n model = transition_model(corpus, first_page, DAMPING)\n\n for i in range(n):\n\n choosen = random.random()\n total = 0\n\n for k, v in model.items():\n total += v\n\n if choosen <= total:\n page = k\n break\n \n model = transition_model(corpus, page, DAMPING)\n \n return model", "def sample(self, state, action):\n in_target=False\n if action not in self.available(state):\n return None\n # N = len(self.post(state, action))\n prob = []\n for t in self.post(state, action):\n prob.append(self.prob_delta(state, action, t))\n\n rand_val = random.random()\n total = 0\n for key in self.post(state,action):\n total +=self.prob_delta(state,action,key)\n\n if rand_val <= total:\n\n next_state=key\n break\n (x,y,t)=state\n ballpos = (-200, 0)\n if (abs(x) > 1000 or abs(y) > 1000) or (abs(y) <= 400 and x <= 0) or (t < 115 or t > 245):\n in_target=True\n\n\n if x==0 and y==0 and t==180:\n\n in_target=True\n\n\n # next_state = self.post(state, action)[np.random.choice(range(len(self.post(state, action))),1,prob)[0]]\n # Note that only one element is chosen from the array, which is the\n # output by random.choice\n return next_state,in_target", "def _get_sample(self):\n return [layer._get_sample() for layer in self.layers]", "def sample_posterior(self):\n \n# print (\"SAMPLING FROM LINEAR SIMILARITY VB\")\n if (self.posterior_mean == False):\n self.weight = Vil.sample_posterior(self.mu_weight, Vil.softplus(self.rho_weight))\n self.bias = Vil.sample_posterior(self.mu_bias, Vil.softplus(self.rho_bias))\n# print (self.bias)\n else:\n self.weight.data = self.mu_weight.data\n self.bias.data = self.mu_bias.data", "def sample(self, nsamples):\n return self.dist.sample(nsamples)", "def samplesim(conn, sample, threshold, fp):\n click.echo('Fingerprint: %s, Threshold: %s' % (fp, threshold))\n cur = conn.cursor()\n mol_ids = sample.read().strip().split('\\n')\n cur.execute(\"set rdkit.tanimoto_threshold=%s;\", (threshold,))\n for i, mol_id in enumerate(mol_ids[:100]):\n click.echo('Query: %s (%s of %s)' % (mol_id, i+1, len(mol_ids)))\n cur.execute(\"select entity_id from chembl_id_lookup where chembl_id = %s\", (mol_id,))\n molregno = cur.fetchone()[0]\n cur.execute(\"select %s from rdk.fps where molregno = %s\", (AsIs(fp), molregno,))\n qfp = cur.fetchone()[0]\n cur.execute(\"select molregno from rdk.fps where %s%%%s\", (AsIs(fp), qfp,))\n results = [r[0] for r in cur.fetchall()]\n chembl_ids = []\n for mrn in results:\n cur.execute(\"select chembl_id from chembl_id_lookup where entity_id = %s and entity_type = 'COMPOUND'\", (mrn,))\n chembl_ids.append(cur.fetchone()[0])\n click.echo(chembl_ids)\n cur.close()\n conn.close()", "def test_sample(self):\n dist = self.many_samples([0, 0, 0, 1])\n self.assertEquals(3, dist.argMax())\n\n dist = self.many_samples([1, 0, 0, 0, 0])\n self.assertEquals(0, dist.argMax())\n\n dist = self.many_samples([0.5, 0, 0, 0.25, 0.25])\n self.assertAlmostEquals(dist[0], 0.5, delta=0.01)\n self.assertAlmostEquals(dist[3], 0.25, delta=0.01)\n self.assertAlmostEquals(dist[4], 0.25, delta=0.01)\n 
self.assertEquals(dist[1], 0)\n self.assertEquals(dist[2], 0)\n\n with self.assertRaises(AssertionError):\n diffp.sample([0.5, 0.5, 0.01])", "def sample(self):\n sample_ind = np.random.choice(len(self.memory), self.batch_size)\n # get the selected experiences: avoid using mid list indexing\n es, ea, er, en, ed = [], [], [], [], []\n i = 0\n while i < len(sample_ind):\n self.memory.rotate(-sample_ind[i]) # rotate the memory up to this index\n e = self.memory[0] # sample from the top\n es.append(e.state)\n ea.append(e.action)\n er.append(e.reward)\n en.append(e.next_state)\n ed.append(e.done)\n self.memory.rotate(sample_ind[i])\n i += 1\n states = torch.stack(es).squeeze().float().to(device)\n actions = torch.stack(ea).float().to(device)\n rewards = torch.from_numpy(np.vstack(er)).float().to(device)\n next_states = torch.stack(en).squeeze().float().to(device)\n dones = torch.from_numpy(np.vstack(ed).astype(np.uint8)).float().to(device)\n return (states, actions, rewards, next_states, dones)", "def sampleClass(classgroup):\n return classgroup.sample(frac = fraction)", "def sample(src, num):\n dst = os.path.join(os.path.dirname(src),\n os.path.basename(src) + '-sample')\n if not os.path.exists(dst):\n os.makedirs(dst)\n total_num = len(os.listdir(src))\n prob = num / total_num\n selected_files = random.sample(os.listdir(src), num)\n for f in selected_files:\n from_file = os.path.join(src, f)\n to_file = os.path.join(dst, f)\n shutil.copytree(from_file, to_file)", "def sample(self, length):\n pass", "def samples(self, gp):\r\n raise NotImplementedError", "def get_sample(self, path, prefix) -> List:\n pass", "def test_flmb(self):\n self.create_sample_data_set_dir(\"node10p1.dat\", TELEM_DIR, \"node59p1.dat\")\n self.assert_initialize()\n result = self.data_subscribers.get_samples(DataParticleType.METADATA_TELEMETERED,1,30)\n result = self.data_subscribers.get_samples(DataParticleType.SAMPLE_TELEMETERED,5,30)", "def sample_generator(self, sess):\n\n to_return = {\n 'g_sample': self.G_sample_test,\n }\n return sess.run(to_return)", "def dep_sample_generator(path_to_file):\n assert os.path.isfile(path_to_file), \"File does not exist\"\n root = DepSample(0, ROOT, ROOT, 0)\n with open(path_to_file) as fp:\n sample = [root]\n for line in fp:\n if not line.rstrip():\n yield sample\n sample = [root]\n else:\n ls = line.rstrip().split('\\t')\n # print(ls)\n sample.append(DepSample(int(ls[0]), ls[1], ls[3], int(ls[6])))\n if len(sample) > 1:\n yield sample", "def monte_carlo_sample(self):\n\t\tresult = dict()\n\t\tfor n in self.topological_sort():\n\t\t\tpvals = tuple(result[p] for p in n.parents)\n\t\t\tresult[n.name] = n.cpt.rand_result(pvals)\n\t\treturn result", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n raise NotImplementedError", "def sample(self, size=1):\n raise NotImplementedError", "def get_sample(self):\n # initialize with a seed point\n self.__sample__(rnd() * self.width, rnd() * self.height)\n while len(self.queue) > 0:\n idx = int(rnd() * len(self.queue))\n p = self.queue[idx]\n new_inserted = False\n for j in xrange(self.k):\n theta = 2 * np.pi * rnd()\n # radius <= r <= 2 * radius\n r = np.sqrt(3 * rnd() * self.radius**2 + self.radius**2)\n x = p[0] + r * np.cos(theta)\n y = p[1] + r * np.sin(theta)\n if (0 <= x < self.width) and (0 <= y < self.height) and self.__far__(x,y):\n self.__sample__(x,y)\n new_inserted = True\n break\n # remove point from active list\n if not new_inserted:\n 
self.queue = self.queue[:idx] + self.queue[idx+1:]\n self.samples.append(p)\n\n return self.samples", "def regular_subsample(neuron):\n # select all the main points\n selected_index = get_main_points(neuorn)\n\n # Computing the parent id of the selected nodes\n neuron = neuron_with_selected_nodes(selected_index)\n return neuron", "def sample(self):\r\n x = self.state\r\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\r\n self.state = x + dx\r\n return self.state", "def _uniform_random_walk(self, start_node = None):\n\t\tif start_node == None:\n\t\t\t# Sampling is uniform w.r.t V, and not w.r.t E\n\t\t\tstart_node = random.choice(range(self.nodes_size))\n\t\tpath = [start_node]\n\t\twhile len(path) < self._walk_length:\n\t\t\t#if random.random() < self._walk_restart:\n\t\t\t# path.append(start_node)\n\t\t\t# continue\n\t\t\tcur = path[-1]\n\t\t\tadj_list = self._net.get_adj_list(cur)\n\t\t\tif len(adj_list) > 0:\n\t\t\t\tpath.append(random.choice(adj_list)) # Generate a uniform random sample\n\t\t\telse:\n\t\t\t\t# logger.warning('no type-corresponding node found, walk discontinued, generate a path less than specified length.')\n\t\t\t\t# break\n\t\t\t\t# logger.warning('no type-corresponding node found, walk restarted.')\n\t\t\t\tpath.append(start_node)\n\n\t\treturn [str(node) for node in path]", "def sample_response(self, slate_p):\n slate_p[slate_p >= 0.5] = 1.0\n slate_p[slate_p < 0.5] = 0.0\n# m = Bernoulli(slate_p)\n# return m.sample()\n return slate_p", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n # dx = self.theta * (self.mu - x) + self.sigma * np.random.standard_normal(self.size)\n self.state = x + dx\n return self.state", "def sample(self, M):\n ran = np.random.choice(self.mdl.index, p=self.mdl.values, size=M)\n return ' '.join(ran)", "def _get_sample(self):\n p = self._get_mean()\n u = self.random.random_sample(p.shape)\n sample = u < p\n return sample", "def test_sample(self):\n\n # By default we return all photos from the travelogue (but ordered at random).\n _current_sample_size = models.SAMPLE_SIZE\n models.SAMPLE_SIZE = 5\n self.assertEqual(len(self.test_travelogue.sample()), 2)\n\n # We can state how many photos we want.\n self.assertEqual(len(self.test_travelogue.sample(count=1)), 1)\n\n # If only one photo is public then the sample cannot have more than one\n # photo.\n self.pl.is_public = False\n self.pl.save()\n self.assertEqual(len(self.test_travelogue.sample(count=2)), 1)\n\n self.pl.is_public = True\n self.pl.save()\n\n # We can limit the number of photos by changing settings.\n models.SAMPLE_SIZE = 1\n self.assertEqual(len(self.test_travelogue.sample()), 1)\n\n models.SAMPLE_SIZE = _current_sample_size", "def sample(self, params):\r\n old_model_trace = poutine.trace(self.model)(self.args, self.kwargs)\r\n traces = []\r\n t = 0\r\n i = 0\r\n while t < self.burn + self.lag * self.samples:\r\n i += 1\r\n # q(z' | z)\r\n new_guide_trace = poutine.block(\r\n poutine.trace(self.model))(old_model_trace, self.args, self.kwargs)\r\n # p(x, z')\r\n new_model_trace = poutine.trace(\r\n poutine.replay(self.model, new_guide_trace))(self.args, self.kwargs)\r\n # q(z | z')\r\n old_guide_trace = poutine.block(\r\n poutine.trace(\r\n poutine.replay(self.guide, old_model_trace)))(new_model_trace,\r\n self.args, self.kwargs)\r\n # p(x, z') q(z' | z) / p(x, z) q(z | z')\r\n logr = new_model_trace.log_pdf() + new_guide_trace.log_pdf() - \\\r\n 
old_model_trace.log_pdf() - old_guide_trace.log_pdf()\r\n rnd = pyro.sample(\"mh_step_{}\".format(i),\r\n Uniform(torch.zeros(1), torch.ones(1)))\r\n\r\n if torch.log(rnd).data[0] < logr.data[0]:\r\n # accept\r\n t += 1\r\n old_model_trace = new_model_trace\r\n if t <= self.burn or (t > self.burn and t % self.lag == 0):\r\n yield (new_model_trace, new_model_trace.log_pdf())", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n self.state = x + dx\n return self.state", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n self.state = x + dx\n return self.state", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n self.state = x + dx\n return self.state", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n self.state = x + dx\n return self.state", "def sample(self):\n x = self.state\n dx = self.theta * (self.mu - x) + self.sigma * np.array([random.random() for i in range(len(x))])\n self.state = x + dx\n return self.state", "def sample(self, like_params):\n\t\traise NotImplementedError" ]
[ "0.73472464", "0.6716626", "0.66083133", "0.65131354", "0.64742315", "0.64742315", "0.6471004", "0.64553005", "0.6374327", "0.6284512", "0.62842137", "0.62587696", "0.61892176", "0.61316454", "0.6015631", "0.6002262", "0.5970276", "0.5970129", "0.5945327", "0.5932385", "0.5906762", "0.58944225", "0.58944225", "0.58800155", "0.58780444", "0.5849301", "0.58489186", "0.5841952", "0.5837383", "0.58331203", "0.58312964", "0.5824149", "0.581882", "0.5809704", "0.5792435", "0.57916224", "0.5789994", "0.5789378", "0.57876515", "0.57855135", "0.57809705", "0.57664895", "0.57572913", "0.5739183", "0.5738484", "0.57270604", "0.57154113", "0.5714869", "0.5705312", "0.56987286", "0.5690426", "0.56853944", "0.5682985", "0.5679461", "0.567471", "0.5674095", "0.566611", "0.56418073", "0.5627477", "0.56263024", "0.56139624", "0.5612462", "0.5610699", "0.5607872", "0.5594343", "0.55871636", "0.55865", "0.55845857", "0.5582922", "0.5579289", "0.5562731", "0.5561659", "0.55574363", "0.5556504", "0.5554574", "0.55520517", "0.5551671", "0.5549497", "0.5543687", "0.55397594", "0.55376", "0.5532781", "0.5532781", "0.5532781", "0.5532781", "0.55320543", "0.5521995", "0.55218714", "0.551817", "0.55159605", "0.55139935", "0.55111796", "0.55105454", "0.5495981", "0.54930216", "0.5488805", "0.5488805", "0.5488805", "0.5488805", "0.5488805", "0.5487871" ]
0.0
-1