query (string, 9 to 9.05k chars) | document (string, 10 to 222k chars) | negatives (list of 19 to 20 items) | metadata (dict)
---|---|---|---|
Reindex to include missing timestamps and create a new column for actual rain from cumulative rain
|
def preprocessing(df):
    logger.debug("Fill in missing timestamps by reindexing")
    min_time = min(df.index)
    max_time = max(df.index)
    rng = pd.date_range(min_time, max_time, freq='15Min')
    df = df.reindex(rng)
    logger.debug("Convert cumulative rain to actual rain")
    df['rain'] = df['cum_rain'].diff(periods=2)
    # negative values from diff are when the rain value resets so we set equal to the cumulative value
    df.loc[df['rain'] < 0, 'rain'] = df.loc[df['rain'] < 0, 'cum_rain']
    latest_rain_time = max(df.index[df.cum_rain.notnull()])
    logger.info('latest rain update at: ' + str(latest_rain_time))
    logger.debug("Concat rain and forecast to create model_rain")
    df['model_rain'] = pd.concat([
        df[df.index <= latest_rain_time]['rain'],
        df[df.index > latest_rain_time]['forecast']
    ])
    logger.debug("interpolate model_rain")
    df['model_rain'] = df['model_rain'].interpolate()
    return df
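# --- Editor's usage sketch (not part of the original function; the values below are
# illustrative assumptions): `preprocessing` expects a pandas DataFrame indexed by
# timestamps with 'cum_rain' and 'forecast' columns, plus a module-level `logger`.
#
#   import logging
#   import pandas as pd
#   logger = logging.getLogger(__name__)
#   times = pd.to_datetime(['2021-01-01 00:00', '2021-01-01 00:30', '2021-01-01 01:00'])
#   raw = pd.DataFrame({'cum_rain': [0.0, 0.2, 0.5],
#                       'forecast': [0.0, 0.1, 0.3]}, index=times)
#   cleaned = preprocessing(raw)  # reindexed to 15-minute steps, adds 'rain' and 'model_rain'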
|
[
"def fill_gaps(df):\n idx = pd.period_range(df.index.min(), df.index.max(), freq=\"D\")\n # idx_forecast = pd.period_range(start_datetime, end_datetime, freq=\"H\")\n ts = pd.DataFrame({\"empty\": [0 for i in range(idx.shape[0])]}, index=idx)\n ts = ts.to_timestamp()\n df_filled = pd.concat([df, ts], axis=1)\n del df_filled[\"empty\"]\n return df_filled",
"def fill_nan(train_raw):\n return train_raw.fillna(method='ffill')",
"def _fix_index(self, frame):\n frame['time'] = pd.DatetimeIndex(frame['time'].astype('datetime64[s]'))\n frame.set_index('time', inplace=True)\n frame = frame.tz_localize('UTC')\n frame = frame.tz_convert(self.tz.zone)\n return frame",
"def add_epoch_index(self, temp):\n\n # first index level is time\n assert temp.index.names[0] == self.time\n\n # temp should be long form, columns have single level (channels\n # hopefully)\n assert not isinstance(temp.columns, pd.core.indexes.multi.MultiIndex)\n\n # we can only handle 2- or 3-dimensional\n assert temp.index.nlevels in (2, 3)\n\n for i in range(1, temp.index.nlevels):\n level = temp.index.levels[i]\n # if a level looks like it was automatically created by Pandas,\n # we replace it with the epoch_index\n if (\n isinstance(level, pd.RangeIndex)\n and len(level) == len(self.epoch_index)\n and level.start == 0\n and level.step == 1\n and level.stop == len(self.epoch_index)\n ):\n # inplace is deprecated pandas 1.2+\n # temp.index.set_levels(self.epoch_index, level=i, inplace=True)\n temp.index = temp.index.set_levels(self.epoch_index, level=i)\n temp.index.rename(self.epoch_index.name, level=i, inplace=True)\n\n return temp",
"def fill_in_missing_dates_in_calendardate_index(sf1):\n sf1[\"calendardate_temp2\"] = sf1.index # Don't know another awy to get the index value after selection\n desired_index = get_calendardate_index(sf1.iloc[0][\"calendardate_temp2\"], sf1.iloc[-1][\"calendardate_temp2\"])\n\n index_difference = list(set(desired_index).difference(set(sf1.index)))\n\n for caldate_index in index_difference:\n # sf1.index.insert(-1, caldate_index)\n sf1.loc[caldate_index] = pd.Series()\n\n sf1 = sf1.drop(columns=[\"calendardate_temp2\"])\n sf1 = sf1.sort_values(by=[\"calendardate\", \"datekey\"], ascending=True)\n\n return sf1",
"def daily_resample(b, a):\n \n master_index=a.index\n a_daily=a.resample('1D') ## Only want index, fill method is irrelevant\n b=uniquets(b)\n b_daily=b.reindex(a_daily.index, method=\"ffill\", limit=1)\n new_b=b_daily.reindex(master_index, method=\"ffill\", limit=1)\n \n return new_b",
"def fillzero(df):\n missing_traffic = (df == 0).all(axis=1)\n df[missing_traffic].replace(0, np.NaN)\n r_mean = df.rolling(min_periods=1, window=3, center=True).sum().shift(-1) / 2\n df.loc[missing_traffic] = r_mean[missing_traffic]\n return df",
"def test_clean_ts(self):\n\n df_raw = pd.DataFrame({\n 'dates' : ['2020.11.03',\n '11/6/2020',\n '2020-11-9 1:30PM',\n '11/10/2020 12:00AM',\n '11/13/2020 2:00PM',\n '11/21/2020',\n ],\n 'junk' : [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\"],\n })\n\n df_raw['values']=[160.25, 150.5, 'foo', 140, 145, 130]\n\n ts1=lstmutil.TimeSeries(begin=datetime(2020, 11, 5),\n end=datetime(2020, 11, 23)\n )\n\n # Two outer timestamps should be reject, and the non-numeric\n # value should be dropped.\n df_clean1 = ts1.clean_ts(df_raw, 'dates', 'values')\n self.assertEqual(len(df_clean1), 4)\n\n\n # Check interpolate within and beyond region\n df_interp1 = ts1.interp_ts(df_clean1,\n 'values',\n ts1.get_target_timestamps())\n\n self.assertEqual(df_interp1['values'].values[0], 150.5)\n self.assertEqual(df_interp1['values'].values[-1], 130.0)\n mask=df_interp1['date']=='2020-11-11'\n self.assertEqual(df_interp1[mask]['values'].values[0], 140.0)\n\n # Make sure we didn't lose good data\n df_merge1=df_interp1.merge(df_clean1,\n on='date',\n suffixes=['_i', '_c'],\n how='left')\n\n num_before=sum([not pd.isnull(t) for t in df_clean1['values']])\n num_after=sum([not pd.isnull(t) for t in df_merge1['values_c']])\n self.assertTrue(num_before, num_after)",
"def reindex_year(df):\n dfs = []\n colyearmap = dates.find_year(df)\n for colname in df.columns:\n dfs.append(dask.delayed(_reindex_col(df, colname, colyearmap)))\n\n dfs = dask.compute(*dfs)\n dfs = [x for x in dfs if x is not None]\n # merge all series into one dataframe, concat doesn't quite do the job\n res = reduce(\n lambda left, right: pd.merge(\n left, right, left_index=True, right_index=True, how=\"outer\"\n ),\n dfs,\n )\n res = res.dropna(how=\"all\") # drop uneeded columns out into future\n res = pandasutil.fillna_downbet(\n res\n ) # use this as above ffills incorrectly at end of timeseries\n\n return res",
"def _apply_daily_mask(mask, data, transformation):\n data = data.copy()\n data[~mask] = np.nan\n return data.resample('D').transform(transformation)",
"def manual_temp_imputation(self): \n\n self.data = self.data.sort_values(['series_id', 'timestamp'])\n\n print(self.data.loc[self.data.series_id == 100948,\"temperature\"])\n\n # one timestamp before\n self.data['temperature'] = self.data['temperature'].fillna(self.data.groupby(['series_id'])['temperature'].\\\n ffill())\n # one timestamp after\n self.data['temperature'] = self.data['temperature'].fillna(self.data.groupby(['series_id'])['temperature'].\\\n bfill())\n # same time one day ago\n self.data['temperature'] = self.data['temperature'].fillna(self.data.groupby(['series_id', 'hour'])\\\n ['temperature'].ffill())\n # same time one year ago\n self.data['temperature'] = self.data['temperature'].fillna(self.data.groupby(['series_id', 'dayofyear'])\\\n ['temperature'].ffill())\n # same week (mean)\n self.data['temperature'] = self.data.groupby(['series_id', 'week']).temperature.\\\n transform(lambda x:x.fillna(x.mean()))\n # same month (mean)\n self.data['temperature'] = self.data.groupby(['series_id', 'month']).temperature.\\\n transform(lambda x:x.fillna(x.mean()))\n # same timestamp, surface, base_temperature (mean)\n self.data['temperature'] = self.data.groupby(['timestamp', 'surface', 'base_temperature']).temperature. \\\n transform(lambda x:x.fillna(x.mean()))\n # one timestamp before\n self.data['temperature'] = self.data['temperature'].fillna(self.data.groupby(['series_id'])['temperature']. \\\n ffill())\n # same timestamp mean\n self.data['temperature'] = self.data.groupby(['timestamp']).temperature.transform(lambda x:x.fillna(x.mean()))\n\n print(self.data.loc[self.data.series_id == 100948,\"temperature\"])\n\n self._logger.info(\"Complete imputation for temperature.\")",
"def prep_data(input_df):\n\n output_df = input_df.fillna(-999)\n output_df.replace(np.inf, -999, inplace=True)\n output_df.drop('date_fire', inplace=True, axis=1)\n\n return output_df",
"def fill_timeseries( df, id_columns, date_col, target_variables, freq = \"D\", fillmethod = \"zeros\"):\n df_result = pd.DataFrame()\n df[date_col] = pd.to_datetime(df[date_col]) \n df[\"ID\"] = \"\"\n for col in id_columns:\n df[\"ID\"] = df.ID.apply(str) + \"_\" + col +df[col].apply(str)\n df[col] = df[col].apply(str)\n for id_variable in df.ID.unique():\n id_columns_date = id_columns + [date_col]\n df_subset = df[df.ID == id_variable]\n df_subset = df_subset.groupby(id_columns_date)[target_variables].sum() #sum or count\n df_subset = df_subset.reset_index()\n\n start_date = df_subset[date_col].min() #- timedelta(days = daysmax_lag)\n end_date = df_subset[date_col].max() #- timedelta(days = days =max_lag)\n idx=pd.date_range(start=start_date,end=end_date, freq= freq)\n\n if start_date != end_date:\n print(\"\\n\\nMore than one value: \\n\\t ID: {} \\t\\n start_date: {} \\n\\t end_date: {}\".format( id_variable, start_date, end_date))\n\n if fillmethod == \"zeros\":\n id_values = dict()\n for col in id_columns:\n id_values[col] = df_subset[col].unique()[0]\n df_subset = df_subset.set_index(df_subset[date_col],drop=True)\n df_subset = df_subset.reindex(idx)\n df_subset = df_subset.replace(np.nan, 0)\n\n for col in id_columns:\n df_subset[col] = id_values[col] \n\n df_subset = df_subset.sort_index(ascending=False).drop( date_col,1).reset_index().rename(columns={'index':date_col})\n\n elif fillmethod == \"mean\":\n id_values = dict()\n for col in id_columns:\n id_values[col] = df_subset[col].unique()[0]\n\n df_subset = df_subset.reindex(idx)\n df_subset.replace(nan, 0)\n\n for col in id_columns:\n df_subset[col] = id_values[col] \n df_subset.reindex(idx).fillna(df.mean()).sort_index(ascending=False).drop( date_col,1).reset_index().rename(columns={'index':date_col})\n\n elif fillmethod == \"ffil\":\n df_subset=df_subset.set_index(df_subset[date_col],drop=True)\n df_subset.reindex(idx).fillna(method='ffill').sort_index(ascending=False).drop( date_col,1).reset_index().rename(columns={'index':date_col})\n else:\n raise Exception(\"'fllmethod {} is not implemented\".format(fillmethod))\n\n if len(df_result) == 0:\n df_result = df_subset\n else:\n df_result = df_result.append(df_subset)\n return df_result",
"def clean_time (self):\n badrows=[] # List of bad rows indexes \n self.df['DATE']=pd.to_datetime(self.df['DATE'],format='%d/%m/%Y %H:%M:%S',errors='coerce') # Define the format of the date\n self.df['DATE'] = self.df['DATE'].interpolate().ffill().bfill() # Interpolate also the first and last lines with np.nan values if required\n for j in range(0,len(self.df.index)-2): # Test if a bad character is inserted in the date\n if self.df['DATE'].iloc[j] <= self.df['DATE'].iloc[j+1]: \n None\n else:\n if self.df['DATE'].iloc[j] <= self.df['DATE'].iloc[j+2]: \n badrows.append(j+1)\n else:\n badrows.append(j)\n for k in badrows:\n self.df['DATE'].iloc[k]=np.nan\n self.df['DATE'] = self.df['DATE'].interpolate().ffill().bfill() # Interpolate also the first and last lines with np.nan values if required\n self.df.set_index('DATE', inplace=True) # Put the DATA column as index column\n for i in range (0,len(self.df.index)-1):\n self.tdelta.append((self.df.index[i+1]-self.df.index[i]).total_seconds()) # Calculate the delay in second between two dates\n self.tdelta.append((self.df.index[-1]-self.df.index[-2]).total_seconds())\n self.df['TIMELAG'] = pd.Series(self.tdelta,index=self.df.index) \n return self.df",
"def forward_fill_gaps(sf1, quarters):\n sf1 = sf1.fillna(value=\"IAMNAN\")\n sf1[\"calendardate_temp1\"] = sf1.index # Don't know another awy to get the index value after selection\n\n calendardate_index = get_calendardate_index(sf1.iloc[0][\"calendardate_temp1\"], sf1.iloc[-1][\"calendardate_temp1\"])\n\n # sf1_reindexed = sf1.reindex(calendardate_index) # ValueError: cannot reindex from a duplicate axis\n\n sf1_reindexed = fill_in_missing_dates_in_calendardate_index(sf1)\n\n sf1_filled = sf1_reindexed.fillna(method=\"ffill\", limit=quarters)\n \n sf1_filled = sf1_filled.drop(columns=[\"calendardate_temp1\"])\n sf1_filled = sf1_filled.dropna(axis=0)\n sf1_filled = sf1_filled.replace(to_replace=\"IAMNAN\", value=np.nan)\n\n return sf1_filled",
"def resample_and_forward_fill(df, maxfill=0):\n df = df.resample('1T', label='right').asfreq()\n\n # open, high and low get forward filled from close\n df['close'] = df['close'].ffill(limit=maxfill)\n df.loc[df['open'].isnull(), 'open'] = df['close']\n df.loc[df['high'].isnull(), 'high'] = df['close']\n df.loc[df['low'].isnull(), 'low'] = df['close']\n\n return df",
"def onset_by_rain(date, df, window=5, rain_threshold=5):\n mask = df['Date'] == date\n storm_row = df[mask]\n storm_ind = int(storm_row.index[0])\n\n sub_df = df.iloc[(storm_ind - window):(storm_ind + window)]\n\n if sub_df.Rain.dropna().empty: # if there's no rain data\n return date, storm_ind\n\n ind = sub_df.Rain.idxmax()\n val = df.Rain.iloc[ind]\n while val > rain_threshold:\n ind -= 1\n val = df.Rain.iloc[ind]\n\n # ind += 1\n return df['Date'].iloc[ind], ind",
"def fix_time_adjustment(weather_train:pd.DataFrame, weather_test:pd.DataFrame):\r\n # weather_train['DataType'], weather_test['DataType'] = 'train', 'test'\r\n weather_key = ['site_id', 'timestamp']\r\n weather = pd.concat([weather_train, weather_test], ignore_index=True, axis=0, sort=False)\r\n temp_skeleton = weather[weather_key + ['air_temperature']].drop_duplicates(subset=weather_key).sort_values(by=weather_key).copy()\r\n data_to_plot = temp_skeleton.copy()\r\n data_to_plot[\"hour\"] = data_to_plot[\"timestamp\"].dt.hour\r\n count = 1\r\n fig = plt.figure(figsize=(25, 15))\r\n for site_id, data_by_site in data_to_plot.groupby('site_id'):\r\n by_site_by_hour = data_by_site.groupby('hour').mean()\r\n ax = plt.subplot(4, 4, count)\r\n plt.plot(by_site_by_hour.index, by_site_by_hour['air_temperature'], 'xb-')\r\n ax.set_title('site: ' + str(site_id))\r\n count += 1\r\n plt.tight_layout()\r\n plt.show()\r\n # fig.savefig(cfg.eda_dir + \"/air_temperature_before_Adjustment.png\")\r\n del data_to_plot\r\n temp_skeleton['temp_rank'] = temp_skeleton.groupby(['site_id', temp_skeleton.timestamp.dt.date])[\r\n 'air_temperature'].rank('average')\r\n df_2d = temp_skeleton.groupby(['site_id', temp_skeleton.timestamp.dt.hour])['temp_rank'].mean().unstack(level=1)\r\n site_ids_offsets = pd.Series(df_2d.values.argmax(axis=1) - 14)\r\n site_ids_offsets.index.name = 'site_id'\r\n\r\n def timestamp_align(df):\r\n df['offset'] = df.site_id.map(site_ids_offsets)\r\n df['timestamp_aligned'] = (df.timestamp - pd.to_timedelta(df.offset, unit='H'))\r\n df['timestamp'] = df['timestamp_aligned']\r\n del df['timestamp_aligned'], df['offset']\r\n return df\r\n\r\n return timestamp_align(weather_train), timestamp_align(weather_test)",
"def add_missing_rows(df, ind):\n\n df.Date = pd.to_datetime(df.Date)\n ind.Date = pd.to_datetime(ind.Date)\n s = df.Date.head(1).values[0]\n e = df.Date.tail(1).values[0]\n ind = ind[ind.Date.between(e, s)]\n df = df.set_index(\"Date\")\n ind = ind.set_index(\"Date\")\n missing = set(ind.index)-set(df.index)\n for i in missing:\n df.loc[i] = np.nan\n df = df.sort_index(ascending=False)\n df = df.reset_index()\n\n return df"
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Upload JSON file to webpage via FTP and then force Facebook to update its cache
|
def upload_export(testing, output):
    filename = "dart.json"
    with open(os.path.join(FDIR, '../' + filename), 'w') as f:
        json.dump(output, f, indent=4)
    from local_info import ftp_url, ftp_pass, ftp_user, ftp_dir
    ftp = ftplib.FTP(ftp_url)
    ftp.login(ftp_user, ftp_pass)
    if ftp_dir is not None:
        ftp.cwd(ftp_dir)
    # storbinary expects a file object opened in binary mode; use a context manager so the handle is closed
    with open(os.path.join(FDIR, '../' + filename), 'rb') as f:
        ftp.storbinary("STOR " + filename, f, 1024)
    from local_info import facebook_access
    r = requests.post("https://graph.facebook.com", data={'scrape': 'True', 'id': 'http://isthedartrunning.co.uk/', 'access_token': facebook_access})
|
[
"def make_cached_file(url, fname):\n response = fetch_url(url)\n lfile = open(fname, \"w\")\n lfile.write(json.dumps(response, indent=2))\n lfile.close()",
"def upload(self, filename, contents):\n if not self.__enabled:\n return\n\n self.__cache[filename] = {}\n self.__cache[filename][\"timestamp\"] = SDL_GetTicks()\n self.__cache[filename][\"contents\"] = contents\n\n self.driftwood.log.info(\"Cache\", \"uploaded\", filename)",
"def cache_json( url, force=False ):\n cache = os.path.join( DIR, os.path.basename( urlparse.urlparse( url ).path ))\n\n if force or not os.path.isfile( cache ):\n j = urllib2.urlopen( urllib2.Request(\n url,\n headers={ 'User-Agent': 'Mozilla/5.0', 'Content-Type': 'application/json' }\n ))\n with open( cache, 'wb' ) as f:\n f.write( j.read())\n\n return cache",
"def loadjson(self):\n\n\t\tif os.path.isfile(self.filename):\n\t\t\twith open(self.filename, 'r+') as outfile:\n\t\t\t \tdata = json.load(outfile)\n\t\t\t \tself.cache.update(data)\n\t\telse:\n\t\t\tprint \"File %s created\" % (self.filename)",
"def save(self):\n data = {\n \"file_path\": self._file_path,\n \"local_md5\": self._local_md5,\n \"remote_md5\": self._remote_md5,\n \"created_at\": self._created_at.strftime(\n DbndLocalFileMetadataRegistry._date_format\n ),\n \"ttl\": self._ttl,\n }\n with open(self._cache_file_path, \"w+\") as f:\n f.write(json.dumps(data))",
"def upload_random_data(self):\n if self.user_can_update_information():\n\n # Generate the files\n generate_transaction_files(user=self.user)\n\n # Upload the files\n UploadThread(self.user).run()\n else:\n print_error(\"Password is incorrect. Cannot generate new data.\")",
"def upload_file(self):\n with open(self.temp_path + self.local_file_name, 'r', encoding='utf-8') as file:\n data = {'name': (self.short_name.lower(), file.read(), self.mime_type)}\n if self.sparql_graph == '':\n self.sheet_updates.error_type = 'NO GRAPH NAME'\n self.sheet_updates.error_message = 'A graph name is required for a upload to take place. Once set' \\\n ' the graph name should not be changed.'\n raise FusekiUploadError\n basic_url = 'http://localhost:3030/skosmos/data?graph=' + self.sparql_graph\n\n # replace graph on server. overwrites existing data.\n response = requests.request('PUT', basic_url, files=data)\n\n if not response.ok:\n self.sheet_updates.error_type = 'UPLOAD ERROR ' + str(response.status_code)\n self.sheet_updates.error_message = 'Could not upload item to fuseki: ' + str(response.text)\n raise FusekiUploadError('Could not upload vocabulary ' + self.title + '.')\n\n self.sheet_updates.triple_count = str(json.loads(response.text)['tripleCount'])",
"def update_files():\r\n set_to_file(Crawler.queue, Crawler.queueFile)\r\n set_to_file(Crawler.crawled, Crawler.crawledFile)\r\n external_to_file(Crawler.external, Crawler.externalFile)",
"def save_cache(self):\n #file name is md5 string of url\n\n md5_str = hashlib.md5(self.file_url + str(time.time())).hexdigest()\n\n self.cached_file = md5_str + '.nc'\n response = urllib.urlretrieve(self.file_url, \n settings.CACHE_DIR + self.cached_file)\n\n self.variables = ZooAdapter.get_datafile_variables(\n self._get_opendap_addr())",
"def upload_meta(ctx, fname, version):\n\n token = ctx.obj['token']\n zen_log = ctx.obj['log']\n zen_log.info(f\"Uploading metadata from {fname} to {ctx.obj['portal']},\"\n + f\" production: {ctx.obj['production']}\")\n\n\n # read data from input json file and process plans in file\n data = read_json(fname)\n # process data for each plan and post records returned by process_plan()\n for plan in data:\n if ctx.obj['portal'] == 'zenodo':\n zen_log.info(plan['metadata']['title'])\n record = process_zenodo_plan(plan, ctx.obj['community_id'])\n else:\n zen_log.info(plan['title'])\n record = process_invenio_plan(plan)\n r = post_json(ctx.obj['url'], token, record, zen_log)\n zen_log.debug(f\"Request: {r.request}\") \n zen_log.debug(f\"Request url: {r.url}\") \n zen_log.info(r.status_code) \n return",
"def upload_to_jsonbin(data):\n url = \"https://api.jsonbin.io/b/\" + BIN\n headers = {\n \"Content-Type\": \"application/json\",\n \"secret-key\": KEY,\n \"private\": \"false\",\n }\n about = {\n \"UNIX\": str(int(time.time())),\n \"OWNER\": \"litepresence\",\n \"MISSION\": \"Bitshares Public Node Latency Testing\",\n \"UTC\": str(time.strftime(\"%a, %d %b %Y %H:%M:%S\", time.gmtime())),\n \"LOCATION\": \"USA EAST\",\n \"SOURCE_CODE\": (\n \"https://github.com/litepresence/extinction-event/blob/\"\n + \"GITHUB_MASTER/EV/latencyTEST.py\"\n ),\n }\n data = data.update(about)\n data[\"DICT_KEYS\"] = str(list(data.keys()))\n req = requests.put(url, json=data, headers=headers)\n del data\n print(\"reading jsonbin...\")\n url += \"/latest\"\n print(url)\n req = requests.get(url, headers=headers, timeout=(6, 30))\n del url\n del headers\n print(req.text)\n del req",
"def update_pull(self):\n \n file_path = os.path.join(self.script_dir,'pull list.json') \n if not os.path.isfile(file_path)or os.path.getsize(file_path) == 0 :\n with open(file_path,'w') as out:\n json.dump(self.pull_list,out)\n else:\n with open(file_path) as infile:\n data = json.load(infile)\n data.update(self.pull_list)\n\n with open(file_path,'w') as out:\n json.dump(self.pull_list,out)",
"def update_from_json(\n self, path: str = join(\"config\", \"hdx_user_static.json\")\n ) -> None:\n super().update_from_json(path)",
"def update_file(file):\n url = extract_url(file)\n if url is None:\n return\n remote_source = read_remote(url)\n if remote_source is None:\n return\n with codecs.open(str(file), 'w', 'utf-8') as f:\n f.write(\"# url %s\\n\" % url)\n f.write(remote_source)\n if not settings['SILENT']:\n print \"Updated %s\" % os.path.basename(file)\n return True",
"def _upload_small_file(self, handle, remote_file_path):\n self._client.files_upload(handle.read(), remote_file_path, mode = Dropbox_WriteMode.overwrite)",
"def update_meta_file(meta: Dict):\n print(\"Info: Updating meta file.\")\n\n try:\n with open(meta_file_name, \"w\") as meta_file:\n json.dump(meta, meta_file)\n except OSError:\n sys.exit(\"Could not open/write meta file: meta.json.\")",
"def upload_request(self, message):\n path_to_file = message[1]\n file_size = message[2]\n # Updates user filesystem\n filename = database.add_user_filesystem(self.username, path_to_file, file_size)\n self.recieve_file(filename, int(file_size))",
"def upload_to_aom(file_path,AOM_NAME,AOM_SECRETKEY,replay_dict):\r\n if AOM_NAME == None or AOM_SECRETKEY == None:\r\n return\r\n\r\n if (time.time() - os.path.getmtime(file_path)) > 60:\r\n return\r\n\r\n if replay_dict['mainCommander'] in [None,'']:\r\n sendEvent({'uploadEvent':True,'response':'Not valid replay for upload'})\r\n return\r\n\r\n url = f'http://starcraft2coop.com/scripts/assistant/replay.php?username={AOM_NAME}&secretkey={AOM_SECRETKEY}'\r\n try:\r\n with open(file_path, 'rb') as file:\r\n response = requests.post(url, files={'file': file})\r\n logger.info(f'Replay upload reponse: {response.text}')\r\n \r\n if 'Success' in response.text or 'Error' in response.text:\r\n sendEvent({'uploadEvent':True,'response':response.text})\r\n \r\n except:\r\n sendEvent({'uploadEvent':True,'response':'Error'})\r\n logger.error(traceback.format_exc())",
"def update_file(to_update, from_url, value_dict={}, callback=None):\n\t# Get new content\n\tlocal_mod_time = when_modified(to_update)\n\tnew_content = get_from_server_if_modified(from_url, local_mod_time)\n\t\n\tif new_content:\n\t\tprint '+ Updating %s' % to_update\n\t\t\n\t\t# Substitute values \n\t\t# This could be done better, but it will suffice for now.\n\t\tfor (key, val) in value_dict.iteritems():\n\t\t\tnew_content = new_content.replace(('{$%s$}' % key), val)\n\t\t\n\t\t# Write out the file\n\t\tf = open(to_update, 'w')\n\t\tf.write(new_content)\n\t\tf.close()\n\t\t\n\t\t# Execute any callbacks\n\t\tif callback:\n\t\t\tcallback()\n\t\t\n\t\treturn True\n\telse:\n\t\tprint '- No new content for %s' % to_update\n\t\treturn False"
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Determine what kls this group inherits from. If the default kls should be used, then None is returned
|
def super_kls(self):
    if not self.kls and self.parent and self.parent.name:
        return self.parent.kls_name
    return self.kls
|
[
"def default_windows_group(self):\n return self._default_windows_group",
"def available_groups(cls):\n raise NotImplementedError",
"def get_group(self): # real signature unknown; restored from __doc__\n return \"\"",
"def inherit_cert_keychain(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"inherit_cert_keychain\")",
"def default(self):\n return self.level == SLC_DEFAULT",
"def origin_group_override(self) -> Optional[pulumi.Input['OriginGroupOverrideArgs']]:\n return pulumi.get(self, \"origin_group_override\")",
"def _getRepresentingGrouper(groupingNames):\n\n # Transform grouping name list into grouper list\n grouperList = []\n for g in groupingNames:\n if g == 'Trade.Portfolio':\n grouperList.append(acm.FAttributeGrouper('Trade.Portfolio'))\n elif g in groupingCriteriaGroupers:\n grouperList.append(groupingCriteriaGroupers[g])\n else:\n raise ValueError('Unable to find default or built-in grouper for '\n '%s in %s' % (g, groupingNames))\n\n # Return the only grouper or the synthesised chained grouper from the list.\n if len(grouperList) == 1:\n return grouperList[0]\n return acm.CreateWithParameter('FChainedGrouper', grouperList)",
"def group(self):\n\n return self.parent().group()",
"def getGroupedHebergementTypes():",
"def default_label_key(self):",
"def _default_slc(self, tabset):\n self._slctab = {}\n self._default_tabset = tabset\n for slc in range(NSLC + 1):\n self._slctab[bytes([slc])] = tabset.get(bytes([slc]),\n SLC_definition(SLC_NOSUPPORT, _POSIX_VDISABLE))",
"def kll(self):\n return self._kll",
"def getGroupChild(self) -> \"SoBase const *\":\n return _coin.SoNotRec_getGroupChild(self)",
"def GetSpeciesGroupToUse(self):\n if self._species_group_to_use:\n return self._species_group_to_use\n \n self.SetHighestPriority()\n return self._species_group_to_use",
"def part_salable_default():\n\n return InvenTreeSetting.get_setting('PART_SALABLE')",
"def _nativeLayerGroup( self ):\r\n\t\tindex\t= self.metaData().value( 'groupIndex' ) - 1\r\n\t\tnames \t= list(self._scene.metaData().value('layerGroupNames'))\r\n\t\tif ( 0 <= index and index < len( names ) ):\r\n\t\t\treturn names[index]\r\n\t\treturn ''",
"def default_key_specs(self) -> Optional[Sequence['outputs.ManagedZoneDnssecConfigDefaultKeySpec']]:\n return pulumi.get(self, \"default_key_specs\")",
"def default_agent_group(account):\n return '%s:%s' % (__ensure_acc_id(account), AGENT)",
"def get_classification_hierarchy_levels(self):\n return [l.name for l in self.class_hr]",
"def get_node_grouper(self):\n raise NotImplementedError"
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Calculate color depth of image pixel.
|
def __calc_color_depth(self):
    self.color_depth = 2**(8 * self.data.dtype.itemsize)
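# Worked example (added for clarity): for uint8 image data, dtype.itemsize is 1 byte,
# so color_depth = 2**(8*1) = 256 levels; for uint16 data it is 2**(8*2) = 65536.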
|
[
"def _get_bit_depth(color_count):\n for i in [1, 2, 4, 8]:\n if color_count <= 2**i:\n return i",
"def screen_color_depth(self):\n return self._screen_color_depth",
"def getBitDepth(im=None, numpy=False):\n import error\n if im==None: im=getImage()\n bd=im.getBitDepth() \n if numpy==False: return bd\n elif bd==8: return 'uint8'\n elif bd==16: return 'uint16'\n elif bd==32: return 'float'\n else:\n raise error.ImageTypeNotSupported, \"RGB images not supported\"",
"def depth( self, pyArgs, index, wrappedOperation ):\n return self.arrayType.dimensions( pyArgs[self.pixelsIndex] )[2]",
"def __call__(self, depth, dtype=np.uint8):\n assert depth.ndim == 2, \"depth image must be 2 dimensional\"\n assert np.issubdtype(\n depth.dtype, np.floating\n ), \"depth dtype must be float\"\n\n normalized, self._min_value, self._max_value = normalize(\n depth,\n min_value=self._min_value,\n max_value=self._max_value,\n return_minmax=True,\n )\n\n isnan = np.isnan(normalized)\n normalized[isnan] = 0\n\n if isinstance(self._colormap, str):\n if hasattr(matplotlib, \"colormaps\"):\n colormap_func = matplotlib.colormaps[self._colormap]\n else:\n colormap_func = matplotlib.cm.get_cmap(self._colormap)\n else:\n colormap_func = self._colormap\n rgb = colormap_func(normalized)[:, :, :3]\n rgb[isnan] = (0, 0, 0)\n\n if dtype == np.uint8:\n rgb = (rgb * 255).round().astype(np.uint8)\n else:\n assert np.issubdtype(dtype, np.floating)\n rgb = rgb.astype(dtype)\n\n return rgb",
"def depth(self) -> float:",
"def ComputeDepth(self, *args):\n return _Select3D.Select3D_SensitiveCircle_ComputeDepth(self, *args)",
"def bits_per_pixel(self):\n ret = self._get_attr(\"bitsPerPixel\")\n return ret",
"def depth_coeff(self) -> float:\n return self.__depth_coeff",
"def __get_projected_image_depth(self, projected_image, surface):\n image_depth = np.ones((self.height, self.width)) * np.inf\n p = self.orientation.dot(surface.edge_points3d[0] - self.position)\n n = self.orientation.dot(surface.normal)\n t = p.dot(n)\n\n for i in xrange(self.height):\n for j in xrange(self.width):\n if not np.allclose(projected_image[i, j], 0):\n d = np.array([j - self.half_width, i - self.half_height, self.focal])\n d /= np.linalg.norm(d)\n image_depth[i, j] = t / d.dot(n)\n\n return image_depth",
"def computeDepthOfField(*args, **kwargs):\n \n pass",
"def getDepth(self) -> \"int\":\n return _coin.SoElement_getDepth(self)",
"def read_depthmap(filename):\n depth_png = np.array(Image.open(filename), dtype=int)\n assert (np.max(depth_png) > 255)\n\n depth = depth_png.astype(np.float) / 256.\n depth[depth_png == 0] = -1.\n return depth",
"def calculate_color_redness(color):\n\n try:\n return color[0]/sum(color)\n except ZeroDivisionError:\n return 0",
"def depth(self):\n return self._depth * 10",
"def get_pixel_count(self): # pragma: no cover\n pass",
"def ComputeDepth(self, *args):\n return _Select3D.Select3D_SensitiveCurve_ComputeDepth(self, *args)",
"def disparity_to_depth(disparity):\n\tinv_depth = (disparity+DOFFS)/(BASELINE*F)\n\treturn 1/inv_depth",
"def get_dominant_color(im, dimensions):\n\n counts = defaultdict(int)\n im = im.crop(dimensions)\n width, height = im.size\n pixels = im.load()\n \n for i in range(width):\n for j in range(height):\n counts[pixels[i, j]] += 1\n \n counts[-1] = 0\n color = -1\n for col in counts:\n if counts[col] > counts[color]:\n color = col\n \n return color"
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Apply LUT to the image.
|
def __apply_lut(self, lut):
    if self.is_grayscale():
        for w in range(self.data.shape[0]):
            for h in range(self.data.shape[1]):
                self.data[w][h] = lut[self.data[w][h]]
    else:
        for w in range(self.data.shape[0]):
            for h in range(self.data.shape[1]):
                for i in range(self.data.shape[2]):
                    self.data[w][h][i] = lut[self.data[w][h][i]]
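# Editor's note (a sketch, assuming self.data is an integer-typed NumPy array and `lut`
# is indexable by its values): the nested per-pixel loops above can be replaced by a
# single vectorized lookup that performs the same element-wise mapping for both the
# grayscale and multi-channel cases:
#
#   self.data = np.asarray(lut)[self.data]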
|
[
"def apply_lut(band, lut):\n\n # if lut.dtype != band.dtype:\n # msg = \"Band ({}) and lut ({}) must be the same data type.\".format(band.dtype, lut.dtype)\n # raise LUTException(msg)\n\n return np.take(lut, band, mode='clip')",
"def _perturb_image(self, x: np.ndarray, img: np.ndarray) -> np.ndarray:\n return img",
"def lbpOperator(self, img, x, y, w, h, deltas, extract=lambda c: c, step=1):\n \n val = 0\n c = int(extract(img[y, x]))\n\n for j in range(len(deltas)):\n d = deltas[j]\n xx = (x + d[0] * step) % w\n yy = (y + d[1] * step) % h\n\n col = int(extract(img[yy, xx]))\n val += int(2 ** j) if col - c >= 0 else 0\n\n return val",
"def laplace(arr: np.ndarray, out: np.ndarray) -> None:\n for i in range(1, dim_r + 1): # iterate inner radial points\n out[i - 1] = factor_h[i - 1] * (arr[i + 1] - arr[i])\n out[i - 1] -= factor_l[i - 1] * (arr[i] - arr[i - 1])",
"def pull_out_L_channel(img_lab):\n img_l = img_lab[:, :, 0]\n return img_l",
"def lutshow(img,lut):\n f,ax = plt.subplots(1,3,dpi=150)\n imshow(img,ax[0])\n ax[1].plot(lut)\n ax[1].plot(np.arange(0,256),'--')\n ax[1].set_aspect('equal', 'box')\n ax[1].tick_params(left=False,bottom=False,labelleft=False,labelbottom=False)\n imshow(lut[img],ax[2])\n return f",
"def upsample(self, img, result=...) -> result:\n ...",
"def __setitem__(self, *args) -> \"void\":\n return _itkImagePython.vectoritkImageULL3___setitem__(self, *args)",
"def lutx(lut, input, output, format):\n # Read in input image\n gdal.AllRegister()\n src_ds = gdal.Open(input, GA_ReadOnly)\n if src_ds is None:\n print 'Error: could not open {0}'.format(input)\n rows = src_ds.RasterYSize\n cols = src_ds.RasterXSize\n # Read in data\n band = src_ds.GetRasterBand(1)\n dtype = band.DataType\n data = band.ReadAsArray(0, 0, cols, rows).astype(\n gdal_array.flip_code(dtype))\n \n # Determine requiredo output datatype\n out_dt = np.byte\n if np.min(lut.values()) < 0:\n # Must be signed int\n if np.max(np.abs(lut.values())) < 2 ** 15:\n # NOTE: put np.int8 as np.int16 since GDAL has no int8\n out_dt = np.int16\n elif np.max(np.abs(lut.values())) < 2 ** 31:\n out_dt = np.int32\n elif np.max(np.abs(lut.values())) < 2 ** 63:\n out_dt = np.int64\n else:\n print 'Required output data type is unknown'\n sys.exit(1)\n else:\n # Can be unsigned\n if np.max(lut.values()) < 2 ** 8:\n out_dt = np.uint8\n elif np.max(lut.values()) < 2 ** 16:\n out_dt = np.uint16\n elif np.max(lut.values()) < 2 ** 32:\n out_dt = np.uint32\n elif np.max(lut.values()) < 2 ** 64:\n out_dt = np.uint64\n else:\n print 'Required output data type is unknown'\n sys.exit(1)\n\n if DEBUG:\n print 'NumPy data type: %s' % str(out_dt)\n print 'GDAL data type: %s' % str(\n gdal.GetDataTypeName(gdal_array.flip_code(out_dt)))\n\n # Copy data for output\n lutdata = data.copy().astype(out_dt)\n # Apply lut\n for key, value in lut.iteritems():\n np.place(lutdata, data == key, value)\n\n # Write to output\n driver = gdal.GetDriverByName(format)\n dst_ds = driver.Create(output, \n src_ds.RasterXSize, src_ds.RasterYSize, 1,\n gdal_array.flip_code(out_dt))\n dst_ds.SetProjection(src_ds.GetProjection())\n dst_ds.SetGeoTransform(src_ds.GetGeoTransform())\n dst_ds.GetRasterBand(1).WriteArray(lutdata)\n # Close\n src_ds = None\n dst_ds = None\n print 'Wrote output to file {0}'.format(output)",
"def var_laplacian(self):\n if self.data is not None and self.trans:\n gray = cv2.cvtColor(self.data, cv2.COLOR_BGR2GRAY)\n return cv2.Laplacian(gray, cv2.CV_64F).var()\n else:\n raise Exception('Image not yet transformed')",
"def laplacian(img: np.ndarray, type: int):\r\n\r\n print(\"Applying Laplacian kernel number {}\".format(type))\r\n \r\n # Construct the Laplacian kernels. There are multiple types, as referenced from:\r\n # R. Fisher, S. Perkins, A. Walker and E. Wolfart. (2003),\r\n # Laplacian/Laplacian of Gaussian, URL: https://homepages.inf.ed.ac.uk/rbf/HIPR2/log.htm, Accessed 29/05/2020\r\n laplacian_type = {\r\n 0: np.array(([0, 1, 0],[1, -4, 1],[0, 1, 0]), dtype=\"int\"),\r\n 1: np.array(([0, -1, 0],[-1, 4, -1],[0, -1, 0]), dtype=\"int\"),\r\n 2: np.array(([1, 1, 1],[1, -8, 1],[1, 1, 1]), dtype=\"int\"),\r\n 3: np.array(([-1, -1, -1],[-1, 8, -1],[-1, -1, -1]), dtype=\"int\")\r\n }\r\n\r\n # Convolve our input image with the laplacian filter\r\n convolve_output = conv(img, laplacian_type.get(type))\r\n\r\n # Show the original image\r\n cv2.imshow(\"Original\", gray)\r\n\r\n # Show the image after applying the laplacian filter\r\n cv2.imshow(\"Custom Laplacian Filter\", convolve_output)\r\n\r\n # Show the OpenCV version of the Laplacian too, for comparison.\r\n opencv_laplacian = cv2.convertScaleAbs(cv2.Laplacian(img, cv2.CV_32F, ksize=3))\r\n cv2.imshow(\"OpenCV Laplacian\", opencv_laplacian)\r\n\r\n # Press any key to exit\r\n cv2.waitKey(0)\r\n cv2.destroyAllWindows()",
"def __setitem__(self, *args) -> \"void\":\n return _itkImagePython.vectoritkImageULL2___setitem__(self, *args)",
"def make_lungmask(img):\n\trow_size = img.shape[0]\n\tcol_size = img.shape[1]\n\tmean = np.mean(img)\n\tstd = np.std(img)\n\timg = img - mean\n\timg = img / std\n\t# Find the average pixel value near the lungs\n\t# to renormalize washed out images\n\tmiddle = img[int(col_size / 5):int(col_size / 5 * 4),\n\t int(row_size / 5):int(row_size / 5 * 4)] # FIXME: doesn't work for projection\n\tmean = np.mean(middle)\n\t# To improve threshold finding, I'm moving the\n\t# underflow and overflow on the pixel spectrum\n\timg[img == max] = mean\n\timg[img == min] = mean\n\t#\n\t# Using Kmeans to separate foreground (soft tissue / bone) and background (lung/air)\n\t#\n\tkmeans = KMeans(n_clusters=10).fit(np.reshape(middle, [np.prod(middle.shape), 1]))\n\tcenters = sorted(kmeans.cluster_centers_.flatten())\n\tthreshold = np.mean(centers)\n\n\tthresh_img = np.where(img > threshold, 1.0, 0.0) # sets area outside heart to 0, inside to 1\n\teroded = morphology.erosion(thresh_img, np.ones([3, 3]))\n\tdilation = morphology.dilation(eroded, np.ones([6, 6]))\n\treturn dilation",
"def applyNormalisation(image):\n #clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))\n #image[:,:,3] = clahe.apply(image[:,:,3])\n return image / 255.",
"def applyLookupTable(data, lut):\n if data.dtype.kind not in ('i', 'u'):\n data = data.astype(int)\n\n cp = getCupy()\n if cp and cp.get_array_module(data) == cp:\n # cupy.take only supports \"wrap\" mode\n return cp.take(lut, cp.clip(data, 0, lut.shape[0] - 1), axis=0)\n else:\n return np.take(lut, data, axis=0, mode='clip')",
"def applySubImage(self, *args):\n return _coin.SoGLBigImage_applySubImage(self, *args)",
"def Graft(self, data: 'itkImageULL3') -> \"void\":\n return _itkImagePython.itkImageULL3_Graft(self, data)",
"def randomizeLUT(self):\n inputs = list(product((0,1), repeat = self.K))\n for i in inputs:\n self.LUT[i] = np.random.rand()\n print(\"Randomized LUT:\")\n print(self.LUT)",
"def restore_normalization(image):\r\n image = image * 128\r\n image = image + 128\r\n return image"
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Change image data type.
|
def change_type(self, img_type):
    self.change_color_depth_2_uint8()
    self.data = cvtColor(self.data, COLOR_CONVERSION_CODES[img_type])
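# Editor's note (names assumed, not confirmed by the original code): COLOR_CONVERSION_CODES
# is presumably a dict mapping an image-type string to an OpenCV conversion flag, e.g.
# {'grayscale': cv2.COLOR_BGR2GRAY, 'rgb': cv2.COLOR_BGR2RGB}, and cvtColor is cv2.cvtColor,
# so change_type('grayscale') would convert self.data in a single call.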
|
[
"def setType(self, imageType):\t\t\t \n\t\tself.imageType = imageType",
"def change_img_type(\n filepath,\n img_type):\n dirpath = os.path.dirname(filepath)\n info = parse_filename(os.path.basename(filepath))\n info['type'] = img_type\n filepath = to_filename(info, dirpath)\n return filepath",
"def np_changedtype(self, dtype):\n self.data = self.data.astype(dtype)",
"def set_image_type(self, content_type):\n content_types = RedditWallpaperChooser.constants.ACCEPTED_CONTENT_TYPES\n if content_type not in content_types:\n logger.warning(\n \"Unknown content type %s. Falling back to JPG.\",\n content_type\n )\n\n self.image_type = content_types.get(content_type, \"jpg\")",
"def update_data_type(self):\n data_type_var = self.data_type_var.get()\n\n if data_type_var == 0:\n #Auto - determine data type\n bin_count = len(glob.glob1(self.dir_path,\"*.bin\"))\n bmp_count = len(glob.glob1(self.dir_path,\"*.bmp\"))\n\n dir_contents = os.listdir(self.dir_path)\n\n if bin_count >= bmp_count or (\"FTPdetectinfo_\" in dir_contents):\n self.data_type.set(1) #Set to CAMS if there are more bin files\n self.end_frame.set(255)\n else:\n self.data_type.set(2) #Set to Skypatrol if there are more BMP files\n self.end_frame.set(1500)\n\n elif data_type_var == 1:\n #CAMS\n self.data_type.set(1)\n self.end_frame.set(255)\n\n elif data_type_var == 2:\n #Skypatrol\n self.data_type.set(2)\n self.end_frame.set(1500)\n\n self.update_listbox(self.get_bin_list()) #Update listbox\n\n self.mode.set(1)\n self.filter.set(1)\n self.change_mode()\n self.move_top(0) #Move listbox cursor to the top\n\n self.update_image(0)",
"def set_data_type(self, a_data_type):\n self.parameters[\"type\"] = str(a_data_type)\n return self",
"def set_dtype(self, value):\n self._dtype = value\n for x in (self._position, self._orientation, self._velocity,\n self._mass, self._charge, self._diameter,\n self._moment_inertia, self._angmom):\n if x is not None:\n x = x.astype(value)\n for frame in self.frames:\n frame.dtype = value",
"def determine_format(self):\n extension = self.image.name.rsplit(\".\")\n # Get the last chunk of the list\n extension = extension[len(extension) -1]\n extension = extension.lower()\n \n if extension == \"jpg\" or extension == \"jpeg\":\n type = \"JPEG\"\n elif extension == \"gif\":\n type = \"GIF\"\n elif extension == \"png\":\n type = \"PNG\"\n else:\n type = \"JPEG\"\n return type",
"def fancyConvert(image):",
"def determine_mime_type(self):\n extension = self.image.name.rsplit(\".\")\n # Get the last chunk of the list\n extension = extension[len(extension) -1]\n extension = extension.lower()\n \n if extension == \"jpg\" or extension == \"jpeg\":\n type = \"image/jpeg\"\n elif extension == \"gif\":\n type = \"image/gif\"\n elif extension == \"png\":\n type = \"image/png\"\n else:\n type = \"image/jpeg\"\n return type",
"def dtype(self, image_or_type):\n if type(image_or_type) == numpy.dtype:\n return image_or_type\n if self._is_arraylike(image_or_type):\n return image_or_type.dtype\n if not isjava(image_or_type):\n raise TypeError('Unsupported type: ' + str(type(image_or_type)))\n\n # -- ImgLib2 types --\n if jclass('net.imglib2.type.Type').isInstance(image_or_type):\n ij2_types = {\n 'net.imglib2.type.logic.BitType': 'bool',\n 'net.imglib2.type.numeric.integer.ByteType': 'int8',\n 'net.imglib2.type.numeric.integer.ShortType': 'int16',\n 'net.imglib2.type.numeric.integer.IntType': 'int32',\n 'net.imglib2.type.numeric.integer.LongType': 'int64',\n 'net.imglib2.type.numeric.integer.UnsignedByteType': 'uint8',\n 'net.imglib2.type.numeric.integer.UnsignedShortType': 'uint16',\n 'net.imglib2.type.numeric.integer.UnsignedIntType': 'uint32',\n 'net.imglib2.type.numeric.integer.UnsignedLongType': 'uint64',\n 'net.imglib2.type.numeric.real.FloatType': 'float32',\n 'net.imglib2.type.numeric.real.DoubleType': 'float64',\n }\n for c in ij2_types:\n if jclass(c).isInstance(image_or_type):\n return numpy.dtype(ij2_types[c])\n raise TypeError('Unsupported ImgLib2 type: {}'.format(image_or_type))\n\n # -- ImgLib2 images --\n if jclass('net.imglib2.IterableInterval').isInstance(image_or_type):\n ij2_type = image_or_type.firstElement()\n return self.dtype(ij2_type)\n if jclass('net.imglib2.RandomAccessibleInterval').isInstance(image_or_type):\n Util = autoclass('net.imglib2.util.Util')\n ij2_type = Util.getTypeFromInterval(image_or_type)\n return self.dtype(ij2_type)\n\n # -- ImageJ1 images --\n if jclass('ij.ImagePlus').isInstance(image_or_type):\n ij1_type = image_or_type.getType()\n ImagePlus = autoclass('ij.ImagePlus')\n ij1_types = {\n ImagePlus.GRAY8: 'uint8',\n ImagePlus.GRAY16: 'uint16',\n ImagePlus.GRAY32: 'float32', # NB: ImageJ1's 32-bit type is float32, not uint32.\n }\n for t in ij1_types:\n if ij1_type == t:\n return numpy.dtype(ij1_types[t])\n raise TypeError('Unsupported ImageJ1 type: {}'.format(ij1_type))\n\n raise TypeError('Unsupported Java type: ' + str(jclass(image_or_type).getName()))",
"def image_format(self, image_format):\n if image_format not in self.IMAGE_FORMATS:\n raise CameraSettingsError(\n 'Image format {} not supported'.format(image_format))\n self._image_format = image_format",
"def set_dtype(self,dtype):\n self.dtype = dtype",
"def _convert_dicom_metadata_datatype(self, metadata: Dict):\n\n if not metadata:\n return metadata\n\n # Try to convert data type for the well knowned attributes. Add more as needed.\n if metadata.get(\"series_instance_uid\", None):\n try:\n metadata[\"series_instance_uid\"] = str(metadata[\"series_instance_uid\"])\n except Exception:\n pass\n if metadata.get(\"row_pixel_spacing\", None):\n try:\n metadata[\"row_pixel_spacing\"] = float(metadata[\"row_pixel_spacing\"])\n except Exception:\n pass\n if metadata.get(\"col_pixel_spacing\", None):\n try:\n metadata[\"col_pixel_spacing\"] = float(metadata[\"col_pixel_spacing\"])\n except Exception:\n pass\n\n print(\"Converted Image object metadata:\")\n for k, v in metadata.items():\n print(f\"{k}: {v}, type {type(v)}\")\n\n return metadata",
"def set_img_data(self, img_data):\n\n self._data = img_data\n self.update_window()\n self.update_icon()",
"def setDataType(self, type: ghidra.program.model.data.DataType, alignStack: bool, force: bool, source: ghidra.program.model.symbol.SourceType) -> None:\n ...",
"def setDataType(self, dt: ghidra.program.model.data.DataType) -> None:\n ...",
"def test_set_format(fx_asset):\n with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:\n img.format = 'png'\n assert img.format == 'PNG'\n strio = io.BytesIO()\n img.save(file=strio)\n strio.seek(0)\n with Image(file=strio) as png:\n assert png.format == 'PNG'\n with raises(ValueError):\n img.format = 'HONG'\n with raises(TypeError):\n img.format = 123",
"def setDataType(self, type: ghidra.program.model.data.DataType, storage: ghidra.program.model.listing.VariableStorage, force: bool, source: ghidra.program.model.symbol.SourceType) -> None:\n ..."
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Create a histogram plot window of the image.
|
def create_hist_window(self):
    self.histogram_graphical.create_histogram_plot(self.calc_histogram())
|
[
"def plot_histogram(img):\n hist = cv2.calcHist([img],[0],None,[256],[0,256])\n\n plt.hist(hist,facecolor='green')\n plt.title('Histogram'), plt.xlabel(\"Scale\"), plt.ylabel(\"Quantity\")\n plt.grid(True)\n\n plt.show()",
"def _draw_histogram(self):\n self.range = npy.arange(0, 100)\n all_data = [item for sublist in self.data for item in sublist]\n plt.hist(all_data)\n plt.show()",
"def make_histogram(self): # connected to make histogram (btn_histogram)\n print(\"make hist\")\n# self.calculate_images()\n self.intensitys = np.linspace(0,10,10)\n self.intensitys2 = self.intensitys\n try:\n self.intensitys2 = np.concatenate((self.intensitys,\n self.intensitys2))\n except:\n self.intensitys2 = self.intensitys\n self.doit()\n\n self.histo_data = True",
"def histogram(self):\n\n self.X.hist()\n plt.show()",
"def histogram(self, plot=True, list_of_bands = None):\n if type(list_of_bands) != type(None):\n if type(list_of_bands) == list:\n img = self._img[:,:,[i-1 for i in list_of_bands]]\n else:\n img = self._img[:,:,int(list_of_bands)]\n else:\n img = self._img\n list_of_bands = [i for i in range(1,self._img.shape[2]+1)]\n band_names = {}\n for i in list_of_bands:\n try:\n band_names[i] = self.band_names[i]\n except:\n band_names[i] = \"Band \" + str(i)\n color=iter(cm.rainbow(np.linspace(0,1,len(list_of_bands))))\n bands_histo = {}\n minim = int(img.min())\n maxim = int(img.max())\n for i in list_of_bands:\n pixels = agg_pixels(self[i], mask = self._mask)#imd.agg_pixels(self[i], mask = self._mask)\n bands_histo[i] = np.histogram(pixels, bins =np.arange(minim-1, maxim+1,1))\n if plot:\n plt.figure(figsize=(20,7))\n plt.title(\"{} Histogram\".format(self.name))\n for i in bands_histo:\n c=next(color)\n band_in = bands_histo[i]\n plt.plot(band_in[1][:len(band_in[1])-1], band_in[0], label = band_names[i], color = c)\n plt.legend(bbox_to_anchor=(1.05, 1), loc=1, borderaxespad=0.)\n plt.show()\n return bands_histo",
"def update_histogram(self, img):\n\n nonzero_values = img.ravel()[np.flatnonzero(img)]\n _, _, patches_hist = self.ax_hist.hist(nonzero_values, density=True,\n bins=cfg.num_bins_histogram_display)\n self.ax_hist.relim(visible_only=True)\n self.ax_hist.autoscale_view(scalex=False) # xlim fixed to [0, 1]\n self.UI.data_handles.extend(patches_hist)",
"def plot_hist(self):\n print(\"Plotting histogram\")\n counts = Counter(self.kmeans.label.flatten())\n\n x = list(counts.keys())\n y = list(counts.values())\n\n plt.bar(x, y)\n plt.xlabel(\"Visual Word Index\")\n plt.ylabel(\"Frequency\")\n plt.title(\"Complete Vocabulary Generated\")\n plt.xticks(np.array(x) + 0.4, x)\n plt.savefig(\"visual_word_histogram.jpg\")",
"def makeHistogram(values, numBins, xLabel, yLabel, title=None):\n # TODO\n pylab.hist(values, numBins)\n pylab.xlabel(xLabel)\n pylab.ylabel(yLabel)\n if type(title) == str:\n pylab.title(title)\n\n pylab.show()",
"def _histogram(self):\n\n title = f\"k Nearest Neighbors\\nK values chosen by 10-fold CV\" \n \n sns.set_palette(self.palette, self.n_colors)\n sns.set_style(self.style)\n\n fig, axs = plt.subplots(figsize=(12,5)) \n \n sns.histplot(x=self.k_values, ax=axs).set_title(title, weight=\"bold\") \n fig.tight_layout()",
"def EventDisplayHist(quantities, title=\"Charge\", cutrange=[-1, -1]):\n fig = plt.figure(figsize=[12, 12])\n imgmin = quantities.min()\n imgmax = quantities.max()\n if cutrange[0] != cutrange[1]:\n imgmin = cutrange[0]\n imgmax = cutrange[1]\n plt.hist(quantities, 100, [imgmin, imgmax])\n # fig.suptitle(title, fontsize=20)\n plt.xlabel(title, fontsize=18)\n plt.ylabel('Count / bin', fontsize=16)",
"def plot_color_histogram(img):\n color = ('b', 'g', 'r')\n for i, col in enumerate(color):\n hist = cv2.calcHist([img],[i],None,[256],[0,256]) \n plt.plot(hist, color = col)\n plt.xlim([0,256])\n \n plt.title('Color Histogram'), plt.xlabel(\"Scale\"), plt.ylabel(\"Quantity\")\n plt.grid(True)\n\n plt.show()",
"def add_histogram_panel(self):\n\n self.ax_hist = plt.axes(cfg.position_histogram_t1_mri)\n self.ax_hist.set_xticks(cfg.xticks_histogram_t1_mri)\n self.ax_hist.set_yticks([])\n self.ax_hist.set_autoscaley_on(True)\n self.ax_hist.set_prop_cycle('color', cfg.color_histogram_t1_mri)\n self.ax_hist.set_title(cfg.title_histogram_t1_mri, fontsize='small')",
"def create_histogram_color(self, parent):\n if parent.loaded_image_type == \"gs\" or \\\n parent.loaded_image_type == 'b' or \\\n parent.loaded_image_type == 'gs3ch':\n return self.create_histogram_greyscale(parent)\n img = parent.histogram_image_data\n y_axis = [0 for i in range(256)]\n x_axis = [i for i in range(256)]\n red_channel = [i[0] for i in img[1]]\n green_channel = [i[1] for i in img[1]]\n blue_channel = [i[2] for i in img[1]]\n\n def compute_values_count(channel_name):\n for value in channel_name:\n luminence_value = int(value)\n y_axis[luminence_value] += 1\n\n compute_values_count(red_channel)\n\n plt.figure()\n plt.bar(x_axis, y_axis)\n plt.title(f'Histogram - kanał czerwony - {img[0]}') # Red channel\n\n y_axis = [0 for i in range(256)]\n compute_values_count(green_channel)\n plt.figure()\n plt.bar(x_axis, y_axis)\n plt.title(f'Histogram - kanał zielony - {img[0]}') # Green channel\n\n y_axis = [0 for i in range(256)]\n compute_values_count(blue_channel)\n plt.figure()\n plt.bar(x_axis, y_axis)\n plt.title(f'Histogram - kanał niebieski - {img[0]}') # Blue channel\n\n plt.show()",
"def plot_hist(self, **kwargs: Any) -> None:\n plt.hist(self.iterable, **kwargs)\n if kwargs.get('grid', False):\n plt.grid()\n plt.ylabel('$P(x)$')\n plt.xlabel('$x$')\n plt.show()",
"def plot_hist(list_of_data, plot_title, bin_sz):\r\n \r\n from plotly.offline import plot, iplot, init_notebook_mode\r\n import plotly.graph_objs as go\r\n\r\n # This line is necessary for offline mode.\r\n init_notebook_mode(connected=False)\r\n \r\n data = []\r\n \r\n for d in list_of_data:\r\n \r\n trace_tmp = go.Histogram(\r\n x=d,\r\n opacity=0.33,\r\n autobinx=False,\r\n xbins=dict(start=min(d),end=max(d),size=bin_sz) \r\n )\r\n \r\n data.append(trace_tmp)\r\n\r\n layout = go.Layout(title = plot_title, barmode='overlay')\r\n fig = go.Figure(data=data, layout=layout)\r\n\r\n iplot(fig, filename='Histograms')",
"def create_histogram_greyscale(self, parent, img=None):\n if parent.loaded_image_type == 'gs3ch':\n # gets values of only first channel of greyscale 3 channel type image\n img = [parent.loaded_image_data[1][i][0] for i in range(len(parent.loaded_image_data[1]))]\n else:\n img = parent.loaded_image_data[1] # list containing image luminence avlues\n\n # List with occurrences of each luminance value\n values_count = [0 for i in range(256)]\n for value in img:\n values_count[value] += 1\n\n x_axis = list([i for i in range(256)])\n y_axis = values_count\n plt.title(f\"Histogram - {parent.loaded_image_data[0]}\")\n plt.bar(x_axis, y_axis)\n plt.show()",
"def plot_histogram(self):\n self.history_df['Spread_delta'].plot.hist(bins=40, density=True)\n plt.show()",
"def visualize_histogram(histogram, annotation, fig_size=(20,10), fontsize=12):\n plt.rcParams.update({'font.size': fontsize})\n fig = plt.figure(figsize=fig_size)\n plt.bar(histogram[1][:-1], histogram[0], edgecolor=\"black\", align=\"edge\")\n plt.title(annotation)\n plt.xlabel('bins')\n plt.ylabel('probability')\n plt.show()",
"def histogram(hist, bins, transposition=False, **kwargs):\n # calculate width of each bars by alpha\n alpha = 0.7\n width = alpha * (bins[1] - bins[0])\n # calculate the center point of entire histogram\n center = (bins[1:] + bins[:-1]) / 2\n # create new figure\n if not transposition:\n pl.bar(center, hist, align='center', width=width, **kwargs)\n else:\n pl.barh(center, hist, align='center', height=width, **kwargs)"
] |
{
    "objective": {
        "paired": [],
        "self": [],
        "triplet": [
            [
                "query",
                "document",
                "negatives"
            ]
        ]
    }
}
|
Open rename dialog window to change the image name.
|
def rename(self):
    dialog_rename = Rename(self.name)
    if dialog_rename.exec():
        self.__update_image_name(dialog_rename.new_name)
|
[
"def renameUI():\n pass",
"def change_image_name(self, img, newname):\r\n return self.update(img, {\"name\": newname})",
"def set_name(self, new_name):\n\n self.img.attrib['Name'] = new_name",
"def click_rename_icon(self, file_name):\n return self",
"def Rename(self, event):\n name = self.confList.GetStringSelection()\n while True:\n n = self.confList.GetStringSelection()\n p = self.state.GetSurface(\"JconfDict\").GetPath(n)\n f = os.path.split(p)[1]\n dlg = wx.TextEntryDialog(self,\n \"What do you want to rename\" + \\\n \" %s to?\\n\\n\" %(n) + \\\n \"Jconf File: %s\" %(f),\n \"Rename %s\" %(n), name)\n if dlg.ShowModal() == wx.ID_OK:\n name = dlg.GetValue()\n dlg.Destroy()\n selection = self.confList.GetStringSelection()\n ##Check for slashes\n if name.count('/') > 0 or name.count('\\\\') > 0:\n dlg = wx.MessageDialog(self,\n \"Your new name has slashes\" + \\\n \" in it.\\n\" + \\\n \"Please choose a different name.\",\n \"ERROR: Name Contains Slashes\",\n wx.OK)\n dlg.ShowModal()\n dlg.Destroy()\n name = name.replace('/', '-')\n name = name.replace('\\\\', '-')\n ##Check if it's empty/spaces\n elif name.isspace() or name == '':\n dlg = wx.MessageDialog(self,\n \"Your new name is empty.\" + \\\n \" Please choose a different name.\",\n \"ERROR: Name is Empty\",\n wx.OK)\n dlg.ShowModal()\n dlg.Destroy()\n name = self.confList.GetStringSelection()\n ##Else accept it.\n else:\n finalName = self.state.GetBase(\"JconfDict\").Rename(selection,\n name)\n if finalName != name:\n self.NameChangeWarning(name, finalName)\n self.UpdateDisplay(finalName, True)\n break\n else:\n break",
"def renameItem(self):\n item = self.getSanitizedItemSelectionData()\n if item is None:\n return\n \n if (item[0] is DATA_SET_ITEM):\n data = DataStore.get(item[1])\n dlg = EditNameDialog(self.Parent, data.displayname)\n if dlg.ShowModal() == wx.ID_OK:\n data.displayname = dlg.Text\n dlg.Destroy()\n \n if (item[0] is FIGURE_SET_ITEM):\n figure = FigureStore.get(item[1])\n dlg = EditNameDialog(self.Parent, figure.name)\n if dlg.ShowModal() == wx.ID_OK:\n figure.name = dlg.Text\n dlg.Destroy()\n \n item = self.tree.GetSelection()\n item.SetText(dlg.Text)\n self.tree.RefreshSelected()\n item.SetHilight(False)\n item.SetHilight(True)",
"def rename_answer_image(filename, answer_id):\n SQL = \"\"\"UPDATE answer SET image = %s WHERE id = %s;\"\"\"\n data = (filename, answer_id)\n fetch = None\n db.run_statements(((SQL, data, fetch),))",
"def do_rename(self, args):\n print(self.enc_ftp.rename(args.filename, args.new_filename))",
"def __updateCaption(self):\n self.setWindowTitle(\"{0}[*] - {1}\".format(\n os.path.basename(self.__filename),\n self.tr(\"eric6 Snapshot\")))\n self.setWindowModified(self.__modified)\n self.pathNameEdit.setText(os.path.dirname(self.__filename))",
"def rename_window(sess_name, win_id, name):\n p = (sess_name, win_id, name)\n cmd = (CMD_RENAME_WINDOW % p).split(config.CMD_SEP)\n util.exec_cmd(cmd)",
"def rename_overlay(self, old_lbl, new_lbl):\r\n # NOTE: the overlay will call _on_overlay_rename after updating\r\n self.overlay.rename_choice(old_lbl, new_lbl)",
"def rename_file(self, file_id, name):\n pass",
"def reset_new_name(self, event=None):\n\n selected_file = self._selected_file.get()\n base, ext = os.path.splitext(os.path.basename(selected_file))\n\n # Reset the displayed basename\n self._new_name.set(base)\n\n # Set the focus on the filename entry box and select all text\n self.focus_filename_entry()",
"def rename(self, event=None):\n\n # Sanity check: Make sure the viewer is done rendering before\n # we do anything to the file\n rendering = self.viewer.rendering\n if rendering.get():\n self.viewer.cancel_rendering()\n self.wait_variable(rendering)\n\n try:\n self._process_rename()\n return True\n\n except (RenamerError) as err:\n showerror(\"Error\",\n err,\n parent=self)\n return False",
"def update_title(name, window):\n\twindow.wm_title(name)",
"def NameChangeWarning(self, oldName, newName):\n dlg = wx.MessageDialog(None,\n \"The name %s already existed\" %(oldName) + \\\n \" in the list.\\n\" + \\\n \"Your entry was given the\" + \\\n \" name %s instead.\" %(newName),\n \"NOTE: Name Changed\",\n wx.OK)\n dlg.ShowModal()\n dlg.Destroy()",
"def setDisplayName( self, name ):\r\n\t\tself._nativePointer.setname('')\r\n\t\tself._nativePointer.setname( str(name) )\r\n\t\t# Reset the metadata and requery it from the scene file. That will\r\n\t\t# force an update of the name value and ensure we don't create a\r\n\t\t# disconnect between the metadata entry and the newly-renamed layer.\r\n\t\tself._metaData = None\r\n\t\tself.metaData()\r\n\t\treturn",
"def change_name(self, name):\n self._player_name = name",
"def admin_change_name_pic(self, admin_change_name_pic):\n\n self._admin_change_name_pic = admin_change_name_pic"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Execute image dialog for object features.
|
def run_features_dialog(self):
dialog_features = ObjectFeatures(self)
if dialog_features.exec():
return
|
[
"def image_chooser(self):\n self.__image = pg.image.load(\"res/ghost/\" + Ghost.image_names[self.__id] + \"/start.png\")",
"def photoProcessed(self):\n # set up initial window features\n self.photoProcessedScreen.setWindowTitle(\"Unique Facial Feature Detection\")\n self.photoProcessedScreen.resize(575, 400)\n\n obtainedFeaturesText= QLabel(self.photoProcessedScreen)\n obtainedFeaturesText.setStyleSheet(\"font: 14pt Century Gothic\")\n obtainedFeaturesText.setText(\"Obtained unique features!\")\n obtainedFeaturesText.setGeometry(QRect(30, -10, 500, 200))\n obtainedFeaturesText.setAlignment(Qt.AlignCenter)\n\n global results\n global images\n if saveImage == 1:\n # open cropped image if it exists\n if (str(os.path.isfile(\"./backend/ResizedImages/newCropped.jpeg\")) == True):\n images = Image.open(\"./backend/ResizedImages/newCropped.jpeg\")\n else:\n images = Image.open(path)\n\n # set up button to show unique features\n photoProcessedBtnLayout = QHBoxLayout()\n getFeaturesListBtn = QPushButton(\"Get unique feature's list!\")\n photoProcessedBtnLayout.addWidget(getFeaturesListBtn)\n\n # go to next window if button is clicked\n getFeaturesListBtn.clicked.connect(self.outputtingList)\n\n self.photoProcessedScreen.setLayout(photoProcessedBtnLayout)",
"def image(self, obj):",
"def run_operation_dialog(self, operation):\n\n operation_dialog = self.DIALOG_OPERATIONS[operation](self)\n\n if operation_dialog.exec():\n self.data = operation_dialog.img_data",
"def CreateImage( self, dialog, title = 'Create Montage Image' ):\n self._RunBegin( dialog, title )",
"def edit_image(self):\n self.update()",
"def display(self):\n if self.valid:\n IJShow.runIjandShow(self.image_name)",
"def display(self):\n self.o.display_image(self.image)",
"def on_load_press(self):\n dialog = ImageDialog(self)\n dialog.size_hint = (0.75,0.75)\n dialog.pos_hint = {'x':0.125,'y':0.125}",
"def insertImage(self):\n self.popup = QtGui.QFileDialog()\n filename = self.popup.getOpenFileName(self,\n \"select an image\",\n \"\",\n \"Image Files (*.png *.jpg *.bmp *.jpeg *.svg *.gif)\" + \\\n \";;all files (*.*)\")\n\n # QFileDialog returns a tuple with filename and used filter\n if filename[0]:\n imagemarkdown = tp.create_image_markdown(filename[0])\n self.corpusBox.insertPlainText(imagemarkdown)",
"def launchImageEditor(filename, editImageFile=\"string\", viewImageFile=\"string\"):\n pass",
"def open_image(self):\r\n image_viewer = {self.__LINUX_SYS: self.__LINUX_IMG_VWR,\r\n self.__WINDOWS_SYS: self.__WINDOWS_IMG_VWR,\r\n self.__APPLE_SYS: self.__APPLE_IMG_VWR}[self._SYS_PLTFRM]\r\n try:\r\n subprocess.run([image_viewer, self.__RCVD_IMG])\r\n except FileNotFoundError:\r\n pass",
"def select(self, obj_list):\n pos = obj_list[0].matrix[3]\n screen_coordinates = self.active_camera.world_to_screen(pos)\n self.add_object(\"ObjectInfo\", object_box(screen_coordinates[0], screen_coordinates[1], obj_list[0]))",
"def label_help(self, event):\n\n #Don't close the image in this case so that the user can still classify it\n\n print(\"\\n\\nArtifactSpy Help:\")\n print(\"\\n\\tFor samples of each type of image artifact, open this link in a browser:\")\n print(\"\\t\\thttps://github.com/rmorgan10/ArtifactSpy#how-to-label-difference-images\")\n print(\"\\n\\tFor questions on an image, message Rob on Slack, or post in the #artifactspy channel\")\n print(\"\\n\\tIf you suspect a bug or got an error, contact Rob immediately.\")\n \n return",
"def open_ref_image(self):\n item = self.window.refList.currentItem()\n if item is None:\n return\n name = item.text()\n filename = self.ref_images[name]\n self._load_image(filename, self.window.refImg)",
"def draw_dialog():\n Launcher.create_main_window()\n Launcher.create_labels()\n Launcher.create_images()\n Launcher.create_buttons()",
"def open_rtg_image(self):\n filename, _ = QtWidgets.QFileDialog.getOpenFileName(\n self.window, \"Open RTG file\", \"\",\n \"Images (*.png *.PNG *.jpg *.jpeg *.JPG *.JPEG *.DCM *.dcm);;\\\n All Files (*);;\")\n if filename is None:\n return\n self._load_image(filename, self.window.rtgImg)\n\n # Start asking questions\n self.restart_decision_tree()\n\n # Enable button to reset evaluation\n self.window.resetEvaluation.setEnabled(True)",
"def selectImgPlaneFunc(self):\n selImgPlane = cmds.ls(type='imagePlane')\n cmds.select(selImgPlane)",
"def selectFrontView(self):\r\n basicFilter = \"Image Files (*.png *.tiff);;PNG (*.png);;TIFF (*.tiff);;All Files (*.*)\"\r\n self.hide()\r\n self.FrontImagePath = cmds.fileDialog2(caption=\"Please select front image\", fileFilter=basicFilter, fm=1)\r\n self.lineFront.setText(str(self.FrontImagePath[0]))\r\n self.show()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Execute specified operation dialog.
|
def run_operation_dialog(self, operation):
operation_dialog = self.DIALOG_OPERATIONS[operation](self)
if operation_dialog.exec():
self.data = operation_dialog.img_data
|
[
"def littleDialog():\r\n psm = uno.getComponentContext().ServiceManager\r\n dp = psm.createInstance(\"com.sun.star.awt.DialogProvider\")\r\n dlg = dp.createDialog(\"vnd.sun.star.script:Standard.Dialog1?location=application\")\r\n dlg.execute()\r\n return None",
"def dialog_handler(self, command, value=None):\n if 'ip' in command and 'port' in command: #From table of servers\n try:\n self.current_screen.set_ip_port(ip=value.split(':')[0], port=int(value.split(':')[1]))\n self.current_screen.hide_dialog()\n return\n except AttributeError:\n pass\n elif 'ip' in command: #From direct connection dialog\n self.current_screen.set_ip_port(ip=value)\n elif 'port' in command: #From direct connection dialog\n self.current_screen.set_ip_port(port=int(value))\n if not self.last_command: #In this case the dialog just popped-up\n self.last_command = command\n return\n elif 'cancel' in command or 'no' in command or 'false' in command:\n self.current_screen.hide_dialog()\n if 'ip' in command or 'port' in self.last_command:\n self.current_screen.destroy()\n self.restart_main_menu()\n self.last_command = None\n elif 'ok' in command or 'yes' in command or 'agree' in command: #The OK button was pressed\n if 'exit' in command:\n raise GameEndException(\"Byebye!\")\n elif 'input' in command:\n self.current_screen.dialog.trigger_all_elements()\n self.current_screen.hide_dialog()\n self.last_command = None\n else:\n LOG.log('warning', 'the command ',command,' is not recognized.')",
"def execute():\n btn = _get_run_button()\n btn.click()",
"def task_dialog(msg):\n\n window = UI.TaskDialog('Edit crop')\n window.TitleAutoPrefix = False\n\n window.MainIcon = UI.TaskDialogIcon.TaskDialogIconError\n window.MainInstruction = 'Error'\n window.MainContent = msg\n\n window.CommonButtons = UI.TaskDialogCommonButtons.Ok\n window.Show()",
"def execute_actioncollection(obj, actioncollection, confirm=True):\n actioncollection.execute(obj)\n status = actioncollection.status()\n if status.value == ActionStatus.SUCCESS or not confirm:\n return status\n ard = ActionReportDialog(actioncollection)\n confirmed = ard.exec_()\n if confirmed:\n msg = \"User confirmed to continue although the status was: %s\" % status.message,\n s = ActionStatus.SUCCESS\n tb = status.traceback\n else:\n s = status.value\n msg = \"User aborted the actions because the status was: %s\" % status.message,\n tb = status.traceback\n return ActionStatus(s, msg, tb)",
"def MaybeExecuteOp(self, client, user_id, operation_id, wait_callback=None):\n from viewfinder.backend.op.user_op_manager import UserOpManager\n\n user_op_mgr = self._active_users.get(user_id, None)\n if user_op_mgr is None:\n user_op_mgr = UserOpManager(client, self.op_map, user_id,\n partial(self._OnCompletedOp, user_id))\n self._active_users[user_id] = user_op_mgr\n\n user_op_mgr.Execute(operation_id, wait_callback)",
"def _options_dialog(*args, **kwargs) -> Any:\n pass",
"def dialog_handler_cb(self, item, data) -> None:\n # Dialog box initialization event\n if item == KDialogInitEvent:\n vs.SetItemText(self.dialog, self.kWidgetID_fileName, self.parameters.excelFileName)\n # vs.SetItemText(self.dialog, self.kWidgetID_imageFolderName, self.settings.imageFolderName)\n\n vs.ShowItem(self.dialog, self.kWidgetID_excelSheetNameLabel, False)\n vs.ShowItem(self.dialog, self.kWidgetID_excelSheetName, False)\n self.show_parameters(False)\n\n vs.EnableItem(self.dialog, self.kWidgetID_importButton, False)\n vs.EnableItem(self.dialog, self.kWidgetID_importNewCount, False)\n vs.EnableItem(self.dialog, self.kWidgetID_importUpdatedCount, False)\n vs.EnableItem(self.dialog, self.kWidgetID_importDeletedCount, False)\n\n elif item == self.kWidgetID_fileName:\n self.parameters.excelFileName = vs.GetItemText(self.dialog, self.kWidgetID_fileName)\n\n elif item == self.kWidgetID_fileBrowseButton:\n result, self.parameters.excelFileName = vs.GetFileN(\"Open Excel file\", \"\", \"xlsm\")\n if result:\n vs.SetItemText(self.dialog, self.kWidgetID_fileName, self.parameters.excelFileName)\n\n elif item == self.kWidgetID_excelSheetName:\n new_excel_sheet_name = vs.GetChoiceText(self.dialog, self.kWidgetID_excelSheetName, data)\n if self.parameters.excelSheetName != new_excel_sheet_name:\n self.parameters.excelSheetName = new_excel_sheet_name\n self.show_parameters(False)\n if data != 0:\n self.show_parameters(True)\n\n elif item == self.kWidgetID_withImageSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_withImage, data == 0)\n self.parameters.withImageSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_withImageSelector, data)\n elif item == self.kWidgetID_withImage:\n self.parameters.pictureParameters.withImage = \"{}\".format(data != 0)\n # elif item == self.kWidgetID_imageFolderName:\n # self.settings.imageFolderName = vs.GetItemText(\n # self.dialog, self.kWidgetID_imageFolderName)\n # elif item == self.kWidgetID_imageFolderBrowseButton:\n # result, self.settings.imageFolderName = vs.GetFolder(\"Select the images folder\")\n # if result == 0:\n # vs.SetItemText(self.dialog, self.kWidgetID_imageFolderName, self.settings.imageFolderName)\n elif item == self.kWidgetID_imageTextureSelector:\n self.parameters.imageTextureSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_withImageSelector, data)\n elif item == self.kWidgetID_imageWidthSelector:\n self.parameters.imageWidthSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_imageWidthSelector, data)\n elif item == self.kWidgetID_imageHeightSelector:\n self.parameters.imageHeightSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_imageHeightSelector, data)\n elif item == self.kWidgetID_imagePositionSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_imagePosition, data == 0)\n self.parameters.imagePositionSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_imagePositionSelector, data)\n elif item == self.kWidgetID_imagePosition:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_imagePosition, 3)\n if valid:\n self.parameters.pictureParameters.imagePosition = str(value)\n elif item == self.kWidgetID_withFrameSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_withFrame, data == 0)\n self.parameters.withFrameSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_withFrameSelector, data)\n elif item == self.kWidgetID_withFrame:\n self.parameters.pictureParameters.withFrame = \"{}\".format(data != 0)\n elif item == self.kWidgetID_frameWidthSelector:\n self.parameters.frameWidthSelector = 
vs.GetChoiceText(self.dialog, self.kWidgetID_frameWidthSelector, data)\n elif item == self.kWidgetID_frameHeightSelector:\n self.parameters.frameHeightSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_frameHeightSelector, data)\n elif item == self.kWidgetID_frameThicknessSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_frameThickness, data == 0)\n self.parameters.frameThicknessSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_frameThicknessSelector, data)\n elif item == self.kWidgetID_frameThickness:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_frameThickness, 3)\n if valid:\n self.parameters.pictureParameters.frameThickness = str(value)\n elif item == self.kWidgetID_frameDepthSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_frameDepth, data == 0)\n self.parameters.frameDepthSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_frameDepthSelector, data)\n elif item == self.kWidgetID_frameDepth:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_frameDepth, 3)\n if valid:\n self.parameters.pictureParameters.frameDepth = str(value)\n elif item == self.kWidgetID_frameClassSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_frameClass, data == 0)\n self.parameters.frameClassSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_frameClassSelector, data)\n elif item == self.kWidgetID_frameClass:\n index, self.parameters.pictureParameters.frameClass = vs.GetSelectedChoiceInfo(self.dialog, self.kWidgetID_frameClass, 0)\n elif item == self.kWidgetID_frameTextureScaleSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureScale, data == 0)\n self.parameters.frameTextureScaleSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_frameTextureScaleSelector, data)\n elif item == self.kWidgetID_frameTextureScale:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_frameTextureScale, 1)\n if valid:\n self.parameters.pictureParameters.frameTextureScale = str(value)\n elif item == self.kWidgetID_frameTextureRotationSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureRotation, data == 0)\n self.parameters.frameTextureRotationSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_frameTextureRotationSelector, data)\n elif item == self.kWidgetID_frameTextureRotation:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_frameTextureRotation, 1)\n if valid:\n self.parameters.pictureParameters.frameTextureRotation = str(value)\n elif item == self.kWidgetID_withMatboardSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_withMatboard, data == 0)\n self.parameters.withMatboardSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_withMatboardSelector, data)\n elif item == self.kWidgetID_withMatboard:\n self.parameters.pictureParameters.withMatboard = \"{}\".format(data != 0)\n elif item == self.kWidgetID_matboardPositionSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_matboardPosition, data == 0)\n self.parameters.matboardPositionSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_matboardPositionSelector, data)\n elif item == self.kWidgetID_windowWidthSelector:\n self.parameters.windowWidthSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_windowWidthSelector, data)\n elif item == self.kWidgetID_windowHeightSelector:\n self.parameters.windowHeightSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_windowHeightSelector, data)\n elif item == self.kWidgetID_matboardPosition:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_matboardPosition, 3)\n if valid:\n 
self.parameters.pictureParameters.matboardPosition = str(value)\n elif item == self.kWidgetID_matboardClassSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_matboardClass, data == 0)\n self.parameters.matboardClassSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_matboardClassSelector, data)\n elif item == self.kWidgetID_matboardClass:\n index, self.parameters.pictureParameters.matboardClass = vs.GetSelectedChoiceInfo(self.dialog, self.kWidgetID_matboardClass, 0)\n elif item == self.kWidgetID_matboardTextureScaleSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureScale, data == 0)\n self.parameters.matboardTextureScaleSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_matboardTextureScaleSelector, data)\n elif item == self.kWidgetID_matboardTextureScale:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_matboardTextureScale, 1)\n if valid:\n self.parameters.pictureParameters.matboardTextureScale = str(value)\n elif item == self.kWidgetID_matboardTextureRotatSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureRotat, data == 0)\n self.parameters.matboardTextureRotatSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_matboardTextureRotatSelector, data)\n elif item == self.kWidgetID_matboardTextureRotat:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_matboardTextureRotat, 1)\n if valid:\n self.parameters.pictureParameters.matboardTextureRotat = str(value)\n elif item == self.kWidgetID_withGlassSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_withGlass, data == 0)\n self.parameters.withGlassSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_withGlassSelector, data)\n elif item == self.kWidgetID_withGlass:\n self.parameters.pictureParameters.withGlass = \"{}\".format(data != 0)\n elif item == self.kWidgetID_glassPositionSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_glassPosition, data == 0)\n self.parameters.glassPositionSelector = vs.GetChoiceText(\n self.dialog, self.kWidgetID_glassPositionSelector, data)\n elif item == self.kWidgetID_glassPosition:\n valid, value = vs.GetEditReal(self.dialog, self.kWidgetID_glassPosition, 3)\n if valid:\n self.parameters.pictureParameters.glassPosition = str(value)\n elif item == self.kWidgetID_glassClassSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_glassClass, data == 0)\n self.parameters.glassClassSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_glassClassSelector, data)\n elif item == self.kWidgetID_glassClass:\n index, self.parameters.pictureParameters.glassClass = vs.GetSelectedChoiceInfo(self.dialog, self.kWidgetID_glassClass, 0)\n elif item == self.kWidgetID_excelCriteriaSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_excelCriteriaValue, data != 0)\n new_excel_criteria_selector = vs.GetChoiceText(self.dialog, self.kWidgetID_excelCriteriaSelector, data)\n if new_excel_criteria_selector != self.parameters.excelCriteriaSelector:\n self.parameters.excelCriteriaSelector = new_excel_criteria_selector\n self.update_criteria_values(False)\n if data != 0:\n self.update_criteria_values(True)\n else:\n index = vs.GetChoiceIndex(self.dialog, self.kWidgetID_excelCriteriaValue, self.parameters.excelCriteriaValue)\n if index == -1:\n vs.SelectChoice(self.dialog, self.kWidgetID_excelCriteriaValue, 0, True)\n self.parameters.excelCriteriaValue = \"Select a value ...\"\n else:\n vs.SelectChoice(self.dialog, self.kWidgetID_excelCriteriaValue, index, True)\n elif item == self.kWidgetID_excelCriteriaValue:\n self.parameters.excelCriteriaValue = 
vs.GetChoiceText(self.dialog, self.kWidgetID_excelCriteriaValue, data)\n elif item == self.kWidgetID_symbolCreateSymbol:\n self.parameters.symbolCreateSymbol = \"{}\".format(data != 0)\n selector_index = vs.GetSelectedChoiceIndex(self.dialog, self.kWidgetID_symbolFolderSelector, 0)\n vs.EnableItem(self.dialog, self.kWidgetID_symbolFolderSelector, data)\n vs.EnableItem(self.dialog, self.kWidgetID_symbolFolder, selector_index == 0 and data == 1)\n elif item == self.kWidgetID_symbolFolderSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_symbolFolder, data == 0)\n self.parameters.symbolFolderSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_symbolFolderSelector, data)\n elif item == self.kWidgetID_classAssignPictureClass:\n self.parameters.classAssignPictureClass = \"{}\".format(data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_classPictureClassSelector, data == 1)\n selector_index = vs.GetPopUpChoiceIndex(self.dialog, self.kWidgetID_classPictureClassSelector, self.parameters.classClassPictureSelector)\n vs.EnableItem(self.dialog, self.kWidgetID_classPictureClass, selector_index == 0 and data != 0)\n elif item == self.kWidgetID_classPictureClassSelector:\n vs.EnableItem(self.dialog, self.kWidgetID_classPictureClass, data == 0)\n self.parameters.classClassPictureSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_classPictureClassSelector, data)\n elif item == self.kWidgetID_classPictureClass:\n index, self.parameters.pictureParameters.pictureClass = vs.GetSelectedChoiceInfo(self.dialog, self.kWidgetID_classPictureClass, 0)\n elif item == self.kWidgetID_classCreateMissingClasses:\n self.parameters.createMissingClasses = \"{}\".format(data == 1)\n elif item == self.kWidgetID_metaImportMetadata:\n self.parameters.metaImportMetadata = \"{}\".format(data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaArtworkTitleSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaAuthorNameSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaArtworkCreationDateSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaArtworkMediaSelector, data == 1)\n # vs.EnableItem(self.dialog, self.kWidgetID_metaTypeSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaRoomLocationSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaArtworkSourceSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaRegistrationNumberSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaAuthorBirthCountrySelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaAuthorBirthDateSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaAuthorDeathDateSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaDesignNotesSelector, data == 1)\n vs.EnableItem(self.dialog, self.kWidgetID_metaExhibitionMediaSelector, data == 1)\n elif item == self.kWidgetID_metaArtworkTitleSelector:\n self.parameters.metaArtworkTitleSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaArtworkTitleSelector, data)\n elif item == self.kWidgetID_metaAuthorNameSelector:\n self.parameters.metaAuthorNameSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaAuthorNameSelector, data)\n elif item == self.kWidgetID_metaArtworkCreationDateSelector:\n self.parameters.metaArtworkCreationDateSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaArtworkCreationDateSelector, data)\n elif item == self.kWidgetID_metaArtworkMediaSelector:\n self.parameters.metaArtworkMediaSelector = vs.GetChoiceText(self.dialog, 
self.kWidgetID_metaArtworkMediaSelector, data)\n # elif item == self.kWidgetID_metaTypeSelector:\n # self.parameters.metaTypeSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaTypeSelector, data)\n elif item == self.kWidgetID_metaRoomLocationSelector:\n self.parameters.metaRoomLocationSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaRoomLocationSelector, data)\n elif item == self.kWidgetID_metaArtworkSourceSelector:\n self.parameters.metaArtworkSourceSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaArtworkSourceSelector, data)\n elif item == self.kWidgetID_metaRegistrationNumberSelector:\n self.parameters.metaRegistrationNumberSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaRegistrationNumberSelector, data)\n elif item == self.kWidgetID_metaAuthorBirthCountrySelector:\n self.parameters.metaAuthorBirthCountrySelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaAuthorBirthCountrySelector, data)\n elif item == self.kWidgetID_metaAuthorBirthDateSelector:\n self.parameters.metaAuthorBirthDateSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaAuthorBirthDateSelector, data)\n elif item == self.kWidgetID_metaAuthorDeathDateSelector:\n self.parameters.metaAuthorDeathDateSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaAuthorDeathDateSelector, data)\n elif item == self.kWidgetID_metaDesignNotesSelector:\n self.parameters.metaDesignNotesSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaDesignNotesSelector, data)\n elif item == self.kWidgetID_metaExhibitionMediaSelector:\n self.parameters.metaExhibitionMediaSelector = vs.GetChoiceText(self.dialog, self.kWidgetID_metaExhibitionMediaSelector, data)\n elif item == self.kWidgetID_importIgnoreErrors:\n self.parameters.importIgnoreErrors = \"{}\".format(data != 0)\n vs.ShowItem(self.dialog, self.kWidgetID_importErrorCount, data == 0)\n elif item == self.kWidgetID_importIgnoreExisting:\n self.parameters.importIgnoreExisting = \"{}\".format(data != 0)\n elif item == self.kWidgetID_importIgnoreUnmodified:\n self.parameters.importIgnoreUnmodified = \"{}\".format(data != 0)\n elif item == self.kWidgetID_importButton:\n self.import_pictures()\n vs.SetItemText(self.dialog, self.kWidgetID_importNewCount, \"New Pictures: {}\".format(self.importNewCount))\n vs.SetItemText(self.dialog, self.kWidgetID_importUpdatedCount, \"Updated Pictures: {}\".format(self.importUpdatedCount))\n vs.SetItemText(self.dialog, self.kWidgetID_importDeletedCount, \"Deleted Pictures: {}\".format(self.importDeletedCount))\n vs.SetItemText(self.dialog, self.kWidgetID_importErrorCount, \"Error Pictures: {}\".format(self.importErrorCount))\n\n # This section handles the following cases:\n # - The Dialog is initializing\n # - The name of the workbook file has changed\n if item == self.kWidgetID_fileName or item == self.kWidgetID_fileBrowseButton or item == KDialogInitEvent:\n self.set_workbook()\n\n # The image selection has changed\n if item == self.kWidgetID_withImageSelector or item == self.kWidgetID_withImage or item == self.kWidgetID_excelSheetName:\n state = vs.GetSelectedChoiceIndex(self.dialog, self.kWidgetID_withImageSelector, 0) != 0 or \\\n vs.GetBooleanItem(self.dialog, self.kWidgetID_withImage) is True\n\n vs.EnableItem(self.dialog, self.kWidgetID_imageWidthLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imageWidthSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imageHeightLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imageHeightSelector, state)\n 
vs.EnableItem(self.dialog, self.kWidgetID_imagePositionLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imagePositionSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imagePosition, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imageTextureLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_imageTextureSelector, state)\n\n # The frame selection has changed\n if item == self.kWidgetID_withFrameSelector or item == self.kWidgetID_withFrame or item == self.kWidgetID_excelSheetName:\n state = vs.GetSelectedChoiceIndex(self.dialog, self.kWidgetID_withFrameSelector, 0) != 0 or \\\n vs.GetBooleanItem(self.dialog, self.kWidgetID_withFrame) is True\n\n vs.EnableItem(self.dialog, self.kWidgetID_frameWidthLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameWidthSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameHeightLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameHeightSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameThicknessLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameThicknessSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameThickness, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameDepthLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameDepthSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameDepth, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameClassLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameClassSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameClass, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureScaleLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureScaleSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureScale, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureRotationLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureRotationSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_frameTextureRotation, state)\n\n # The matboard selection has changed\n if item == self.kWidgetID_withMatboardSelector or item == self.kWidgetID_withMatboard or item == self.kWidgetID_excelSheetName:\n state = vs.GetSelectedChoiceIndex(self.dialog, self.kWidgetID_withMatboardSelector, 0) != 0 or \\\n vs.GetBooleanItem(self.dialog, self.kWidgetID_withMatboard) is True\n\n vs.EnableItem(self.dialog, self.kWidgetID_windowWidthLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_windowWidthSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_windowHeightLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_windowHeightSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardPositionLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardPositionSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardPosition, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardClassLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardClassSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardClass, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureScaleLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureScaleSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureScale, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureRotatLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureRotatSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_matboardTextureRotat, 
state)\n\n # The glass selection has changed\n if item == self.kWidgetID_withGlassSelector or item == self.kWidgetID_withGlass or item == self.kWidgetID_excelSheetName:\n state = vs.GetSelectedChoiceIndex(self.dialog, self.kWidgetID_withGlassSelector, 0) != 0 or \\\n vs.GetBooleanItem(self.dialog, self.kWidgetID_withGlass) is True\n\n vs.EnableItem(self.dialog, self.kWidgetID_glassPositionLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_glassPositionSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_glassPosition, state)\n vs.EnableItem(self.dialog, self.kWidgetID_glassClassLabel, state)\n vs.EnableItem(self.dialog, self.kWidgetID_glassClassSelector, state)\n vs.EnableItem(self.dialog, self.kWidgetID_glassClass, state)\n\n # After the event has been handled, update some of the import validity settings accordingly\n self.parameters.imageValid = ((self.parameters.withImageSelector == \"-- Manual\" and self.parameters.pictureParameters.withImage == \"True\") or\n self.parameters.withImageSelector != \"-- Manual\") and \\\n (self.parameters.imageTextureSelector != \"-- Select column ...\") and \\\n (self.parameters.imageWidthSelector != \"-- Select column ...\") and \\\n (self.parameters.imageHeightSelector != \"-- Select column ...\")\n\n self.parameters.frameValid = ((self.parameters.withFrameSelector == \"-- Manual\" and self.parameters.pictureParameters.withFrame == \"True\") or\n self.parameters.withFrameSelector != \"-- Manual\") and \\\n (self.parameters.frameWidthSelector != \"-- Select column ...\") and \\\n (self.parameters.frameHeightSelector != \"-- Select column ...\")\n\n self.parameters.matboardValid = ((self.parameters.withMatboardSelector == \"-- Manual\" and self.parameters.pictureParameters.withMatboard == \"True\") or\n self.parameters.withMatboardSelector != \"-- Manual\") and \\\n (self.parameters.windowWidthSelector != \"-- Select column ...\") and \\\n (self.parameters.windowHeightSelector != \"-- Select column ...\")\n\n self.parameters.glassValid = ((self.parameters.withGlassSelector == \"-- Manual\" and\n self.parameters.pictureParameters.withGlass == \"True\") or self.parameters.withGlassSelector != \"-- Manual\")\n\n self.parameters.criteriaValid = \\\n (self.parameters.excelCriteriaSelector != \"-- Select column ...\" and self.parameters.excelCriteriaValue != \"Select a value ...\")\n\n self.parameters.importValid = (self.parameters.imageValid or self.parameters.frameValid) and self.parameters.criteriaValid\n\n vs.EnableItem(self.dialog, self.kWidgetID_importButton, self.parameters.importValid)\n vs.EnableItem(self.dialog, self.kWidgetID_importNewCount, self.parameters.importValid)\n vs.EnableItem(self.dialog, self.kWidgetID_importUpdatedCount, self.parameters.importValid)\n vs.EnableItem(self.dialog, self.kWidgetID_importDeletedCount, self.parameters.importValid)",
"def handle_command(self, data):\n if data is not None:\n command, input = data\n if command == CommandTypes.GUI:\n self.exec_gui(input)\n elif command == CommandTypes.CONNECT:\n self.exec_connect(input)\n elif command == CommandTypes.REFRESH:\n self.exec_refresh()\n elif command == CommandTypes.BACK:\n self.exec_back()",
"def run(self):\n # Create the dialog with elements (after translation) and keep reference\n # Only create GUI ONCE in callback, so that it will only load when the plugin is started\n\n # show the dialog\n self.dlg.show()\n self.loadVectors()\n self.loadDTM()",
"def fileBrowserDialog(fileType=\"string\", mode=int, actionName=\"string\", includeName=\"string\", operationMode=\"string\", fileCommand=\"string\", tipMessage=\"string\", dialogStyle=int, filterList=\"string\", windowTitle=\"string\"):\n pass",
"def _execute_op(self, op):\n operation_flow.pass_op_to_next_stage(self, op)",
"def action_operation(self, operation: str) -> None:\n new_str = operation\n screen_val = self.display_string.get()\n if \"=\" in screen_val:\n self.display_string.set(\"\")\n new_str = screen_val.split(\"=\")[1] + operation\n self.insert_screen(new_str)",
"def process(self): \n\t\tself.status = wx.ID_OK\n\t\tscripting.unregisterDialog(self.dialogName)\n\t\tself.Close()",
"def run(self):\n assert guistate.dlg_handler, 'dlg_handler not set'\n handler = guistate.dlg_handler\n guistate.dlg_handler = None\n response = handler(self)\n assert response is not None, 'dlg_handler returned None'\n return response",
"def _perform_action(self, option):\n if option == 1:\n self.current_user.view_budgets()\n elif option == 2:\n self.current_user.record_transaction()\n elif option == 3:\n self.current_user.view_transactions()\n elif option == 4:\n self.current_user.view_bank_details()\n else:\n print(\"Please enter a valid option.\")",
"def test_edit_shot_button_opens_up_shot_editor_with_the_given_shot(self):\n proj1 = Project('Test Project')\n proj1.save()\n \n seq1 = Sequence(proj1, 'Test Sequence')\n seq1.save()\n \n shot = Shot(seq1, 1, 2, 435)\n shot.handle_at_start = 23\n shot.handle_at_end = 12\n shot.save()\n \n dialog1 = project_manager.MainDialog()\n# self.show_dialog(dialog1)\n \n # hit to the edit shot button\n# QTest.mouseClick(\n# dialog1.edit_shot_pushButton,\n# QtCore.Qt.LeftButton\n# )\n \n # check if the shot_editor dialog is opened\n # HOW ????\n self.fail('test is not finished yet')",
"def file_browse(self, dialog_action, file_name=\"\"):\n\n if (dialog_action==gtk.FILE_CHOOSER_ACTION_OPEN):\n dialog_buttons = (gtk.STOCK_CANCEL\n , gtk.RESPONSE_CANCEL\n , gtk.STOCK_OPEN\n , gtk.RESPONSE_OK)\n else:\n dialog_buttons = (gtk.STOCK_CANCEL\n , gtk.RESPONSE_CANCEL\n , gtk.STOCK_SAVE\n , gtk.RESPONSE_OK)\n\n file_dialog = gtk.FileChooserDialog(title=\"Select File\"\n , action=dialog_action\n , buttons=dialog_buttons)\n \"\"\"set the filename if we are saving\"\"\"\n if (dialog_action==gtk.FILE_CHOOSER_ACTION_SAVE):\n file_dialog.set_current_name(file_name)\n \"\"\"Create and add the pywine filter\"\"\"\n filter = gtk.FileFilter()\n filter.set_name(\"Text\")\n filter.add_pattern(\"*.\" + FILE_EXT)\n file_dialog.add_filter(filter)\n \"\"\"Create and add the 'all files' filter\"\"\"\n filter = gtk.FileFilter()\n filter.set_name(\"All files\")\n filter.add_pattern(\"*\")\n file_dialog.add_filter(filter)\n\n \"\"\"Init the return value\"\"\"\n result = \"\"\n if file_dialog.run() == gtk.RESPONSE_OK:\n result = file_dialog.get_filename()\n file_dialog.destroy()\n\n return result",
"def _open(self, file):\n dialog = self._app.child(name = 'Open', roleName = 'dialog')\n if not dialog.child('Location:').showing:\n dialog.child('Type a file name').point()\n time.sleep(2)\n dialog.child('Type a file name').click()\n time.sleep(2)\n dialog.child(roleName = 'text').text = file # we want the first text box\n time.sleep(3)\n dialog.child(name = 'Open', roleName = 'push button').click()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Open panorama dialog window to perform image stitching.
|
def run_panorama_dialog(images):
images = {img.name: img.data for img in images}
panorama = ImagePanorama(images.copy())
if panorama.exec():
return panorama.pano_data, panorama.pano_name
|
[
"def main_show():\n variant = sys.argv[2]\n if variant == 'original':\n obj = view.Original()\n cmap=None\n elif variant == 'aligned':\n obj = view.Aligned()\n cmap=glumpy.colormap.Grey\n elif variant == 'funneled':\n obj = view.Funneled()\n cmap=None\n else:\n raise ValueError(variant)\n\n glumpy_viewer(\n img_array=obj.image_pixels,\n arrays_to_print=[obj.image_pixels],\n cmap=cmap,\n window_shape=(250, 250),\n )",
"def main():\n single_patch = create_filtered_image(1.5, 0, 1.5)\n if single_patch != None:\n single_patch.show()\n\n warhol_image = make_warhol()\n if warhol_image != None:\n warhol_image.show()",
"def get_enhanced_panorama(method,panorama,frame,last_frame,prec_trans, projection_matrix, moving_fg_mask, static_fg_mask):\n if(method == \"cylindrical\"):\n # Warping the image : proj\n return EnhancedCylindricalWarpImages(panorama,frame,last_frame,prec_trans, projection_matrix, moving_fg_mask, static_fg_mask)\n else:\n print(\"Error : Unknown or Unimplemented panorama method \" + method + \".\")",
"def open_display_window(width, height):\n cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)\n cv2.resizeWindow(WINDOW_NAME, width, height)\n cv2.moveWindow(WINDOW_NAME, 0, 0)\n cv2.setWindowTitle(WINDOW_NAME, 'AI-Hygiene-Tracker[ROOM]')",
"def get_panorama(method,panorama,frame,last_frame,prec_trans, cam_matrix, scaling_factor, resolution, projection_matrice):\n if(method == \"cylindrical\"):\n # Warping the image : proj\n return cylindricalWarpImages(panorama,frame,last_frame,prec_trans, cam_matrix, scaling_factor, resolution, projection_matrice)\n else:\n print(\"Error : Unknown or Unimplemented panorama method \" + method + \".\")",
"def get_panorama(method,panorama,frame,last_frame,prec_trans, projection_matrix):\n if(method == \"cylindrical\"):\n # Warping the image : proj\n return cylindricalWarpImages(panorama,frame,last_frame,prec_trans, projection_matrix)\n else:\n print(\"Error : Unknown or Unimplemented panorama method \" + method + \".\")",
"def main():\n # Import a image\n original_mt = SimpleImage('images/mt-rainier.jpg')\n # Show the original image\n original_mt.show()\n reflected = reflect('images/mt-rainier.jpg')\n # Show the vertically mirrored image\n reflected.show()",
"def openPictureTool(picture):\n #import PictureExplorer\n thecopy = duplicatePicture(picture)\n #Constructor has side effect of showing it\n PictureExplorer(thecopy)",
"def showWindow(self, sender):",
"def show_monitor_img(img):\n show_img = 'eog --fullscreen ' + img + ' &'\n #time.sleep(0.1)\n os.system(show_img)",
"def _runMeshViewer(self):\n self._hideWindow()\n wiz = MeshViewer()\n wiz.run()\n cv2.destroyAllWindows()\n self._showWindow()",
"def openPredict(self):\r\n self.predictWindow = Toplevel(self.master)\r\n self.predicter = SongPredictorWindow(self.predictWindow)",
"def selectSideView(self):\r\n basicFilter = \"Image Files (*.png *.tiff);;PNG (*.png);;TIFF (*.tiff);;All Files (*.*)\"\r\n self.hide()\r\n self.SideImagePath = cmds.fileDialog2(caption=\"Please select side image\", fileFilter=basicFilter, fm=1)\r\n self.lineSide.setText(str(self.SideImagePath[0]))\r\n self.show()",
"def main():\n\n # prep picamera\n with picamera.PiCamera() as camera:\n camera.resolution = (1024, 768)\n camera.rotation = 180\n camera.crop = (0.0, 0.0, 1.0, 1.0)\n\n # display preview\n camera.start_preview()\n\n # continuously updates the overlayed layer and display stats\n overlay_renderer = None\n while True:\n text = time.strftime('%H:%M:%S', time.gmtime())\n img = Image.new(\"RGB\", (1024, 768))\n draw = ImageDraw.Draw(img)\n draw.font = ImageFont.truetype(\n \"/usr/share/fonts/truetype/freefont/FreeSerif.ttf\",\n 50)\n draw.text((10,10), text, (255, 255, 255))\n\n if not overlay_renderer:\n \"\"\"\n If overlay layer is not created yet, get a new one. Layer\n parameter must have 3 or higher number because the original\n preview layer has a # of 2 and a layer with smaller number will\n be obscured.\n \"\"\"\n overlay_renderer = camera.add_overlay(img.tostring(),\n layer=3,\n size=img.size,\n alpha=128);\n else:\n overlay_renderer.update(img.tostring())",
"def opening(img):\n kernel = numpy.ones((7, 7), numpy.uint8)\n opening_img = cv2.morphologyEx(img, cv2.MORPH_OPEN, kernel)\n cv2.imshow('Opening', opening_img)\n cv2.waitKey()\n cv2.destroyAllWindows()",
"def main():\n original_img = image.Image('pres_casey.gif')\n red_image = red_filter(original_img)\n win = image.ImageWin(original_img.getWidth(), original_img.getHeight())\n red_image.draw(win)\n\n grayscale_img = grayscale(original_img)\n grayscale_img.draw(win)\n\n cycle_colors_img = cycle_colors(original_img)\n cycle_colors_img.draw(win)\n\n negative_img = negative(original_img)\n negative_img.draw(win)\n\n brightness_img = brightness(original_img, 90)\n brightness_img.draw(win)\n\n increase_contrast_img = increase_contrast(original_img)\n increase_contrast_img.draw(win)\n\n vertical_flip_image = vertical_flip(original_img)\n vertical_flip_image.draw(win)\n\n posterize_image = posterize(original_img)\n posterize_image.draw(win)\n\n scroll_image = scroll(original_img, 10)\n scroll_image.draw(win)\n\n horizontal_mirror_image = horizontal_mirror(original_img)\n horizontal_mirror_image.draw(win)\n\n obamafy_image = obamafy(original_img)\n obamafy_image.draw(win)",
"def display(self):\n if not hasattr(self, 'actor'):\n self.addActor()\n # Generate a renderer window\n win = vtkRenWin()\n # Set the number of viewports\n win.setnumViewports(1)\n # Set the background colour\n win.setBackground([1,1,1])\n # Set camera projection \n renderWindowInteractor = vtk.vtkRenderWindowInteractor()\n renderWindowInteractor.SetRenderWindow(win)\n renderWindowInteractor.SetInteractorStyle(vtk.vtkInteractorStyleTrackballCamera())\n # Set camera projection \n win.setView()\n win.renderActors([self.actor,])\n win.Render()\n win.rens[0].GetActiveCamera().Azimuth(0)\n win.rens[0].GetActiveCamera().SetParallelProjection(True)\n win.Render()\n return win",
"def display_img():\n global o_img, p_img\n if o_img is None or p_img is None:\n messagebox.showinfo('Error', 'No image to compare.')\n return\n o_img_first = decode_resize_img(o_img[0])\n p_img_first = decode_resize_img(p_img[0])\n disp_window = Toplevel()\n o_img_label = ttk.Label(disp_window, text='Original Image')\n o_img_label.grid(column=0, row=0)\n o_img_canv = Canvas(disp_window, bg='white', width=500, height=300)\n o_img_canv.grid(column=0, row=1)\n o_img_canv.create_image(250, 200, image=o_img_first)\n p_img_label = ttk.Label(disp_window, text='Processed Image')\n p_img_label.grid(column=1, row=0)\n p_img_canv = Canvas(disp_window, bg='white', width=500, height=300)\n p_img_canv.grid(column=1, row=1)\n p_img_canv.create_image(250, 200, image=p_img_first)\n disp_window.mainloop()\n return None",
"def perspective_distortion(self):\r\n bias = 10\r\n pts11 = pts12 = np.float32([[self.cols//2-bias,self.rows//2-bias],[self.cols//2+bias,self.rows//2-bias],[self.cols//2-bias,self.rows//2+bias],[self.cols//2+bias,self.rows//2+bias]])\r\n pts12[0,0] = pts12[0,0] + self.parameter_dict['static_perspecitive_x_p1_img1']\r\n pts12[0,1] = pts12[0,1] + self.parameter_dict['static_perspecitive_y_p1_img1']\r\n\r\n\r\n M1 = cv2.getPerspectiveTransform(pts11,pts12)\r\n\r\n dst = cv2.warpPerspective(self.img_hsv2, M1, (int(self.cols*self.resizefactor),int(self.rows*self.resizefactor)))\r\n \r\n return dst"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Validate the given point. Make sure point coordinates aren't beyond the corners of the image.
|
def __validate_point(self, point):
if point.x() < 0:
point.setX(0)
if point.y() < 0:
point.setY(0)
img_width = self._data.shape[1] - 1
if point.x() > img_width:
point.setX(img_width)
img_height = self._data.shape[0] - 1
if point.y() > img_height:
point.setY(img_height)
return point
|
[
"def is_valid_point(map_grid, point):\n x = point[0]\n y = point[1]\n width = map_grid.info.width\n height = map_grid.info.height\n return 0 <= x < width and 0 <= y < height",
"def sanity_check_point(cls, gridmap: np.array, point: Tuple[int, int]) -> None:\n rows, cols = gridmap.shape\n if point[0] < 0 or point[1] < 0 or rows <= point[0] or cols <= point[1]:\n raise OutOfBoundsError(\n f\"point {point} is out of map bounds {gridmap.shape}\")",
"def is_inside(self, point):\n return not (np.prod(point[0] - self._x_range) > 0 or np.prod(point[1] - self._y_range) > 0)",
"def contains_point(self, point):\n point = IntPoint.make(point)\n return point.x >= self.left and point.x < self.right and point.y >= self.top and point.y < self.bottom",
"def pointInRectangle(self, point):\r\n return self.topLeft[0] <= point[0] <= self.botRight[0] and \\\r\n self.topLeft[1] >= point[1] >= self.botRight[1]",
"def is_inside(self, point):\n x = point[0]\n y = point[1]\n if self.regtype == \"box\":\n #print(\"WARNING: rotation box currently not supported!\",\n # file=sys.stderr)\n xmin = self.xc - self.width/2.0\n xmax = self.xc + self.width/2.0\n ymin = self.yc - self.height/2.0\n ymax = self.yc + self.height/2.0\n if all([x >= xmin, x <= xmax, y >= ymin, y <= ymax]):\n return True\n else:\n return False\n else:\n raise ValueError(\"region type '%s' currently not implemented\" %\\\n self.regtype)",
"def IsInBounds(point, width, height):\n return 0 <= point.x < width and 0 <= point.y < height",
"def point_is_valid( generator, x, y ):\n\n # These are the tests specified in X9.62.\n\n n = generator.order()\n curve = generator.curve()\n if x < 0 or n <= x or y < 0 or n <= y:\n return False\n if not curve.contains_point( x, y ):\n return False\n if not n*ellipticcurve.Point( curve, x, y ) == \\\n ellipticcurve.INFINITY:\n return False\n return True",
"def point_in_box(point, corners):\n assert corners.shape == (4,2)\n a = corners[0, :]\n b = corners[1, :]\n d = corners[3, :]\n ab = b - a\n am = point - a\n ad = d - a\n # consider projection of AM on the edge AB and AD\n p_ab = np.dot(ab, am)\n norm_ab = np.dot(ab, ab)\n p_ad = np.dot(ad, am)\n norm_ad = np.dot(ad, ad)\n cond1 = p_ab > 0 and p_ab < norm_ab\n cond2 = p_ad > 0 and p_ad < norm_ad\n return cond1 and cond2",
"def inside(self, point):\n inv_trans = np.linalg.inv(self.transformation)\n scale = self.scale\n point_w = np.matmul(inv_trans[:3, :3], point) + inv_trans[:3, 3]\n for i in range(3):\n if abs(point_w[i]) > scale[i] / 2.:\n return False\n return True",
"def contains_point(self, x=0, y=0):\n return 0 <= x < self.get_width() and 0 <= y < self.get_height()",
"def test_point_validity( generator, x, y, expected ):\n if point_is_valid( generator, x, y ) == expected:\n print_(\"Point validity tested as expected.\")\n else:\n raise TestFailure(\"*** Point validity test gave wrong result.\")",
"def is_valid_coordinates(self, wilderness, coordinates):\n x, y = coordinates\n if x < 0:\n return False\n if y < 0:\n return False\n\n return True",
"def collide_point(self, point):\n # This could probably be optimized as well\n return point[0] > self.left and point[0] < self.right and \\\n point[1] > self.top and point[1] < self.bottom",
"def point_within_dimensions(point, image_dimensions):\n assert len(point) == len(\n image_dimensions\n ), \"Point dimensions {} doesn't equal image dimension {}\".format(\n len(point), len(image_dimensions)\n )\n\n within_bounds = True\n for i, val in enumerate(point):\n within_bounds = within_bounds and 0 <= val < image_dimensions[i]\n\n return within_bounds",
"def _check_piece_bounds(piece):\n\n # If x or y is negative it must be out of bounds\n if piece[0] < 0 or piece[1] < 0:\n return False\n elif piece[0] > 8:\n return False\n\n # Max x index of E-I is y + 4\n if piece[0] < 5:\n if piece[1] > piece[0] + 4:\n return False\n # Max x index of the rest hard coded\n elif piece[0] == 5:\n if piece[1] > 7:\n return False\n elif piece[0] == 6:\n if piece[1] > 6:\n return False\n elif piece[0] == 7:\n if piece[1] > 5:\n return False\n elif piece[0] == 8:\n if piece[1] > 4:\n return False\n\n return True",
"def pointWithinRectangle(point, rectangle):\n px = point[1]\n py = point[0]\n x_vals = [x for (y, x) in rectangle]\n y_vals = [y for (y, x) in rectangle]\n x_min = min(x_vals)\n x_max = max(x_vals)\n y_min = min(y_vals)\n y_max = max(y_vals) \n if (x_min < px < x_max and y_min < py < y_max):\n return True\n else:\n return False",
"def isPointInside(self, point):\n\n code = self.caster.InsideOrOutside(point)\n\n if code == -1: #point is inside\n return True\n else: #point is either outside the surface or can not be located\n return False",
"def isInside(point, leftTop, rightBottom):\n\n if not (leftTop[0] < point[0] < rightBottom[0]):\n return False\n if not (leftTop[1] < point[1] < rightBottom[1]):\n return False\n return True"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Check if the image is grayscale. Returns ``True`` if the image has a single channel, otherwise ``False``.
|
def _is_grayscale(self):
return len(self._data.shape) == 2
|
[
"def is_img_gray(image):\n is_gray = True\n for i in range(image.size[0]):\n c = image.getpixel((i,image.size[1] / 2))\n c = np.asarray(c)\n mean = np.mean(c)\n tmp = True\n for b in c: tmp = tmp and (b == mean)\n if not tmp:\n is_gray = False\n break\n return is_gray",
"def isRGB(img):\r\n return len(img.shape) == RGB_SIZE and img.shape[-1] >= RGB_SIZE",
"def is_3_channels(image_path):\r\n img = cv2.imread(image_path)\r\n try:\r\n h, w, c = img.shape\r\n if c == 3:\r\n return True\r\n except Exception:\r\n return False",
"def iscolor(self):\n return self.channels() == 3",
"def _is_image(self, blob):\n if blob.layout != \"NCHW\":\n return False\n channels = blob.shape[1]\n return channels == 3",
"def _return_num_channels(rgb, gray):\n\n # Handling the edge cases\n if not isinstance(rgb, bool):\n raise ValueError('`rgb` must be a boolean value')\n \n if not isinstance(gray, bool):\n raise ValueError('`gray` must be a boolean value')\n\n # We use this approach:\n # Preference is given to gray. \n # If gray == True, then we return channels=1\n \n # rgb can be either True or False \n # IF rgb == True, it implies that the user expects an RGB image\n # If rgb == False, we return a BGR image Tensor \n\n if gray:\n return 1 # Grayscale images are 1-channeled\n else:\n return 3 # BGR/RGB images are 3-channeled",
"def is_white(self, image_path):\n\t\treturn self._image_color(image_path, \"white\")",
"def is_black(self, image_path):\n\t\treturn self._image_color(image_path, \"black\")",
"def grayscale(images):\n # R -> 0.299, G -> 0.587, B -> 0.114.\n img_gray = images #torch.tensor(images)\n gray_channel = (\n 0.299 * images[:, :, :, 0] + 0.587 * images[:, :, :, 1] + 0.114 * images[:, :, :, 2]\n )\n img_gray[:, 0] = gray_channel\n img_gray[:, 1] = gray_channel\n img_gray[:, 2] = gray_channel\n return img_gray",
"def compute_gray_level(self, image: ndarray):\n try:\n return \"success\", divide(image, 3)\n except TypeError:\n self.logger.error(\"Wrong given type to divide: {}\".format(type(image)))\n return \"error\", None\n except Exception as error:\n self.logger.exception(error)\n return \"error\", None",
"def greyscale(im):\n return rgb2gray(im)",
"def grayscale(image):\n image = tf.image.rgb_to_grayscale(image) # this will create one dimension\n image = tf.image.grayscale_to_rgb(image) # this will create three dimension again\n return image",
"def Convert2Gray(self):\n if not self.loaded:\n return 0\n \n try:\n return cv2.cvtColor(self.imagem, cv2.COLOR_BGR2GRAY)\n except:\n return 0",
"def is_mono(self):\n return self.num_channels == 1",
"def __nonzero__(self):\r\n return self.original_image_type is not None",
"def detect_color_image(file, thumb_size=40, MSE_cutoff=80, adjust_color_bias=True):\n \"\"\" see https://stackoverflow.com/questions/20068945/detect-if-image-is-color-grayscale-or-black-and-white-with-python-pil \"\"\"\n pil_img = Image.open(file)\n bands = pil_img.getbands()\n if bands == ('R','G','B') or bands== ('R','G','B','A'):\n thumb = pil_img.resize((thumb_size,thumb_size))\n SSE, bias = 0, [0,0,0]\n if adjust_color_bias:\n bias = ImageStat.Stat(thumb).mean[:3]\n bias = [b - sum(bias)/3 for b in bias ]\n for pixel in thumb.getdata():\n mu = sum(pixel)/3\n SSE += sum((pixel[i] - mu - bias[i])*(pixel[i] - mu - bias[i]) for i in [0,1,2])\n MSE = float(SSE)/(thumb_size*thumb_size)\n if MSE <= MSE_cutoff:\n #print \"grayscale\\t\"\n #print \"( MSE=\",MSE,\")\"\n return 0\n else:\n #print \"Color\\t\\t\\t\"\n #print \"( MSE=\",MSE,\")\"\n return 1\n elif len(bands)==1:\n #print \"Black and white\", bands\n return 0\n else:\n #print \"Don't know...\", bands\n return 1",
"def isRGBMode(self) -> \"SbBool\":\n return _coin.SoSceneManager_isRGBMode(self)",
"def make_gray(image):\n assert isinstance(image, np.ndarray), type(image)\n if image.ndim == 2:\n return image\n assert image.ndim == 3, image.shape\n assert image.shape[2] in [1, 3, 4], image.shape\n return np.mean(image[:, :, :3], 2)",
"def img_a_gris(self):\n self.img=cv2.cvtColor(self.img,cv2.COLOR_BGR2GRAY)\n return None"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Set the image data and update the image window.
|
def set_img_data(self, img_data):
self._data = img_data
self.update_window()
self.update_icon()
|
[
"def update_image(window: tk.Tk, img: Image):\r\n\r\n window.display_image(img)",
"def set_img(arr):\n global tkImg, canvasImg, canvas\n tkImg = tk_img(arr)\n canvasImg = tk_imshow(canvas, tkImg)",
"def updateImage(self):\n self.image = self.getImage(self.location, self.name, self.imageType)",
"def edit_image(self):\n self.update()",
"def setData(self, *args):\n return _coin.SoGLImage_setData(self, *args)",
"def set_image(self):\r\n self.sc.set_image()",
"def update_display(self):\n self.disp.image(self.image)\n self.disp.display()",
"def _new_image(self, msg):\n filepath = msg.data\n self.set_image(filepath)\n self.set_state(ImageViewer.STATE_IDLE)\n self.Refresh()",
"def setData(self, *args):\n return _coin.SoGLBigImage_setData(self, *args)",
"def img_cb(self, image):\n \n self.last_img = image\n self.is_new_img = True",
"def _new_image(self, msg):\n filepath = msg.data\n self.set_image(filepath)\n self.set_state(BallotScreen.STATE_IDLE)\n self.Refresh()",
"def update_image(self, img_arr):\n pm = rgb_arr_to_rgb_pixmap(img_arr)\n self.clear()\n self.img_pmi = self.addPixmap(pm)\n #\n self.h, self.w = img_arr.shape[:2]\n self.setSceneRect(0, 0, self.w, self.h)",
"def update_image(self):\n self.image = self.capture_image()\n self.update_background()",
"def update_test_image(self):\n self.test_image_label.setPixmap(numpy2pixmap(self.test_image))",
"def updateCanvas(self):\n image = Image.open(self.filename)\n self.canvasImage = ImageTk.PhotoImage(image)\n self.mainCanvas.create_image(0, 0, anchor=\"nw\", image=self.canvasImage)\n self.mainCanvas.config(width=self.imageDimensions[0], height=self.imageDimensions[1])\n print(Globals.pixelationWindowPixels)",
"def update(self):\n\t\tself.gui.draw_image(self.image, self.getRealPos())\n\t\tWidget.update(self)",
"def setImage(self, image, normalize = None):\n \n self.viewer.setImage(image, normalize)\n self.updateCaption()",
"def setImage(self, img, regions, sizes, image_id=...) -> None:\n ...",
"def setPhoto(self, image):\n\n self.image = image\n # Convert the float32 monochrome image into uint8 format.\n image_uint8 = self.image.astype(uint8)\n self.shape_y = image_uint8.shape[0]\n self.shape_x = image_uint8.shape[1]\n\n # Normalize the frame brightness.\n image_uint8 = normalize(image_uint8, None, alpha=0, beta=255, norm_type=NORM_MINMAX)\n\n qt_image = QtGui.QImage(image_uint8, self.shape_x, self.shape_y, self.shape_x,\n QtGui.QImage.Format_Grayscale8)\n pixmap = QtGui.QPixmap(qt_image)\n\n if pixmap and not pixmap.isNull():\n self._empty = False\n self._photo.setPixmap(pixmap)\n else:\n self._empty = True\n self._photo.setPixmap(QtGui.QPixmap())"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Update the image window scale depending on the mode.
|
def zoom(self, mode):
if mode == "out":
self.scale -= 0.1
elif mode == "in":
self.scale += 0.1
else:
self.scale = 1
self.scale = round(self.scale, 1)
self.update_window()
|
[
"def windowEvent(self, *args, **kwargs):\n super().windowEvent(*args, **kwargs)\n\n for win, cam, pixel2d in self.forcedAspectWins:\n aspectRatio = self.getAspectRatio(win)\n cam.node().getLens().setAspectRatio(aspectRatio)\n\n # Fix pixel2d scale for new window size\n # Temporary hasattr for old Pandas\n if not hasattr(win, 'getSbsLeftXSize'):\n pixel2d.setScale(2.0 / win.getXSize(), 1.0, 2.0 / win.getYSize())\n else:\n pixel2d.setScale(2.0 / win.getSbsLeftXSize(), 1.0, 2.0 / win.getSbsLeftYSize())",
"def on_window_resize(self, event):\n image_width = event.width\n image_height = int(event.width / self.aspect_ratio)\n\n if image_height > event.height:\n image_height = event.height\n image_width = int(event.height * self.aspect_ratio)\n\n self.cv_displayed_image = cv2.resize(self.cv_image, (image_width, image_height))\n self.zoom_ratio = self.cv_displayed_image.shape[1] / self.cv_image.shape[1]\n self.add_rectangles()\n self.show_cv_image(self.cv_displayed_image)",
"def updateCatSize(self): \n self.size=(Window.size[0]*1/4,Window.size[1]*2/3)",
"def rescaled_image():",
"def adjust_window_to_current_state(self, event=None):\n # zoomed to normal\n if((self.unit == self.root.winfo_screenheight()//GRID_ROWS-2 or\n self.unit == self.root.winfo_screenwidth()//GRID_COLUMNS) and\n self.root.state() == \"normal\"):\n width = DEFAULT_WIDTH_WINDOW\n self.set_window_size(width)\n # normal to zoomed\n if(not (self.unit == self.root.winfo_screenheight()//GRID_ROWS-2 or\n self.unit == self.root.winfo_screenwidth()//GRID_COLUMNS) and\n self.root.state() == \"zoomed\"):\n width = self.root.winfo_screenwidth()\n self.set_window_size(width)",
"def ikHandleDisplayScale():\n pass",
"def update_scales(self, value):\n size_var = 0.8\n min_value = self.min_lvl_scale.get()\n max_value = self.max_lvl_scale.get()\n middle = (min_value+max_value)/2\n\n min_size = middle * size_var\n max_size = (255 - middle) * size_var\n\n self.min_lvl_scale.config(from_ = 0, to = middle - 1, length = min_size)\n self.max_lvl_scale.config(from_ = middle +1, to = 255, length = max_size)\n\n self.gamma.set(1/10**(self.gamma_scale.get()))\n self.gamma_scale.config(label = \"Gamma: \"+\"{0:.2f}\".format(round(self.gamma.get(), 2)))\n\n self.update_image(0, update_levels = True)",
"def scale_window_to(self, px):\n #assert self.mode == 'hidden', 'Can only scale hidden window.'\n assert self.is_running, 'Window can only be scaled once app has been started.'\n\n w, h = self.window.fbo.size \n s = px / max(w, h)\n\n self.window.create_framebuffer(s * w, s * h)",
"def DrawingScale(self) -> float:",
"def awz_changed(self, value):\n self.winsize_new = value",
"def recalcAspectRatio(self, window):\n # set the mainframe size to the window borders again\n self.frameMain[\"frameSize\"] = (\n base.a2dLeft, base.a2dRight,\n base.a2dTop, base.a2dBottom)\n\n # calculate new aspec tratio\n wp = window.getProperties()\n aspX = 1.0\n aspY = 1.0\n wpXSize = wp.getXSize()\n wpYSize = wp.getYSize()\n if wpXSize > wpYSize:\n aspX = wpXSize / float(wpYSize)\n else:\n aspY = wpYSize / float(wpXSize)\n # calculate new position/size/whatever of the gui items\n self.title.setPos(0.0, 0.0, base.a2dTop - self.textscale)\n self.menuBackground.setScale(1.0 * aspX, 1.0, 1.0 * aspY)\n self.cbVolumeMute.setPos(base.a2dRight - 0.15, 0, base.a2dBottom + 0.15)",
"def updateCanvas(self):\n image = Image.open(self.filename)\n self.canvasImage = ImageTk.PhotoImage(image)\n self.mainCanvas.create_image(0, 0, anchor=\"nw\", image=self.canvasImage)\n self.mainCanvas.config(width=self.imageDimensions[0], height=self.imageDimensions[1])\n print(Globals.pixelationWindowPixels)",
"def updateImage(self):\n if self.uiUpdating:\n return\n\n self.calImgFigure.clf()\n if self.calSettings is not None:\n if \"center\" in self.calSettings:\n center = self.calSettings[\"center\"]\n self.fixedCenter.setChecked(True)\n self.centerX.setValue(center[0])\n self.centerY.setValue(center[1])\n if self.calImageGrp.isChecked():\n self.resize(500, 800)\n self.calImgCanvas.setHidden(False)\n self.minIntLabel.setHidden(False)\n self.minInt.setHidden(False)\n self.maxIntLabel.setHidden(False)\n self.maxInt.setHidden(False)\n center = self.calSettings[\"center\"]\n radius = self.calSettings[\"radius\"]\n _, disp_img = self.getImage()\n ax = self.calImgFigure.add_subplot(111)\n ax.cla()\n ax.imshow(disp_img)\n ax.plot([center[0]], [center[1]], 'ro')\n ax.add_patch(\n patches.Circle(center, radius, linewidth=2, edgecolor='r', facecolor='none', linestyle='dotted'))\n ax.set_xlim((0, disp_img.shape[1]))\n ax.set_ylim((0, disp_img.shape[0]))\n ax.set_title(\"center:\" + str(center) + \" radius:\" + str(radius))\n ax.invert_yaxis()\n self.calImgFigure.tight_layout()\n else:\n self.resize(500, 1)\n self.calImgCanvas.setHidden(True)\n\n self.calImgCanvas.draw()",
"def on_configure(self, event):\n self.display_value(self.scale.get())\n self.place_ticks()",
"def set_scale(self, scale):\n def dofancyresize(imgsize, scale):\n if scale < 1.0:\n return True\n else:\n return False\n if scale <= 0.1:\n # Set limit on zooming-out\n return\n\n self.scale = scale\n # Rescale img bitmap\n w, h = self.img_pil.size\n w_scaled, h_scaled = int(round(w*scale)), int(round(h*scale))\n if dofancyresize((w,h), scale):\n rescaled_img = self.img_pil.resize((w_scaled, h_scaled), resample=Image.ANTIALIAS)\n else:\n rescaled_img = self.img_pil.resize((w_scaled, h_scaled))\n self.img_resize_pil = rescaled_img\n self.img_bitmap = util_gui.PilImageToWxBitmap(rescaled_img)\n self._setup_scrollbars()",
"def config_scale(self, cnf={}, **kwargs):\n self._scale.config(cnf, **kwargs)\n # Update self._variable limits in case the ones of the scale have changed\n self._variable.configure(high=self._scale['to'],\n low=self._scale['from'])\n if 'orient' in cnf or 'orient' in kwargs:\n self._grid_widgets()",
"def set_scale(self,scale_factor):\n self.scale_factor = scale_factor\n\n if (self._orig_bitmap):\n self.bitmap = copy.copy(self._orig_bitmap)\n self.bitmap.image = self._orig_bitmap.zoom(self.scale_factor)",
"def _resize_pillars(self):\n self.image = pygame.transform.smoothscale(self.image, (100, 650))",
"def update_image(window: tk.Tk, img: Image):\r\n\r\n window.display_image(img)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Update the icon for the image window: a grayscale image gets a grayscale icon, a color image gets a color one.
|
def update_icon(self):
icon = QIcon()
if self._is_grayscale():
icon.addPixmap(QPixmap("icons/picture_gray.png"), QIcon.Normal, QIcon.Off)
else:
icon.addPixmap(QPixmap("icons/picture_color.png"), QIcon.Normal, QIcon.Off)
self.setWindowIcon(icon)
|
[
"def update_icon(active, window):\n\tif active:\n\t\twindow.tk.call('wm', 'iconphoto', window._w, green_icon)\n\telse:\n\t\twindow.tk.call('wm', 'iconphoto', window._w, red_icon)",
"def update_icon(self):\n fn = self.icon_filename[self.state]\n path = os.path.join(self.location, fn)\n assert os.path.exists(path), 'File not found: %s' % path\n if hasattr(self, 'icon'):\n self.icon.set_from_file(path)\n else:\n self.icon = gtk.status_icon_new_from_file(path)",
"def getImageIcon(self) -> javax.swing.ImageIcon:\n ...",
"def UpdateIcon(self):\r\n if sys.argv[0].endswith('.exe'):\r\n try:\r\n loc = wx.IconLocation(sys.argv[0], 0)\r\n self.SetIcon(wx.IconFromLocation(loc))\r\n return\r\n except:\r\n pass\r\n\r\n try:\r\n icon_path = os.path.join(os.path.dirname(__file__), 'icon.ico')\r\n except NameError:\r\n # __file__ does not exist\r\n return\r\n if os.path.exists(icon_path):\r\n self.SetIcon(wx.Icon(icon_path, wx.BITMAP_TYPE_ICO))",
"def set_icon(self, image_name):\n self._icon = self._image[image_name]\n pygame.display.set_icon(self.get_icon())",
"def update_image(window: tk.Tk, img: Image):\r\n\r\n window.display_image(img)",
"def update_project_icon(self, path, old_image_path, new_image_path):\n self.api.save_icon(new_image_path, path)\n icon = QIcon(QPixmap(new_image_path))\n item = self.get_item_by_path(path)\n item.setIcon(icon)",
"def change_button_icon(self):\n self.icon_button.setIcon(QIcon(random.choice(self.images)))\n self.icon_button.setIconSize(QSize(60, 60))",
"def setIcon(self):\r\n icon = wx.Icon('nagara.ico', wx.BITMAP_TYPE_ICO)\r\n self.SetIcon(icon)",
"def bitmapButtonIcon_Clicked(self, event):\n filename = DM.ChooseGraphic(self, 'Icons', self.SelectedItem.icon_name)\n if filename:\n self.SelectedItem.icon_name = filename\n self.refreshValues()",
"def setIcon(self, icon):\n if icon.isNull():\n self.__image = None\n else:\n self.__image = icon.pixmap(16, 16).toImage()\n super(E5LineEditButton, self).setIcon(icon)",
"def setIcon(self, m_path, m_type=\"\"):\n self.setKv(\"icon\", self.__define_icon(m_path, m_type))",
"def set_icon(self, icon):\n self.ICON = icon",
"def net_wm_icon_change(self, func):\n return self._subscribe(\"net_wm_icon_change\", func)",
"def populateIcon(self, *args):\n\n # default\n self.characterIcon.setPixmap(self.defaultPixMap)\n\n # get a list of the existing folders in projects\n selectedProject = self.projectMenu.currentText()\n fullPath = utils.returnNicePath(self.projectPath, selectedProject)\n selectedGroup = self.groupMenu.currentText()\n if len(selectedGroup) > 1:\n fullPath = utils.returnNicePath(fullPath, selectedGroup)\n\n selectedCharacter = self.characterList.currentItem().text()\n fullPath = utils.returnNicePath(fullPath, selectedCharacter + \".png\")\n\n if os.path.exists(fullPath):\n pixmap = QtGui.QPixmap(fullPath)\n self.characterIcon.setPixmap(pixmap)\n\n else:\n self.characterIcon.setPixmap(self.defaultPixMap)",
"def update_image(self, cv_img):\r\n qt_img = self.convert_cv_qt(cv_img)\r\n self.image_container_label.setPixmap(qt_img)\r\n self.displayStatus()",
"def update(self):\n label_text, img_path = self.toggle_dict[self.is_on]\n self.label.setText(label_text)\n self.img.setPixmap(QtGui.QPixmap(img_path))",
"def _update_fs_icon(self):\n self._icon_path = tempfile.mktemp()\n with open(self._icon_path, 'wb') as f:\n self.icon.save(f, 'PNG')\n self._icon_valid = True",
"def update_image(self, cv_img):\r\n qt_img = self.convert_cv_qt(cv_img)\r\n self.image_label.setPixmap(qt_img)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Create intensity profile window.
|
def create_profile(self):
self.intensity_profile.create_profile(self.points, self._data)
|
[
"def create_picture_gui(self):\n self.screen.fill((255, 255, 255)) # asd\n self.display_author()\n self.display_title()\n # Getting width, height and steps from input boxes.\n w, h, s, _ = self.display_creating_picture_boxes()\n # Checking data validate\n try:\n width, height, steps = gexc.check_creating_data(w, h, s)\n # Checks whether width is int\n except gexc.WidthException:\n self.display_error(\"width\")\n self.create_picture_gui()\n # Checks whether height is int\n except gexc.HeightException:\n self.display_error(\"height\")\n self.create_picture_gui()\n # Checks whether steps is int\n except gexc.StepsException:\n self.display_error(\"steps\")\n self.create_picture_gui()\n # Creating white picture from user's data\n white_picture = ImgInp.create_white_picture(width, height)\n # Makes a simulation\n self.make_simulation(steps, white_picture)",
"def graphic_window(self):",
"def CreateImage( self, dialog, title = 'Create Montage Image' ):\n self._RunBegin( dialog, title )",
"def __init__(self, window):\n self.window = window\n self.window.title(\"Sorting Algorithm Visualizer\")\n self.window.geometry(\"800x450\")\n self.window.minsize(800, 450)\n self.window.maxsize(800, 450)\n self.window.config(bg = \"#152e57\")",
"def create_hist_window(self):\n\n self.histogram_graphical.create_histogram_plot(self.calc_histogram())",
"def make_histogram(self): # connected to make histogram (btn_histogram)\n print(\"make hist\")\n# self.calculate_images()\n self.intensitys = np.linspace(0,10,10)\n self.intensitys2 = self.intensitys\n try:\n self.intensitys2 = np.concatenate((self.intensitys,\n self.intensitys2))\n except:\n self.intensitys2 = self.intensitys\n self.doit()\n\n self.histo_data = True",
"def create_heatmap(self):\n # Profile the file.\n self.__profile_file()\n # Map profile stats to heatmap data.\n self.__fetch_heatmap_data_from_profile()\n # Create heatmap.\n self.__create_heatmap_plot()",
"def probability_picture_gui(self):\n self.screen.fill((255, 255, 255))\n self.display_author()\n self.display_title()\n # getting width, height, steps and probablity\n w, h, s, p = self.display_creating_picture_boxes(True)\n # checking whether data is correct\n try:\n # converts input data to necessary types\n wid, height, steps, prob = gexc.check_creating_data(w, h, s, p)\n # correct width\n except gexc.WidthException:\n self.display_error(\"width\")\n self.probability_picture_gui()\n # correct height\n except gexc.HeightException:\n self.display_error(\"height\")\n self.probability_picture_gui()\n # correct steps\n except gexc.StepsException:\n self.display_error(\"steps\")\n self.probability_picture_gui()\n # correct probablity\n except gexc.ProbabilityException:\n self.display_error(\"probability\")\n self.probability_picture_gui()\n # creating white picture, converting it to picture with probability\n white_picture = ImgInp.create_white_picture(wid, height)\n prob_picture = ImgInp.create_probability_picture(white_picture, prob)\n # making a simulation\n self.make_simulation(steps, prob_picture)",
"def make_profile(self, z, station_counts):\n\n # figure with multiline\n TOOLS = \"box_zoom, pan, xwheel_zoom, reset, tap\" \n (max_z, min_z), (min_c, max_c) = compute_profile_axis_ranges(z, station_counts)\n p = figure(plot_width=PROFILE_WIDTH, plot_height=PROFILE_HEIGHT, x_range=(min_c,max_c), y_range=(max_z,min_z),\n x_axis_label=PROFILE_X_LABEL, y_axis_label=PROFILE_Y_LABEL, title=PROFILE_TITLE, tools=TOOLS)\n p.title.text_font_size = TITLE_TEXT_SIZE\n p.multi_line(source=self.profile_datasource, xs='cs', ys='zs', line_width=2, line_alpha=0.5,\n line_color=PROFILE_LINE_COLOR, hover_line_alpha=1, hover_line_color=PROFILE_LINE_COLOR)\n\n hover = HoverTool(tooltips=[\n ('depth', '$y m'),\n ('count', '$x'),\n ('species', '@tax[$index]'),\n ('annotation', '@ann[$index]'),\n ])\n\n p.add_tools(hover)\n\n return p",
"def create_rois(self):\n viewbox = self.im_canvas.getViewBox()\n for i, mw in enumerate(self.mw[:self._a+1]):\n j = i // self._m\n try: \n x, y, w, h, t = self.stats['ROIs'][j] # xc, yc, width, height, threshold\n except IndexError as e:\n error('Not enough ROIs for main windows: %s\\n'%j+str(e))\n self.stats['ROIs'].append([1,1,1,1,1])\n x, y, w, h, t = 1, 1, 1, 1, 1\n if not i % self._m: # for the first window in each set of _m\n try:\n self.rois[j].roi.show()\n self.rois[j].label.show()\n self.rois[j].resize(x, y, w, h)\n self.rois[j].t = t\n except IndexError: # make a new ROI \n self.rois.append(ROI((self.stats['pic_width'], self.stats['pic_height']), x, y, w, h, t, ID=j))\n self.rois[j].roi.sigRegionChangeFinished.connect(self.user_roi) \n self.rois[j].roi.setZValue(10) # make sure the ROI is drawn above the image\n viewbox.addItem(self.rois[j].roi)\n viewbox.addItem(self.rois[j].label)\n mw.roi.setSize((w, w)) # triggers user_roi. Must set width first.\n mw.roi.setPos(x - w//2, y - w//2) # triggers user_roi\n mw.bias_offset_edit.setText(str(self.stats['bias']))\n for j in range(len(self.mw[:self._a+1])//self._m, len(self.rois)):\n self.rois[j].roi.hide() # remove extra ROIs\n self.rois[j].label.hide()",
"def Window(*args, **kwargs):\n return _aui.AuiPaneInfo_Window(*args, **kwargs)",
"def open_display_window(width, height):\n cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)\n cv2.resizeWindow(WINDOW_NAME, width, height)\n cv2.moveWindow(WINDOW_NAME, 0, 0)\n cv2.setWindowTitle(WINDOW_NAME, 'AI-Hygiene-Tracker[ROOM]')",
"def show(self):\n plt.figure(randint(0, 256))\n plt.imshow(self.image,)\n plt.xticks([]), plt.yticks([])\n plt.show()",
"def display_setup(self, window):\n ## TO DO: GET ACTUAL PARAMETERS FOR THESE\n\n scnWidth = self.screen_width\n scnHeight = self.screen_height\n\n window.mouseVisible = False\n\n screen_share = EyeLinkCoreGraphicsPsychoPy(self.tracker, window)\n pylink.openGraphicsEx(screen_share)\n# pylink.setTargetSize(int(surf.get_rect().w/150), int(surf.get_rect().w/500)); \n return window",
"def display_info_creating_pictures(self, with_probability=False):\n text = \"PLEASE TYPE NECESSARY DATA AND PRESS ENTER!\"\n self.display_text(text, 190, 160)\n text = \"ENTER WIDTH [NUMBER]:\"\n self.display_text(text, 50, 250)\n text = \"ENTER HEIGHT [NUMBER]:\"\n self.display_text(text, 50, 350)\n text = \"ENTER STEPS TO DO [NUMBER]:\"\n self.display_text(text, 50, 450)\n if with_probability:\n text = \"ENTER PROBABILITY [NUMBER (0-1)]:\"\n self.display_text(text, 50, 550)\n pg.display.update()",
"def display_settings(self):\n self.reim_edit.setText(str(self.stats['num_reim']))\n self.coim_edit.setText(str(self.stats['num_coim']))\n reset_slot(self.m_edit.editingFinished, self.im_inds_validator, False)\n self.m_edit.setText(str(self.stats['num_images']))\n reset_slot(self.m_edit.editingFinished, self.im_inds_validator, True)\n self.a_edit.setText(str(self.stats['num_saia']//self._m))\n self.a_ind_edit.setText(','.join(map(str, [i%self._m for i in range(self._a)])))\n self.cam_pic_size_changed(self.stats['pic_width'], self.stats['pic_height'])\n self.create_rois()",
"def popup_window_analysis(self):\n window_analysis = tk.Toplevel()\n window_analysis.attributes('-topmost', True)\n window_analysis.title(\"Output Analysis\")\n window_analysis.minsize(570, 530)\n\n #########################\n # Initialize widgets\n #########################\n grid = tk.Frame(window_analysis)\n label_analysis_folder = tk.Label(window_analysis, text=self.label_analysis_folder,\n font=(\"Helvetica\", 13, \"bold\"), justify=tk.LEFT, anchor=\"w\")\n label_analysis_folder_text = tk.Label(window_analysis, text=self.label_analysis_folder_text, justify=tk.LEFT,\n anchor=\"w\")\n label_image = tk.Label(window_analysis, text=self.label_image, font=(\"Helvetica\", 13, \"bold\"),\n justify=tk.LEFT, anchor=\"w\")\n label_image_text = tk.Label(window_analysis, text=self.label_image_text, justify=tk.LEFT,\n anchor=\"w\")\n label_residual = tk.Label(window_analysis, text=self.label_residual, font=(\"Helvetica\", 13, \"bold\"),\n justify=tk.LEFT, anchor=\"w\")\n label_residual_text = tk.Label(window_analysis, text=self.label_residual_text, justify=tk.LEFT,\n anchor=\"w\")\n label_fidelity = tk.Label(window_analysis, text=self.label_fidelity, font=(\"Helvetica\", 13, \"bold\"),\n justify=tk.LEFT, anchor=\"w\")\n label_fidelity_text = tk.Label(window_analysis, text=self.label_fidelity_text, justify=tk.LEFT,\n anchor=\"w\")\n label_matplotlib = tk.Label(window_analysis, text=self.label_matplot, font=(\"Helvetica\", 13, \"bold\"),\n justify=tk.LEFT, anchor=\"w\")\n label_matplotlib_text = tk.Label(window_analysis, text=self.label_matplot_text, justify=tk.LEFT,\n anchor=\"w\")\n\n #########################\n # Layout widgets\n #########################\n label_analysis_folder.pack(fill='x', padx=10, pady=0, expand=True)\n label_analysis_folder_text.pack(fill='x', padx=50, pady=(0, 10), expand=True)\n label_image.pack(fill='x', padx=10, pady=0, expand=True)\n label_image_text.pack(fill='x', padx=50, pady=(0, 10), expand=True)\n label_residual.pack(fill='x', padx=10, pady=0, expand=True)\n label_residual_text.pack(fill='x', padx=50, pady=(0, 10), expand=True)\n label_fidelity.pack(fill='x', padx=10, pady=0, expand=True)\n label_fidelity_text.pack(fill='x', padx=50, pady=(0, 10), expand=True)\n label_matplotlib.pack(fill='x', padx=10, pady=0, expand=True)\n label_matplotlib_text.pack(fill='x', padx=50, pady=(0, 10), expand=True)\n\n #########################\n # Button for closing\n #########################\n button_close = tk.Button(grid, text=\"Close\", command=window_analysis.destroy, height=2, width=6)\n grid.pack(side=\"bottom\", fill=\"x\", expand=False, anchor=\"s\")\n grid.grid_columnconfigure(1, weight=1)\n grid.grid_columnconfigure(99, weight=1)\n button_close.grid(row=1, column=99, sticky=\"E\", padx=15, pady=15)",
"def init_window(self):\n\n self.setWindowTitle(self.video_name)\n self.setWindowIcon(QIcon(\"../resources/diamond_twist.png\"))\n self.setStyleSheet(wndw_style)",
"def plotSignificantRegions(phenotypeIndex,runId=\"\",statId=\"\",window=[50000,50000],res_path=\"/Network/Data/250k/tmp-bvilhjal/snp_res/\"):\n\tpass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Convert OGM property names/values to DB property names/values
|
def map_props_to_db(element, mapping):
property_tuples = []
props = mapping.ogm_properties
for ogm_name, (db_name, data_type) in props.items():
val = getattr(element, ogm_name, None)
if val and isinstance(val, (list, set)):
card = None
for v in val:
metaprops = get_metaprops(v, v.__mapping__)
property_tuples.append((card, db_name, data_type.to_db(
v.value), metaprops))
card = v.cardinality
else:
if hasattr(val, '__mapping__'):
metaprops = get_metaprops(val, val.__mapping__)
val = val.value
else:
metaprops = None
property_tuples.append((None, db_name, data_type.to_db(val),
metaprops))
return property_tuples
|
[
"def translate_db_fields(cls, data):\n dst_data = data.copy()\n for name, col in cls._columns.items():\n key = col.db_field or name\n if key in dst_data:\n dst_data[name] = dst_data.pop(key)\n\n return dst_data",
"def test_to_dict_hybrid_property(self):\r\n young = self.Person(name=u'John', age=15)\r\n old = self.Person(name=u'Sally', age=25)\r\n self.session.commit()\r\n\r\n assert to_dict(young)['is_minor']\r\n assert not to_dict(old)['is_minor']",
"def encode_properties(properties):\n\n # Construct a list of property assignment strings. The RHS is\n # URL-quoted. \n result = map(lambda p: \"%s=%s\" % (p[0], urllib.quote_plus(p[1])),\n properties.items())\n # Join them into a comma-delimited list.\n return string.join(result, \",\")",
"def model_to_dict(sqlalchemy_object):\n fields_arr = [prop.key for prop in\n class_mapper(sqlalchemy_object.__class__).iterate_properties\n if isinstance(prop, ColumnProperty)]\n _dict = {}\n for key in fields_arr:\n temp = getattr(sqlalchemy_object, key)\n if isinstance(temp, datetime.datetime):\n _dict[key] = str(temp)\n else:\n _dict[key] = temp\n return _dict",
"def db_format(obj):\n return {\n 'integer': obj['integer'],\n 'small_integer': obj['small_integer'],\n 'ip': obj['ip'],\n 'enum': obj['protocol'],\n 'emoji_text': obj['emoji_text'],\n 'creation_date': obj.get('creation_date', datetime.utcnow()),\n 'modified_date': obj.get('modified_date', None)\n }",
"def get_properties(self):\n key = (self.user_name, self.bucket_name, \"property_name\")\n data = yield get_relation(key)\n returnValue(dict([(x, ujson.loads(data[x])) for x in data]))",
"def create_property(property_info):\n return insert('property', property_info.keys(), property_info.values())",
"def to_mongodb(report: FormulaReport) -> Dict[str, Any]:\n return FormulaReport.to_json(report)",
"def make_properties(self, properties_dict):\n import uno\n props = []\n for key in properties_dict:\n prop = uno.createUnoStruct(\"com.sun.star.beans.PropertyValue\")\n prop.Name = key\n prop.Value = properties_dict[key]\n props.append(prop)\n return tuple(props)",
"def entity_to_mongo(entity):\n if entity:\n data = {}\n for key, val in entity.__values__.iteritems():\n field = entity.__fields__[key]\n\n if isinstance(val, FlexEntity):\n val = val.flatten()\n elif isinstance(val, Entity):\n val = entity_to_mongo(val)\n else:\n val = field.flatten(val)\n\n if key == 'id':\n key = '_id'\n\n data[key] = val\n\n return data",
"def generatePropertyAccessorNameList(property):\n from Products.ERP5Type.Utils import UpperCase\n res=[]\n cased_id = UpperCase(property['id'])\n for hidden in ('', '_'):\n for getset in ('get', 'set', 'has'): # 'is',\n for default in ('', 'Default', 'Translated'):\n for value in ('', 'Value', 'TranslationDomain'):\n for multivalued in ('', 'List', 'Set'):\n res.append('%s%s%s%s%s%s' % (hidden, getset, default, cased_id, value, multivalued))\n if property.has_key('acquired_property_id') and \\\n property['type'] == 'content':\n for aq_property_id in property['acquired_property_id']:\n cased_id = UpperCase('%s_%s' % (property['id'], aq_property_id))\n for hidden in ('', '_'):\n for getset in ('get', 'set'):\n for default in ('', 'Default'):\n for multivalued in ('', 'List'):\n res.append('%s%s%s%s%s' % (hidden, getset, default, cased_id, multivalued))\n return res",
"def to_db(self,**kwds):\n return(sub_to_db(self,**kwds))",
"def convert_sqlalchemy_todict(obj):\n return {c.key: getattr(obj, c.key)\n for c in inspect(obj).mapper.column_attrs}",
"def _get_fieldmap(self):\r\n field_map = {\r\n 'source_entity__string': 'key',\r\n 'source_entity__context': 'context',\r\n 'string': 'translation',\r\n 'reviewed': 'reviewed',\r\n 'source_entity__pluralized': 'pluralized',\r\n 'wordcount': 'wordcount',\r\n 'last_update': 'last_update',\r\n 'user__username': 'user',\r\n 'source_entity__position': 'position',\r\n 'source_entity__occurrences': 'occurrences',\r\n }\r\n return field_map",
"def map_to_es(self):\n full_name = self.query_path\n return set_default(\n {\n c.names[full_name]: c.es_column\n for k, cs in self.lookup.items()\n # if startswith_field(k, full_name)\n for c in cs if c.jx_type not in STRUCT\n },\n {\n c.names[\".\"]: c.es_column\n for k, cs in self.lookup.items()\n # if startswith_field(k, full_name)\n for c in cs if c.jx_type not in STRUCT\n }\n )",
"def _convertListProperty(model_property, entity):\n return [\n _newKey(old_key)\n for old_key in model_property.get_value_for_datastore(entity) or []]",
"def toObject(self):\r\n obj = {}\r\n for item in self._column_list:\r\n obj[item] = getattr(self,item)\r\n return obj",
"def known_properties() -> set[str]:\n return set(session.session.execute(\n select(Property.name).distinct()\n ).scalars())",
"def convertPropToParams(prop):\n proplist = []\n for key, val in prop.iteritems():\n proplist.append(str('--' + key))\n proplist.append(val)\n return proplist"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Map a vertex returned by the DB to an OGM vertex
|
def map_vertex_to_ogm(result, props, element, *, mapping=None):
props.pop('id')
label = props.pop('label')
for db_name, value in props.items():
metaprops = []
if len(value) > 1:
values = []
for v in value:
if isinstance(v, dict):
val = v.pop('value')
v.pop('key')
vid = v.pop('id')
if v:
v['id'] = vid
metaprops.append((val, v))
values.append(val)
else:
values.append(v)
value = values
else:
value = value[0]
if isinstance(value, dict):
val = value.pop('value')
value.pop('key')
vid = value.pop('id')
if value:
value['id'] = vid
metaprops.append((val, value))
value = val
name, data_type = mapping.db_properties.get(db_name, (db_name, None))
if data_type:
value = data_type.to_ogm(value)
setattr(element, name, value)
if metaprops:
vert_prop = getattr(element, name)
if hasattr(vert_prop, 'mapper_func'):
# Temporary hack for managers
vert_prop.mapper_func(metaprops, vert_prop)
else:
vert_prop.__mapping__.mapper_func(metaprops, vert_prop)
setattr(element, '__label__', label)
setattr(element, 'id', result.id)
return element
|
[
"def get_vertex(self, id_num):",
"def graph_vertex( g, i, add_if_necessary = False ):\n if add_if_necessary and i not in g.id_to_vertex:\n v = g.add_vertex()\n g.id_to_vertex[ i ] = v\n g.vertex_properties[ 'vertex_id' ][ v ] = i\n return g.id_to_vertex[ i ]",
"def get_or_create_vertex(self, label=None, **kwargs):",
"def vertex():\n return Vertex('v1')",
"def get_out_vertex(self):",
"def get_vertex(self, v_id):\n pass",
"def map_edge_to_ogm(result, props, element, *, mapping=None):\n props.pop('id')\n label = props.pop('label')\n for db_name, value in props.items():\n name, data_type = mapping.db_properties.get(db_name, (db_name, None))\n if data_type:\n value = data_type.to_ogm(value)\n setattr(element, name, value)\n setattr(element, '__label__', label)\n setattr(element, 'id', result.id)\n # Currently not included in graphson\n # setattr(element.source, '__label__', result.outV.label)\n # setattr(element.target, '__label__', result.inV.label)\n sid = result.outV.id\n esid = getattr(element.source, 'id', None)\n if _check_id(sid, esid):\n from goblin.element import GenericVertex\n element.source = GenericVertex()\n tid = result.inV.id\n etid = getattr(element.target, 'id', None)\n if _check_id(tid, etid):\n from goblin.element import GenericVertex\n element.target = GenericVertex()\n setattr(element.source, 'id', sid)\n setattr(element.target, 'id', tid)\n return element",
"def remap_vertex(vertex, symmetry):\n assert vertex >= 0 and vertex < 361\n x = vertex % 19\n y = vertex // 19\n if symmetry >= 4:\n x, y = y, x\n symmetry -= 4\n if symmetry == 1 or symmetry == 3:\n x = 19 - x - 1\n if symmetry == 2 or symmetry == 3:\n y = 19 - y - 1\n return y * 19 + x",
"def vertex_to_dict(graph, vertex):\n properties = graph.V(vertex).valueMap().toList()[0]\n for key in properties.keys():\n properties[key] = properties.get(key)[0]\n return {\n 'id': vertex.id,\n 'label': vertex.label,\n 'properties': properties\n }",
"def vertexId(*args, **kwargs):\n \n pass",
"def append_vertex(self, vertex):",
"def add_vertex(self, vertex):\n if vertex not in self.__graph_dict:\n self.__graph_dict[vertex] = []\n self.__directed_dict[vertex] = []",
"def add_vertex(self, key):\n\n if key in self.vert_dict:\n print(f'Vertex {key} already exists')\n return\n\n # create a new vertex\n new_vertex = Vertex(key)\n self.vert_dict[key] = new_vertex\n self.num_vertices += 1\n\n return self.vert_dict[key]",
"def vertexIndex(*args, **kwargs):\n \n pass",
"def add_vertex(self, vertex):\n if vertex.label not in self.vertices():\n self.__graph_dict[vertex.label] = vertex",
"def get_vertex(self, ID):\r\n return self.adj_list.get(ID)",
"def add_vertex(self):\n u = self.g.add_vertex()\n return u",
"def create_vertex(self, key):\n new_vertex = SpVertex(key)\n self._vertex_list[key] = new_vertex\n return new_vertex",
"def getVertex(self, bMFace, bIndex):\n\t\tvertex = Vertex(self.bMesh, bMFace, bIndex, len(self.vertexList), self.fixUpAxis, self.armatureExporter)\n\t\tif self.vertexDict.has_key(bMFace.v[bIndex].index):\n\t\t\t# check Ogre vertices for that Blender vertex\n\t\t\tvertexList = self.vertexDict[bMFace.v[bIndex].index]\n\t\t\tfound = 0\n\t\t\tlistIndex = 0\n\t\t\twhile (not(found) and (listIndex < len(vertexList))):\n\t\t\t\tif (vertex == vertexList[listIndex]):\n\t\t\t\t\tvertex = vertexList[listIndex]\n\t\t\t\t\tfound = 1\n\t\t\t\tlistIndex = listIndex + 1\n\t\t\tif not(found):\n\t\t\t\t# create Ogre vertex for that Blender vertex\n\t\t\t\tself.vertexDict[bMFace.v[bIndex].index].append(vertex)\n\t\t\t\tself.vertexList.append(vertex)\n\t\telse:\n\t\t\t# create Ogre vertex for that Blender vertex\n\t\t\tself.vertexDict[bMFace.v[bIndex].index] = [vertex]\n\t\t\tself.vertexList.append(vertex)\n\t\treturn vertex"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Map an edge returned by the DB to an OGM edge
|
def map_edge_to_ogm(result, props, element, *, mapping=None):
props.pop('id')
label = props.pop('label')
for db_name, value in props.items():
name, data_type = mapping.db_properties.get(db_name, (db_name, None))
if data_type:
value = data_type.to_ogm(value)
setattr(element, name, value)
setattr(element, '__label__', label)
setattr(element, 'id', result.id)
# Currently not included in graphson
# setattr(element.source, '__label__', result.outV.label)
# setattr(element.target, '__label__', result.inV.label)
sid = result.outV.id
esid = getattr(element.source, 'id', None)
if _check_id(sid, esid):
from goblin.element import GenericVertex
element.source = GenericVertex()
tid = result.inV.id
etid = getattr(element.target, 'id', None)
if _check_id(tid, etid):
from goblin.element import GenericVertex
element.target = GenericVertex()
setattr(element.source, 'id', sid)
setattr(element.target, 'id', tid)
return element
|
[
"def edge_to_dict(graph, edge):\n e_id = edge.id.get('@value').get('relationId')\n properties = graph.E(e_id).valueMap().toList()[0]\n return {\n 'id': e_id,\n 'label': edge.label,\n 'properties': properties\n }",
"def remapped_edge(self, remap, v0, v1):\n self_v0 = remap[v0]\n self_v1 = remap[v1]\n eid = self.edge_tuple(self_v0, self_v1)\n return eid",
"def load_edge(self, edge: Relationship):\n\n edge_key = str(uuid.uuid4())\n edge_subject = edge.start\n edge_predicate = edge.properties\n edge_object = edge.end\n\n subject_id = edge_subject['id'] if 'id' in edge_subject else edge_subject.id\n object_id = edge_object['id'] if 'id' in edge_object else edge_object.id\n\n attributes = {}\n\n for key, value in edge_predicate.items():\n attributes[key] = value\n\n # TODO: Is this code residual from attempting to adapt to several drivers?\n if 'subject' not in attributes:\n attributes['subject'] = subject_id\n if 'object' not in attributes:\n attributes['object'] = object_id\n if 'type' not in attributes:\n attributes['type'] = edge.type\n if 'predicate' not in attributes:\n attributes['predicate'] = attributes['type'] if 'type' in attributes else edge.type\n\n if not self.graph.has_node(subject_id):\n self.load_node(edge_subject)\n\n if not self.graph.has_node(object_id):\n self.load_node(edge_object)\n\n self.graph.add_edge(\n subject_id,\n object_id,\n edge_key,\n attr_dict=attributes\n )",
"def db_visit_edge(self):\n kwargs = {\n '_key': '{}:{}'.format(self.domain, self.start),\n '_from': 'traffic/{}:{}'.format(self.domain, self.start),\n '_to': 'domain/{}'.format(self.domain),\n }\n\n return kwargs",
"def render_edge(self, e):\n\n return Element('edge', {'from': e.from_node.id,\n 'to': e.to_node.id,\n 'xml:id': e.id})",
"def fetch_edges_from_ori_graph(self, edges: list) -> list:\n ori_edge = []\n for edge in edges:\n ori_edge.append((edge[0], edge[1], self.ori_G[edge[0]][edge[1]]))\n return ori_edge",
"def get_edge(edge, key):\n\n try:\n res = facade.get_edge(edge, key)\n return res, 200\n except gmap_exc.EdgeNotExist as err:\n return err.message, 404\n except gmap_exc.DocumentNotExist as err:\n return err.message, 404\n except Exception as err:\n return str(err), 500",
"def create_edge(self, join_info_joins, join_path_to_entity_field):\n edge = collections.OrderedDict()\n column_to = join_info_joins[\"joins\"][0][\"to\"]\n column_from = join_info_joins[\"joins\"][0][\"from\"]\n edge[\"source_entity\"], edge[\"source_field\"] = join_path_to_entity_field[\n column_to\n ]\n (\n edge[\"destination_entity\"],\n edge[\"destination_field\"],\n ) = join_path_to_entity_field[column_from]\n edge[\"relationship\"] = join_info_joins[\"relationship\"]\n return edge",
"def reverse_edge(\n edge: EdgeData,\n invert: Optional[Iterable[str]] = None,\n flip: Optional[Iterable[str]] = None,\n geom_column: str = \"geom\",\n) -> EdgeData:\n edge_copy = copy.deepcopy(edge)\n rev_coords = list(reversed(edge_copy[geom_column][\"coordinates\"]))\n edge_copy[geom_column][\"coordinates\"] = rev_coords\n if invert is not None:\n for key in invert:\n if key in edge_copy:\n edge_copy[key] = edge_copy[key] * -1\n if flip is not None:\n for key in flip:\n if key in edge:\n edge_copy[key] = type(edge_copy[key])(not edge_copy[key])\n return edge_copy",
"def create_edge(nodes, edge):\n projection_type = edge.projection_type.kind\n source = nodes[edge.input.id]\n target = nodes[edge.output.id]\n return nest.Connect(source, target, projection_type)",
"def add_edge(self, edge):\n edge = set(edge)\n (vertex1, vertex2) = tuple(edge)\n if vertex1 in self.__graph_dict:\n self.__graph",
"def EdgePropertySet(edge=None):\n if edge is None:\n return {PROV2NEO_EDGE}\n label = (PROV2NEO_LABEL, edge_label(edge))\n properties = [*edge.attributes[2:], *edge.extra_attributes]\n return {PROV2NEO_EDGE, label, *properties}",
"def add_edge(self, edge):\n\t\tedge = set(edge)\n\t\t(vertex, neighbor) = tuple(edge)\n\t\tif vertex not in self.g:\n\t\t\tself.g[vertex] = [neighbor]\n\t\telse:\n\t\t\tself.g[vertex].append(neighbor)\n\t\tprint \"Added Edge : {}\".format(edge)",
"def add_edge(self, edge):\n edge = set(edge)\n (label1, label2) = tuple(edge)\n if label1 in self.vertices() and label2 in self.vertices():\n vertex1 = self[label1]\n vertex2 = self[label2]\n vertex1.add_edge(vertex2)\n vertex2.add_edge(vertex1) # assume undirected",
"def test_graph_orm_edge(self):\n\n for e, v in self.graph.edges.items():\n label = v.get('label')\n if label == 'bi':\n self.assertTrue(hasattr(self.graph.getedges(e), 'get_label'))\n self.assertEqual(self.graph.getedges(e).get_label(), \"bi class\")\n elif label == 'mo':\n self.assertTrue(hasattr(self.graph.getedges(e), 'get_label'))\n self.assertEqual(self.graph.getedges(e).get_label(), \"mo class\")",
"def _adj_to_edge(df):\n return nx.to_pandas_edgelist(nx.from_pandas_adjacency(df))",
"def append_edge(self, edge):",
"def edges(self):\n ret = []\n for layer,edges in self._room.getLayers().items():\n cur = []\n for direction,edge in edges.items():\n cur.append((str(direction), Lua_Map_Edge(self._lua, self._room, direction, edge)))\n ret.append((str(layer), self._lua.lua.table(**dict(cur))))\n\n return self._lua.lua.table(**dict(ret))",
"def edge_features(self):"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Get a user's favourite number and store it in a JSON file.
|
def get_fav_number():
number = int(input("What is your favourite number? "))
filename = 'c10_11_number.json'
with open(filename, 'w') as f:
json.dump(number, f)
return number
|
[
"def get_stored_number():\n filename = 'fav_num.json'\n try:\n with open(filename) as f_obj:\n fav_num = json.load(f_obj)\n except FileNotFoundError:\n return None\n else:\n return fav_num",
"def print_fav_number():\n filename = 'c10_11_number.json'\n with open(filename, 'r') as f:\n number_loaded = json.load(f)\n print(f\"I know your favourite number! It's {number_loaded}\")",
"def favorite_number():\r\n\tfavorite_number = load_favorite_number()\r\n\tif favorite_number:\r\n\t\tprint(f\"I know your favorite number, it is {favorite_number}!\")\r\n\telse:\r\n\t\tfavorite_number = get_favorite_number()\r\n\t\tprint(f\"I'll remember your favorite number of {favorite_number} when you come back.\")",
"def get_fav(obj, user):\n fav_obj = Favorites.objects.get_favorite(user, obj)\n return fav_obj",
"def get(self, id):\n user_favourites = Favourite.query.filter_by(user_id=id).all()\n if user_favourites is None:\n return { 'message': 'User has no Favourites'}, 404\n\n return favourites_schema.dump(user_favourites)",
"def favorite( self, favorite ) :\n return self.client.ticketfav( self.project, self, favorite )",
"def add_favourite():\n user_id = mongo.db.users.find_one({\"username\": session[\"user\"]})[\"_id\"]\n user = mongo.db.users.find_one({\"username\": session[\"user\"]})\n\n # To find if check if user already has created favourites,\n # and if wine exists in favourites\n if \"favourites\" in user:\n existing_favourites = user[\"favourites\"]\n if existing_favourites:\n for favourite in existing_favourites:\n if favourite[\"wine_id\"] == request.form.get(\"wine_id\"):\n flash(\"This wine was was already\"\n \" added to your favourites list\")\n return redirect(url_for('view_wines'))\n if request.method == \"POST\":\n # To add the wine to user favourites\n favourite = {\n \"wine_id\": request.form.get(\"wine_id\"),\n \"wine_name\": request.form.get(\"wine_name\").lower(),\n \"grape\": request.form.get(\"grape\").lower(),\n \"vintage\": request.form.get(\"vintage\").lower(),\n \"country\": request.form.get(\"country\").lower(),\n }\n mongo.db.users.update_one({\"_id\": ObjectId(user_id)},\n {\"$push\": {\"favourites\": favourite}})\n\n flash(\"Wine is now added to your favourites list\")\n return redirect(url_for('view_wines'))",
"def new_favourite():\n\n user_id = int(request.args['user_id'])\n photo_id = int(request.args['photo_id'])\n\n # check if photo is already in favourites\n for post in get_favourites(user_id):\n if post[\"photo_id\"] == photo_id:\n return \"NoSucces\"\n\n # add favourite into database\n add_favourite(user_id, photo_id)\n\n return \"Succes\"",
"def check_favorite(user, obj):\n return get_fav(obj, user)",
"def get_user_favorites(self):\n\n return self.parse_raw_response(requests_util.run_request('get', self.API_BASE_URL + '/user/favorites',\n headers=self.__get_header_with_auth()))",
"def get_favme(self):\r\n\r\n page = 'https://www.fanfiction.net/stats/user.php'\r\n payload = {'action': 'favs'}\r\n tree = self.pgetter.get_page(page, payload)\r\n return tree",
"def favorite_count(self) -> int:\n return int(self.statistics.get('favoriteCount'))",
"def get_favorites(self, user_id):\n request_url = API_URL + 'users/%d/favorites' % user_id\n r = requests.get(url=request_url, cookies={'jwt_token': self.jwt})\n\n if r.status_code != 200:\n raise ValueError(r.text)\n\n json_response = r.json()\n json_favorite_decks = [fav for fav in json_response['favorites']\n if 'deck' in fav]\n favorites = []\n try:\n for fav in json_favorite_decks:\n current_favorite = json_converter.json_to_favorite(fav)\n favorites.append(current_favorite)\n except KeyError as ke:\n raise Exception(\"Unexpected JSON format:\\n%s\" % ke)\n\n return favorites",
"def get_favorites(user_id=None, limit=3):\n if 'limit' in request.args:\n limit = int(request.args.get('limit'))\n if 'user_id' in request.args:\n try:\n user_id = int(request.args.get('user_id'))\n except ValueError as e:\n return abort(400)\n else:\n user_id = login_session.get('user_id', None)\n if user_id is not None:\n recs = app.q_Rating().filter_by(user_id=user_id, rating=1)\n else:\n return abort(400)\n count = recs.count()\n # Make a list of the serializable version of each rec.\n recs_json = [each.item.sdict for each in recs]\n # Return a random sampling of the items up to the limit.\n return jsonify(items=sample(recs_json, min(limit, count)))",
"def favorite_review():\n\n review_id = request.form.get('reviewID')\n asin = request.form.get('asin')\n user_id = session['user']['id']\n\n user = User.query.get(user_id)\n\n # Adds or removes a product from a user's favorites\n favorite_status = user.update_favorite_review(review_id)\n\n # If the user favorites a review, automatically favorite the product\n if favorite_status == \"Favorited\":\n user.add_favorite_product_from_review(asin)\n\n return favorite_status",
"def favorite( self, favorite ) :\n return self.client.wikifav( self.project, self, favorite )",
"def _setFavoriteCount(self):\n favoriteCount = 0\n if \"retweeted_status\" in self._tweet:\n favoriteCount = self._tweet[\"retweeted_status\"][\"favorite_count\"]\n \n return favoriteCount",
"def ft_favorite(url):\r\n\ttry:\r\n\t\tids = url.split(\"/\")\r\n\t\ttwitter.create_favorite(id=ids[-1])\r\n\t\tsleep(1)\r\n\texcept Exception as e:\r\n\t\tprint(e)\r\n\texcept TwythonError as e:\r\n\t\tprint(e)",
"def add_favorite(self, user_id, deck_id):\n request_url = API_URL + 'users/%d/favorites' % user_id\n request_payload = {'deckId': deck_id}\n r = requests.post(url=request_url, json=request_payload,\n cookies={'jwt_token': self.jwt})\n\n json_response = r.json()\n added_favorite = json_converter.json_to_favorite(json_response)\n\n return added_favorite"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Read a user's favourite number from a JSON file and print it.
|
def print_fav_number():
filename = 'c10_11_number.json'
with open(filename, 'r') as f:
number_loaded = json.load(f)
print(f"I know your favourite number! It's {number_loaded}")
|
[
"def favorite_number():\r\n\tfavorite_number = load_favorite_number()\r\n\tif favorite_number:\r\n\t\tprint(f\"I know your favorite number, it is {favorite_number}!\")\r\n\telse:\r\n\t\tfavorite_number = get_favorite_number()\r\n\t\tprint(f\"I'll remember your favorite number of {favorite_number} when you come back.\")",
"def get_fav_number():\n number = int(input(\"What is your favourite number? \"))\n filename = 'c10_11_number.json'\n with open(filename, 'w') as f:\n json.dump(number, f)\n return number",
"def display_values(self, response, favourites):\n for r in response:\n print r[0]\n print r[1]\n\n print \"Favourites:\"\n\n for f in favourites:\n print f['number']\n\n self.collect_fizzbuzz_parameters()",
"def get_stored_number():\n filename = 'fav_num.json'\n try:\n with open(filename) as f_obj:\n fav_num = json.load(f_obj)\n except FileNotFoundError:\n return None\n else:\n return fav_num",
"def get_favme(self):\r\n\r\n page = 'https://www.fanfiction.net/stats/user.php'\r\n payload = {'action': 'favs'}\r\n tree = self.pgetter.get_page(page, payload)\r\n return tree",
"def favorite_book(title):\n\tprint(\"One of my favorite books is \" + title.title())",
"def favorite_book(book_title):\n print(f\"One of my favorite books is {book_title}.\")",
"def favorite_book(book):\n print(f\"My fav book is {book.title()}\")",
"def pick_new_number(self):\n self.fav_number = random.randrange(101)",
"def favorites():\n\n # Select the current user's favorited fish\n rows = db.execute(\"SELECT fishname FROM favorites WHERE user_id = :user_id\",\n user_id=session[\"user_id\"])\n\n # If GET, show the users favotited fish\n if request.method == \"GET\":\n\n return render_template(\"favorites.html\", rows=rows)\n\n # If POST, render the selected fish's HTML page\n else:\n\n fish = request.form.get(\"fishname\")\n return render_template(\"fish.html\", fish=fish, verify=1)",
"def show_favourites(update: Update, context: CallbackContext):\n bot_typing(context.bot, update.message.chat_id)\n db.execute('SELECT DISTINCT * FROM users WHERE user_id=%s', (update.message.chat_id,))\n favourites = db.fetchall()\n if favourites:\n for favourite in favourites:\n message = '<b>{}\\nBus Stop Code: /{}</b>'.format(favourite[3], favourite[1])\n keyboard = [\n [InlineKeyboardButton('Select', callback_data='select_favourite'),\n InlineKeyboardButton('Delete', callback_data='delete_favourite')],\n [InlineKeyboardButton('Rename', callback_data='rename_bus_stop')]\n ]\n reply_markup = InlineKeyboardMarkup(keyboard)\n update.message.reply_text(message, reply_markup=reply_markup, parse_mode=ParseMode.HTML)\n else:\n update.message.reply_text(no_fav_msg())",
"def favorite( self, favorite ) :\n return self.client.ticketfav( self.project, self, favorite )",
"def favorite_count(self) -> int:\n block = self.soup.find(\"div\", class_=\"submission-artist-stats\").text.split('|')\n return int(block[1])",
"def showFavorites(request):\n\ttemplate = 'pages/favorites.html'\n\n\tfavoritesList = []\n\n\tif request.method == 'GET':\n\t\t\"\"\"collecte the user profil\"\"\"\n\t\tuser = request.user\n\t\tprofil = search_profil(user.username)\n\n\t\tprofil = profil[0]\n\n\t\t\"\"\"collect user favorite in a list\"\"\"\n\t\tfavoritesList = profil.favorites.all()\n\n\treturn render(request, template, {'detailForm': DetailForm(),\n\t\t'searchForm': SearchForm(),\n\t\t'favoritesList': favoritesList})",
"def check_favorite(user, obj):\n return get_fav(obj, user)",
"def favorite_review():\n\n review_id = request.form.get('reviewID')\n asin = request.form.get('asin')\n user_id = session['user']['id']\n\n user = User.query.get(user_id)\n\n # Adds or removes a product from a user's favorites\n favorite_status = user.update_favorite_review(review_id)\n\n # If the user favorites a review, automatically favorite the product\n if favorite_status == \"Favorited\":\n user.add_favorite_product_from_review(asin)\n\n return favorite_status",
"def favorite_count(self) -> int:\n return int(self.statistics.get('favoriteCount'))",
"def display_book(book_name):\n\tprint(\"One of my favourite books is \" + book_name + \".\")",
"def userFavorites():\n\n # Logged in user's username\n username = db.execute(\"SELECT username FROM users WHERE id=?\", session.get(\"user_id\"))[0]['username']\n\n # User-reached route via GET\n return redirect(\"/favorites/\" + username)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns how many numbers in a given `row` lie between `minimum` and `maximum`
|
def howmany_within_range2(i, row, minimum, maximum):
count = 0
for n in row:
if minimum <= n <= maximum:
count = count + 1
return (i, count)
|
[
"def max_connected_cells(matrix):\n max_region = 0\n for row in range(len(matrix)):\n for col in range(len(matrix[0])):\n if matrix[row][col]:\n count = count_cells_in_region(matrix, row, col)\n if count > max_region:\n max_region = count\n return max_region",
"def count_cells_in_region(matrix, row, col):\n cell_count = 0\n m = len(matrix)\n n = len(matrix[0])\n frontier = [[row, col]]\n while frontier:\n point = frontier.pop()\n x, y = point[0], point[1]\n if matrix[x][y]:\n matrix[x][y] = 0\n cell_count += 1\n for i in range(x - 1, x + 2):\n if i >= 0 and i < m:\n for j in range(y - 1, y + 2):\n if j >= 0 and j < n and matrix[i][j]:\n matrix[i][j] = 0\n cell_count += 1\n frontier.append([i, j])\n return cell_count",
"def number_cells_within_range(query_cell, cells, signaling_range):\n\n if len(cells) == 0:\n return 0\n else:\n query_cell_position = np.array([query_cell.position])\n cell_positions = np.array([cell.position for cell in cells])\n query_cell_distances = cdist(query_cell_position, cell_positions).ravel()\n return len(query_cell_distances[query_cell_distances < signaling_range])",
"def calc_bound(matrix: list):\n # deep copy\n m = [row.copy() for row in matrix]\n bound = 0\n\n # check that if each row has a \"0\"(at least)\n for row_index, row in enumerate(m):\n if 0 not in row:\n min_item = min(row)\n bound += min_item\n # each item in current row minus the min_item\n for item_index, item in enumerate(m[row_index]):\n m[row_index][item_index] -= min_item\n\n # check that if each column has a \"0\"(at least)\n for col_index in range(len(m)):\n cur_col = [row[col_index] for row in m]\n if 0 not in cur_col:\n min_item = min(cur_col)\n bound += min_item\n # each item in cur_col minus the min_item\n for row_index, row in enumerate(m):\n m[row_index][col_index] -= min_item\n return bound, m",
"def count_islands(rows,columns,tiles):\n numOfIslands = 0\n for i in range(0,rows):\n for j in range(0,columns):\n if tiles[i][j] == True:\n numOfIslands += 1\n find_all_parts_of_island(rows,columns,i,j,tiles)\n return numOfIslands",
"def maxRange(self):\n if len(self._cells) > 0:\n maxRow = self._cells[0].row\n maxCol = self._cells[0].col\n for element in self._cells:\n if element.row > maxRow:\n maxRow = element.row\n if element.col > maxCol:\n maxCol = element.col\n maxRow = int(maxRow)\n maxCol = int(maxCol)\n return (maxRow,maxCol)\n else:\n return (0,0)",
"def count_occurrences(curr_vals, num, x, y):\r\n\r\n square = get_square(curr_vals, x, y)\r\n\r\n col_count = np.count_nonzero(curr_vals[:, x // 60] == num)\r\n row_count = np.count_nonzero(curr_vals[y // 60, :] == num)\r\n sq_count = np.count_nonzero(square == num)\r\n\r\n return (col_count, row_count, sq_count)",
"def num_mines(self) -> int:\n count = 0\n for row in self:\n for cell in row:\n if cell.mine:\n count += 1\n return count",
"def count_in_range(li, low, high):\n\tcount = 0\n\tfor i in range(0, len(li)):\n\t\tif low <= li[i] < high:\n\t\t\tcount = count + 1\n\t\telse:\n\t\t\tpass\n\treturn count",
"def count_above_mean(x):\n\tm = mean(x)\n\treturn np.where(x>m)[0].size",
"def count_less_than_median(arr, range_start, range_end):\n median = (range_start + range_end) / 2\n count = 0\n for val in arr:\n if val <= median:\n count += 1\n return count",
"def range_span(ranges):\n if not ranges:\n return 0\n\n ranges.sort()\n ans = 0\n for seq, lt in groupby(ranges, key=lambda x: x[0]):\n lt = list(lt)\n ans += max(max(lt)[1:]) - min(min(lt)[1:]) + 1\n return ans",
"def count_below_mean(x):\n\tm = mean(x)\n\treturn np.where(x<m)[0].size",
"def computeNumberBuff(numrows, buffrows, overlap):\n nbuff = _computeNbuff(numrows, buffrows, overlap)\n niter = 1 + int(nbuff)\n totalrows = niter * buffrows\n # We need to account for the case where the number of\n # iterations ends up being greater than needed due to the\n # overlap.\n #if totalrows > numrows: niter -= 1\n lastbuff = numrows - (niter*(buffrows-overlap))\n\n if lastbuff < overlap+1 and nbuff > 1:\n good = False\n while not good:\n if buffrows > overlap+1:\n buffrows -= 1\n\n nbuff = _computeNbuff(numrows, buffrows, overlap)\n niter = 1 + int(nbuff)\n totalrows = niter * (buffrows - overlap)\n lastbuff = numrows - (niter*(buffrows-overlap))\n if lastbuff > overlap + 1:\n good = True\n else:\n good = True\n return niter,buffrows",
"def count_adjacent_mines(self, *, column, row):\r\n num_adjacent_mines = 0\r\n for i in range(-1, 2):\r\n for j in range(-1, 2):\r\n if i == 0 and j == 0:\r\n continue\r\n test_column = column + i\r\n test_row = row + j\r\n # Make sure that the tile we're checking is an actual tile\r\n if ((test_column < 0 or test_column >= self.columns) or\r\n (test_row < 0 or test_row >= self.rows)):\r\n continue\r\n test_tile = str(test_column) + ',' + str(test_row)\r\n if self.tiles[test_tile].is_mine:\r\n num_adjacent_mines += 1\r\n logging.debug(f'The tile at column {column}, row {row} has '\r\n f'{num_adjacent_mines} adjacent mine(s)')\r\n return num_adjacent_mines",
"def countAdjacentMines(board, x, y):\n adjacentMines = 0\n for newX in range(x - 1, x + 2):\n if 0 <= newX < GRID_SIZE:\n for newY in range(y - 1, y + 2):\n # print(newX, newY)\n if 0 <= newY < GRID_SIZE and board[newY][newX] == \"X\":\n # print(\"mines\")\n adjacentMines += 1\n return adjacentMines",
"def count(divisions):\n N = float(divisions)\n occupied = sum(any((x >= xmin + i * L / N) * (x < xmin + (i + 1) * L / N) *\n (p >= pmin + j * L / N) * (p < pmin + (j + 1) * L / N))\n for i in range(divisions) for j in range(divisions))\n return occupied",
"def n_less_than(self, row_zb: int, col_zb: int) -> int:\n return len(list(self.iter_cells_less_than(row_zb, col_zb)))",
"def test_returns_number_of_islands_in_large_matrix(self):\n matrix = [[1, 0, 0, 0, 0, 1],\n [0, 1, 1, 0, 0, 1],\n [1, 0, 1, 0, 0, 1],\n [0, 1, 0, 0, 1, 0],\n [0, 1, 0, 0, 0, 0],\n [0, 0, 1, 0, 0, 0]]\n result = island_counter(matrix)\n self.assertEqual(result, 7)",
"def minimum_rows(boxes, minimum):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the memory mappings of the currently running process using PANDA's operating system introspection.
|
def get_mappings(self):
l.debug("getting the vmmap of the concrete process")
mapping_output = self.panda.get_mappings(self.panda.get_cpu())
vmmap = []
for mapping in mapping_output:
if mapping.file == self.panda.ffi.NULL:
continue # Unknown name
filename = self.panda.ffi.string(mapping.file).decode()
vmmap.append(MemoryMap(mapping.base, mapping.base + mapping.size, mapping.offset,
filename))
return vmmap
|
[
"def memory_maps(pid: int) -> Iterable[MemoryMapRegion]:\n\n maps = []\n\n # if we can't access the process' memory, this will\n # raise a PermissionError\n with open('/proc/{}/maps'.format(pid), 'rb') as f:\n for line in f:\n range, perms, *_ = line.split()\n start, end = [int(addr, 16) for addr in range.decode().split('-')]\n region = MemoryMapRegion(start, end, perms)\n maps.append(region)\n\n return maps",
"def memory_map(self) -> Dict[str, OMMemoryRegion]:\n return self._memorymap",
"def MemoryInfo(self):\r\n\t\tres = {}\r\n\t\tfor line in cat(\"/proc/meminfo\").split(\"\\n\")[:-1]:\r\n\t\t\tline = RE_SPACES.sub(\" \", line).strip().split(\" \")\r\n\t\t\tname, value = line[:2]\r\n\t\t\tres[name.replace(\"(\", \"_\").replace(\")\", \"_\").replace(\":\", \"\")] = int(value)\r\n\t\treturn res",
"def process_memory_info(pid):\n \n \n # Open process Handle\n handle = get_process_handle(pid)\n \n # Define structure to hold memory data\n meminfo = _PROCESS_MEMORY_COUNTERS()\n \n # Pass pointer to meminfo to processMemory to store the output\n _process_memory(handle, ctypes.pointer(meminfo), ctypes.sizeof(_PROCESS_MEMORY_COUNTERS))\n \n # Close Process Handle\n _close_handle(handle)\n \n # Extract data from meminfo structure and return as python\n # dictionary structure\n return {'PageFaultCount':meminfo.PageFaultCount,\n 'PeakWorkingSetSize':meminfo.PeakWorkingSetSize,\n 'WorkingSetSize':meminfo.WorkingSetSize,\n 'QuotaPeakPagedPoolUsage':meminfo.QuotaPeakPagedPoolUsage,\n 'QuotaPagedPoolUsage':meminfo.QuotaPagedPoolUsage,\n 'QuotaPeakNonPagedPoolUsage':meminfo.QuotaPeakNonPagedPoolUsage,\n 'QuotaNonPagedPoolUsage':meminfo.QuotaNonPagedPoolUsage,\n 'PagefileUsage':meminfo.PagefileUsage,\n 'PeakPagefileUsage':meminfo.PeakPagefileUsage}",
"def fetch_maps_local():\n pid = gdb.selected_inferior().pid\n with open(f\"/proc/{pid}/maps\") as maps:\n return maps.read()",
"def memory():\n\n mem_info = {}\n memory_stat = psutil.virtual_memory()\n mem_info['total'] = int(memory_stat.total / 1024)\n mem_info['free'] = int(memory_stat.available / 1024)\n\n return mem_info",
"def _read_maps(self):\n try:\n f = open(self._procpath('%d/maps' % self.pid))\n except IOError as e:\n # ignore killed process\n if e.errno != errno.ENOENT:\n raise\n return\n for line in f:\n try:\n so = line.split()[5].strip()\n self.mapped.append(so)\n except IndexError:\n pass",
"def get_memory_counters(args={}):\n res = {}\n\n if args.get('virtual_memory', False):\n res['virtual_memory'] = to_dict(psutil.virtual_memory())\n \n if args.get('swap_memory', False):\n res['swap_memory'] = to_dict(psutil.swap_memory())\n \n return res",
"def get_maps(self):\n return # osid.mapping.MapList",
"def get_memory():\n with open('/proc/meminfo') as f:\n return sum(map(lambda x: int(x.split()[1]),\n filter(re_mem.match, f.readlines())))",
"def _GetMemInfo(self):\n totalmem = 0\n freemem = 0\n with open(PROC_MEMINFO) as pfile:\n for line in pfile:\n fields = line.split()\n name = fields[0]\n value = fields[1]\n if name == 'MemTotal:':\n totalmem = int(value)\n elif name == 'MemFree:':\n freemem = int(value)\n return (totalmem, freemem)",
"def fetch_maps():\n try:\n # Try to fetch maps from a remote debug server.\n # If we're not using a remote server, this will throw an exception\n # and we'll fall back to the local implementation.\n return fetch_maps_remote()\n except gdb.error:\n # Try to fetch maps from the current inferior PID by reading the local\n # /proc/PID/maps file directly.\n return fetch_maps_local()",
"def get_memory(self):\n return self.__memories[self.__server_specific_name]",
"def get_processes(self):\n processes = {}\n for proc in psutil.process_iter():\n try:\n pinfo = proc.as_dict(attrs=['pid', 'name'])\n processes[pinfo['name']] = pinfo['pid']\n except psutil.NoSuchProcess:\n pass\n return(processes)",
"def machine_info():\n BYTES_IN_GIG = 1073741824\n free_bytes = psutil.virtual_memory().available\n return [{\"memory\": int(free_bytes / BYTES_IN_GIG), \"cores\": multiprocessing.cpu_count(),\n \"name\": socket.gethostname()}]",
"def getProcesses(self):\n return self.dbg.enumerate_processes()",
"def libs(self):\n try:\n maps_raw = open('/proc/%d/maps' % self.pid).read()\n except IOError:\n maps_raw = None\n\n if not maps_raw:\n import pwnlib.elf.elf\n\n with context.quiet:\n return pwnlib.elf.elf.ELF(self.executable).maps\n\n # Enumerate all of the libraries actually loaded right now.\n maps = {}\n for line in maps_raw.splitlines():\n if '/' not in line: continue\n path = line[line.index('/'):]\n path = os.path.realpath(path)\n if path not in maps:\n maps[path]=0\n\n for lib in maps:\n path = os.path.realpath(lib)\n for line in maps_raw.splitlines():\n if line.endswith(path):\n address = line.split('-')[0]\n maps[lib] = int(address, 16)\n break\n\n return maps",
"def list_memory(self):\n\n doc = self.client.enumerate(uris.DCIM_MemoryView)\n\n installed_memory = utils.find_xml(doc, 'DCIM_MemoryView',\n uris.DCIM_MemoryView,\n find_all=True)\n\n return [self._parse_memory(memory) for memory in installed_memory]",
"def read_memory(self):\n # Get the procfs status information for each process.\n #\n # Done in a loop instead of a comprehension because we need to handle\n # the case where one of these processes is no longer alive, which will\n # cause a DoesNotExist exception.\n statuses = []\n for process in self.processes:\n try:\n statuses.append(process.status)\n except (DoesNotExist, UnknownProcess):\n # Process is no longer running.\n continue\n\n # Dispatch metrics for each worker.\n for idx, status in enumerate(statuses):\n try:\n vmsize = Values(\n plugin_instance='vmsize',\n values=(status['VmSize'],),\n )\n\n vmrss = Values(\n plugin_instance='vmrss',\n values=(status['VmRSS'],),\n )\n except (DoesNotExist, UnknownProcess):\n # Process is no longer running.\n continue\n\n for metric in (vmsize, vmrss):\n metric.dispatch(\n plugin=self.name,\n type='gauge',\n type_instance=str(idx),\n )"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests that tokenize_by_sentence function can handle an ideal two-sentence input
|
def test_tokenize_by_sentence_ideal(self):
    text = 'She is happy. He is happy.'
    expected = (
        (('_', 's', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_')),
        (('_', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_'))
    )
    actual = tokenize_by_sentence(text)
    self.assertEqual(expected, actual)
|
[
"def test_tokenize_by_sentence_complex(self):\n text = 'Mar#y wa$nted, to swim. However, she was afraid of sharks.'\n expected = (\n (('_', 'm', 'a', 'r', 'y', '_'), ('_', 'w', 'a', 'n', 't', 'e', 'd', '_'),\n ('_', 't', 'o', '_'), ('_', 's', 'w', 'i', 'm', '_')),\n (('_', 'h', 'o', 'w', 'e', 'v', 'e', 'r', '_'), ('_', 's', 'h', 'e', '_'),\n ('_', 'w', 'a', 's', '_'), ('_', 'a', 'f', 'r', 'a', 'i', 'd', '_'),\n ('_', 'o', 'f', '_'), ('_', 's', 'h', 'a', 'r', 'k', 's', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_inappropriate_sentence(self):\n text = '$#&*@#$*#@)'\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_punctuation_marks(self):\n text = 'The, first sentence - nice. The second sentence: bad!'\n expected = (\n (('_', 't', 'h', 'e', '_'), ('_', 'f', 'i', 'r', 's', 't', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'n', 'i', 'c', 'e', '_')),\n (('_', 't', 'h', 'e', '_'), ('_', 's', 'e', 'c', 'o', 'n', 'd', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'b', 'a', 'd', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def tokenize(self, sentence):\n ...",
"def test_tokenize_by_sentence_incorrect_input(self):\n bad_inputs = [[], {}, (), None, 9, 9.34, True]\n expected = ()\n for bad_input in bad_inputs:\n actual = tokenize_by_sentence(bad_input)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_empty_sentence(self):\n text = ''\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def sentences(a, b):\n\n a, b = sent_tokenize(a), sent_tokenize(b)\n\n return compare_lists(a, b)",
"def sentences(a, b):\n\n # Turn the inputs into sentences\n asplit = splitter(a, \"se\", 0)\n bsplit = splitter(b, \"se\", 0)\n\n # Find matches\n matches = matcher(asplit, bsplit)\n\n return matches",
"def test_sentence_splitter():\n inputfile = \"test_coleto.py\"\n sampletext = \"This is one sentence. \\\n This is another sentence. \\\n This is a third sentence. \\\n This is a fourth sentence. \\\n This is a fifth sentence. \\\n This is a sixth sentence.\"\n sentences, num_sentences = text_preprocess.sentence_splitter(inputfile,\n sampletext)\n assert num_sentences == 6, \"Number of sentences in test textshould be 6.\"",
"def test_sentence_segmentation(self):\n\n input = 'This is the first paragraph.\\n\\n\\nThis is the second paragraph.'\n re_paragraph_splitter = '\\n\\n+'\n result = self.datacleaner.sentence_segmentation(input, re_paragraph_splitter)\n self.assertEqual(result, ['This is the first paragraph.', 'This is the second paragraph.'])",
"def test_tokenize_sentences(self):\n tok = tokenize_sentences(self.docs)\n dd = defaultdict(list)\n\n self.assertEqual(type(tok), type(dd))\n\n for doc_key, value in self.docs.items():\n self.assertTrue(doc_key in key for key, value in tok.items())",
"def tokenize_sents(self, sentences, keep_token_positions: bool = ...):\n ...",
"def sentence_tokenize(input_text):\n sent_lst = []\n sent_pipe = PARSER.create_pipe(\"sentencizer\")\n PARSER.add_pipe(sent_pipe)\n doc = PARSER(input_text)\n for sent in doc.sents:\n sent_lst.append(sent.text)\n return sent_lst",
"def test_tokenize_english(case, text):\n tokens, tokens_span = rd.tokenize_en(text)\n for i, (start, end) in enumerate(tokens_span):\n case.assertEqual(text[start:end], tokens[i])",
"def tokenize(lines):\n return word_tokenize(lines)",
"def tokenize(self):",
"def test_english_tokenization():\n text = 'It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness.'\n tknzr = Tokenizer(Languages.ENGLISH)\n tokens = tknzr.tokenize(text)\n correct_tokens = ['It', 'was', 'the', 'best', 'of', 'times', 'it', 'was', 'the', 'worst', 'of', 'times', 'it', 'was', 'the', 'age', 'of', 'wisdom', 'it', 'was', 'the', 'age', 'of', 'foolishness']\n assert tokens == correct_tokens",
"def preprocess_sentence(sentence):\n sentence = sentence.lower()\n return word_tokenize(sentence)",
"def test_similar_sentence_gets_same_response_multiple_times(self):\n training_data = [\n 'how do you login to gmail?',\n 'Goto gmail.com, enter your login information and hit enter!'\n ]\n\n similar_question = 'how do I login to gmail?'\n\n self.trainer.train(training_data)\n\n response_to_trained_set = self.chatbot.get_response(\n text='how do you login to gmail?',\n conversation='a'\n )\n response1 = self.chatbot.get_response(\n text=similar_question,\n conversation='b'\n )\n response2 = self.chatbot.get_response(\n text=similar_question,\n conversation='c'\n )\n\n self.assertEqual(response_to_trained_set.text, training_data[1])\n self.assertEqual(response1.text, training_data[1])\n self.assertEqual(response2.text, training_data[1])"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests that tokenize_by_sentence function can process and ignore different punctuation marks
|
def test_tokenize_by_sentence_punctuation_marks(self):
    text = 'The, first sentence - nice. The second sentence: bad!'
    expected = (
        (('_', 't', 'h', 'e', '_'), ('_', 'f', 'i', 'r', 's', 't', '_'),
         ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'n', 'i', 'c', 'e', '_')),
        (('_', 't', 'h', 'e', '_'), ('_', 's', 'e', 'c', 'o', 'n', 'd', '_'),
         ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'b', 'a', 'd', '_'))
    )
    actual = tokenize_by_sentence(text)
    self.assertEqual(expected, actual)
|
[
"def test_tokenize_by_sentence_inappropriate_sentence(self):\n text = '$#&*@#$*#@)'\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_ideal(self):\n text = 'She is happy. He is happy.'\n expected = (\n (('_', 's', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_')),\n (('_', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_complex(self):\n text = 'Mar#y wa$nted, to swim. However, she was afraid of sharks.'\n expected = (\n (('_', 'm', 'a', 'r', 'y', '_'), ('_', 'w', 'a', 'n', 't', 'e', 'd', '_'),\n ('_', 't', 'o', '_'), ('_', 's', 'w', 'i', 'm', '_')),\n (('_', 'h', 'o', 'w', 'e', 'v', 'e', 'r', '_'), ('_', 's', 'h', 'e', '_'),\n ('_', 'w', 'a', 's', '_'), ('_', 'a', 'f', 'r', 'a', 'i', 'd', '_'),\n ('_', 'o', 'f', '_'), ('_', 's', 'h', 'a', 'r', 'k', 's', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_empty_sentence(self):\n text = ''\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def tokenize(self, sentence):\n ...",
"def preprocess_sentence(sentence):\n sentence = sentence.lower()\n return word_tokenize(sentence)",
"def test_tokenize_by_sentence_incorrect_input(self):\n bad_inputs = [[], {}, (), None, 9, 9.34, True]\n expected = ()\n for bad_input in bad_inputs:\n actual = tokenize_by_sentence(bad_input)\n self.assertEqual(expected, actual)",
"def segment_by_punctuation(text: str):\n\treturn nltk.sent_tokenize(text)",
"def test_tokenize_english(case, text):\n tokens, tokens_span = rd.tokenize_en(text)\n for i, (start, end) in enumerate(tokens_span):\n case.assertEqual(text[start:end], tokens[i])",
"def test_english_tokenization():\n text = 'It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness.'\n tknzr = Tokenizer(Languages.ENGLISH)\n tokens = tknzr.tokenize(text)\n correct_tokens = ['It', 'was', 'the', 'best', 'of', 'times', 'it', 'was', 'the', 'worst', 'of', 'times', 'it', 'was', 'the', 'age', 'of', 'wisdom', 'it', 'was', 'the', 'age', 'of', 'foolishness']\n assert tokens == correct_tokens",
"def process_text(text, stem=True):\r\n #text = text.translate(None,string.punctuation)\r\n tokens = word_tokenize(text)\r\n \r\n if stem:\r\n stemmer = PorterStemmer()\r\n tokens = [stemmer.stem(t) for t in tokens]\r\n \r\n return tokens",
"def tokenize_sents(self, sentences, keep_token_positions: bool = ...):\n ...",
"def sentence_pre_processing(raw_sentence):\n words = np.asarray(word_tokenize(raw_sentence.lower())) # lower case and tokenization\n punctuation_removed = map(remove_punctuation, words) # remove punctuation\n stopwords_filtered = filter(lambda word: word not in ALL_STOPWORDS, punctuation_removed) # stop word removal\n return np.asarray(list(filter(is_alphanumeric, stopwords_filtered))) # remove non-alphanumeric words",
"def sentence_tokenize(input_text):\n sent_lst = []\n sent_pipe = PARSER.create_pipe(\"sentencizer\")\n PARSER.add_pipe(sent_pipe)\n doc = PARSER(input_text)\n for sent in doc.sents:\n sent_lst.append(sent.text)\n return sent_lst",
"def tokenize(lines):\n return word_tokenize(lines)",
"def test_tokenize_sentences(self):\n tok = tokenize_sentences(self.docs)\n dd = defaultdict(list)\n\n self.assertEqual(type(tok), type(dd))\n\n for doc_key, value in self.docs.items():\n self.assertTrue(doc_key in key for key, value in tok.items())",
"def get_words(sentence):\n return sentence.translate(None,string.punctuation).split(' ')",
"def tokenize_description(self, sentence):\n lower_string = sentence.lower()\n token_list = []\n\n tok_list = re.findall(r'[\\w]+', lower_string)\n\n for word in tok_list:\n if word not in self.stop_words:\n token_list.append(stemmer.stem(word))\n\n return token_list",
"def test_sentence_splitter():\n inputfile = \"test_coleto.py\"\n sampletext = \"This is one sentence. \\\n This is another sentence. \\\n This is a third sentence. \\\n This is a fourth sentence. \\\n This is a fifth sentence. \\\n This is a sixth sentence.\"\n sentences, num_sentences = text_preprocess.sentence_splitter(inputfile,\n sampletext)\n assert num_sentences == 6, \"Number of sentences in test textshould be 6.\""
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests that tokenize_by_sentence function can handle incorrect input cases
|
def test_tokenize_by_sentence_incorrect_input(self):
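    # Each malformed input is expected to yield an empty tuple rather than raise.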
    bad_inputs = [[], {}, (), None, 9, 9.34, True]
    expected = ()
    for bad_input in bad_inputs:
        actual = tokenize_by_sentence(bad_input)
        self.assertEqual(expected, actual)
|
[
"def test_tokenize_by_sentence_inappropriate_sentence(self):\n text = '$#&*@#$*#@)'\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_ideal(self):\n text = 'She is happy. He is happy.'\n expected = (\n (('_', 's', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_')),\n (('_', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_empty_sentence(self):\n text = ''\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_complex(self):\n text = 'Mar#y wa$nted, to swim. However, she was afraid of sharks.'\n expected = (\n (('_', 'm', 'a', 'r', 'y', '_'), ('_', 'w', 'a', 'n', 't', 'e', 'd', '_'),\n ('_', 't', 'o', '_'), ('_', 's', 'w', 'i', 'm', '_')),\n (('_', 'h', 'o', 'w', 'e', 'v', 'e', 'r', '_'), ('_', 's', 'h', 'e', '_'),\n ('_', 'w', 'a', 's', '_'), ('_', 'a', 'f', 'r', 'a', 'i', 'd', '_'),\n ('_', 'o', 'f', '_'), ('_', 's', 'h', 'a', 'r', 'k', 's', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_punctuation_marks(self):\n text = 'The, first sentence - nice. The second sentence: bad!'\n expected = (\n (('_', 't', 'h', 'e', '_'), ('_', 'f', 'i', 'r', 's', 't', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'n', 'i', 'c', 'e', '_')),\n (('_', 't', 'h', 'e', '_'), ('_', 's', 'e', 'c', 'o', 'n', 'd', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'b', 'a', 'd', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def tokenize(self, sentence):\n ...",
"def test_tokenize_english(case, text):\n tokens, tokens_span = rd.tokenize_en(text)\n for i, (start, end) in enumerate(tokens_span):\n case.assertEqual(text[start:end], tokens[i])",
"def test_tokenise_bad_string_fail3(self):\n\n # Tokenise an invalid string.\n self.assertRaises(RelaxError, tokenise, '')",
"def _process_sentence(self, sentence):\n raise ValueError(\"Please override this class!\")",
"def test_english_tokenization():\n text = 'It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness.'\n tknzr = Tokenizer(Languages.ENGLISH)\n tokens = tknzr.tokenize(text)\n correct_tokens = ['It', 'was', 'the', 'best', 'of', 'times', 'it', 'was', 'the', 'worst', 'of', 'times', 'it', 'was', 'the', 'age', 'of', 'wisdom', 'it', 'was', 'the', 'age', 'of', 'foolishness']\n assert tokens == correct_tokens",
"def test_tokenise_bad_string_fail1(self):\n\n # Tokenise an invalid string.\n self.assertRaises(RelaxError, tokenise, '13')",
"def test_tokenise_out_of_order_mol_id_fail2(self):\n\n # Tokenise an invalid string.\n self.assertRaises(RelaxError, tokenise, '@N,CA#A')",
"def test_tokenise_bad_string_fail2(self):\n\n # Tokenise an invalid string.\n self.assertRaises(RelaxError, tokenise, 'XXX')",
"def test_tokenize_sentences(self):\n tok = tokenize_sentences(self.docs)\n dd = defaultdict(list)\n\n self.assertEqual(type(tok), type(dd))\n\n for doc_key, value in self.docs.items():\n self.assertTrue(doc_key in key for key, value in tok.items())",
"def preprocess_sentence(sentence):\n sentence = sentence.lower()\n return word_tokenize(sentence)",
"def test_english_contractions():\n tknzr = Tokenizer(Languages.ENGLISH)\n text = \"Don't doesn't didn't can't couldn't I've haven't hasn't hadn't\"\n tokens = tknzr.tokenize(text)\n correct_tokens = ['Do', 'not', 'does', 'not', 'did', 'not', 'can', 'not', 'could', 'not', 'I', 'have', 'have', 'not', 'has', 'not', 'had', 'not']\n assert tokens == correct_tokens \n \n text = \"I'll he'll she'll it'll won't wouldn't I'm\"\n tokens = tknzr.tokenize(text)\n correct_tokens = ['I', 'will', 'he', 'will', 'she', 'will', 'it', 'will', 'will', 'not', 'would', 'not', 'I', 'am']\n assert tokens == correct_tokens",
"def test_sentence_splitter():\n inputfile = \"test_coleto.py\"\n sampletext = \"This is one sentence. \\\n This is another sentence. \\\n This is a third sentence. \\\n This is a fourth sentence. \\\n This is a fifth sentence. \\\n This is a sixth sentence.\"\n sentences, num_sentences = text_preprocess.sentence_splitter(inputfile,\n sampletext)\n assert num_sentences == 6, \"Number of sentences in test textshould be 6.\"",
"def test_invalid_input_tokens(self):\n msg1 = 'Must raise `TypeError` when input `tokens` is invalid.'\n msg2 = 'Inconsistent error message.'\n examples = (\n False, True, 0, 1, -1, 0.0, 1.0, math.nan, -math.nan, math.inf,\n -math.inf, 0j, 1j, object(), lambda x: x, type, None,\n NotImplemented, ..., [False], [True], [0], [1], [-1], [0.0], [1.0],\n [math.nan], [-math.nan], [math.inf], [-math.inf], [0j], [1j],\n [b''], [object()], [lambda x: x], [type], [None], [NotImplemented],\n [...], ['', False], ['', True], ['', 0], ['', 1], ['', -1],\n ['', 0.0], ['', 1.0], ['', math.nan], ['', -math.nan],\n ['', math.inf], ['', -math.inf], ['', 0j], ['', 1j], ['', b''],\n ['', object()], ['', lambda x: x], ['', type], ['', None],\n ['', NotImplemented], ['', ...],\n )\n\n for invalid_input in examples:\n for tokenizer in self.tokenizers:\n with self.assertRaises(TypeError, msg=msg1) as cxt_man:\n tokenizer.convert_tokens_to_ids(tokens=invalid_input)\n\n self.assertEqual(\n cxt_man.exception.args[0],\n '`tokens` must be an instance of `Iterable[str]`.',\n msg=msg2\n )",
"def test_sentence_segmentation(self):\n\n input = 'This is the first paragraph.\\n\\n\\nThis is the second paragraph.'\n re_paragraph_splitter = '\\n\\n+'\n result = self.datacleaner.sentence_segmentation(input, re_paragraph_splitter)\n self.assertEqual(result, ['This is the first paragraph.', 'This is the second paragraph.'])"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests that tokenize_by_sentence function can handle complex split case
|
def test_tokenize_by_sentence_complex(self):
    text = 'Mar#y wa$nted, to swim. However, she was afraid of sharks.'
    expected = (
        (('_', 'm', 'a', 'r', 'y', '_'), ('_', 'w', 'a', 'n', 't', 'e', 'd', '_'),
         ('_', 't', 'o', '_'), ('_', 's', 'w', 'i', 'm', '_')),
        (('_', 'h', 'o', 'w', 'e', 'v', 'e', 'r', '_'), ('_', 's', 'h', 'e', '_'),
         ('_', 'w', 'a', 's', '_'), ('_', 'a', 'f', 'r', 'a', 'i', 'd', '_'),
         ('_', 'o', 'f', '_'), ('_', 's', 'h', 'a', 'r', 'k', 's', '_'))
    )
    actual = tokenize_by_sentence(text)
    self.assertEqual(expected, actual)
|
[
"def test_tokenize_by_sentence_ideal(self):\n text = 'She is happy. He is happy.'\n expected = (\n (('_', 's', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_')),\n (('_', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def tokenize(self, sentence):\n ...",
"def test_tokenize_by_sentence_inappropriate_sentence(self):\n text = '$#&*@#$*#@)'\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_punctuation_marks(self):\n text = 'The, first sentence - nice. The second sentence: bad!'\n expected = (\n (('_', 't', 'h', 'e', '_'), ('_', 'f', 'i', 'r', 's', 't', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'n', 'i', 'c', 'e', '_')),\n (('_', 't', 'h', 'e', '_'), ('_', 's', 'e', 'c', 'o', 'n', 'd', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'b', 'a', 'd', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_empty_sentence(self):\n text = ''\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_sentence_splitter():\n inputfile = \"test_coleto.py\"\n sampletext = \"This is one sentence. \\\n This is another sentence. \\\n This is a third sentence. \\\n This is a fourth sentence. \\\n This is a fifth sentence. \\\n This is a sixth sentence.\"\n sentences, num_sentences = text_preprocess.sentence_splitter(inputfile,\n sampletext)\n assert num_sentences == 6, \"Number of sentences in test textshould be 6.\"",
"def test_sentence_segmentation(self):\n\n input = 'This is the first paragraph.\\n\\n\\nThis is the second paragraph.'\n re_paragraph_splitter = '\\n\\n+'\n result = self.datacleaner.sentence_segmentation(input, re_paragraph_splitter)\n self.assertEqual(result, ['This is the first paragraph.', 'This is the second paragraph.'])",
"def sentence_split(text, properties={'annotators': 'ssplit', 'outputFormat': 'json'}):\n annotated = nlp.annotate(text, properties)\n sentence_split = list()\n for sentence in annotated['sentences']:\n s = [t['word'] for t in sentence['tokens']]\n k = [item.lower() for item in s if item not in [\",\", \".\", '...', '..']]\n sentence_split.append(\" \".join(k))\n return sentence_split",
"def test_tokenize_by_sentence_incorrect_input(self):\n bad_inputs = [[], {}, (), None, 9, 9.34, True]\n expected = ()\n for bad_input in bad_inputs:\n actual = tokenize_by_sentence(bad_input)\n self.assertEqual(expected, actual)",
"def tokenize(self):",
"def sentence_tokenize(input_text):\n sent_lst = []\n sent_pipe = PARSER.create_pipe(\"sentencizer\")\n PARSER.add_pipe(sent_pipe)\n doc = PARSER(input_text)\n for sent in doc.sents:\n sent_lst.append(sent.text)\n return sent_lst",
"def tokenize(lines):\n return word_tokenize(lines)",
"def test_tokenize_sentences(self):\n tok = tokenize_sentences(self.docs)\n dd = defaultdict(list)\n\n self.assertEqual(type(tok), type(dd))\n\n for doc_key, value in self.docs.items():\n self.assertTrue(doc_key in key for key, value in tok.items())",
"def test_tokenize_english(case, text):\n tokens, tokens_span = rd.tokenize_en(text)\n for i, (start, end) in enumerate(tokens_span):\n case.assertEqual(text[start:end], tokens[i])",
"def preprocess_sentence(sentence):\n sentence = sentence.lower()\n return word_tokenize(sentence)",
"def sentence_split(text: str,\n split_newline: Union[str, bool] = True,\n merge_apostrophe_word: bool = False,\n ) -> Generator[str, Any, None]:\n for sentence_tokens in sentence_split_tokens(text,\n split_newline=split_newline,\n merge_apostrophe_word=merge_apostrophe_word):\n sentence = ''.join(token.text for token in sentence_tokens).strip()\n if sentence:\n yield sentence",
"def tokenize(self, text):\n split_tokens = [] # list of `SubToken`s.\n for token, orig_token, is_good_token in self.basic_tokenizer.tokenize(text):\n if not is_good_token:\n split_tokens.append(SubToken(token, orig_token, is_good=False))\n continue\n\n # Preserve special tokens such as '[Q]' and '[SEP]'.\n if bert_tokenization.preserve_token(token, self.vocab):\n split_tokens.append(SubToken(token, orig_token, is_good=True))\n continue\n\n # For everything else, send the text-like tokens that have survived\n # whitespace and puncutation splitting through a wordpiece tokenizer.\n for sub_token in self.wordpiece_tokenizer.tokenize(\n [SubToken(token, orig_token, is_good_token)]):\n # `sub_token` has type `SubToken`.\n split_tokens.append(sub_token)\n\n return split_tokens",
"def tokenize_sents(self, sentences, keep_token_positions: bool = ...):\n ...",
"def testClassicSplit(self):\n self.assertEquals((\"Раз\", \"Два\", \"Три\"), pytils.utils.split_values(\"Раз,Два,Три\"))\n self.assertEquals((\"Раз\", \"Два\", \"Три\"), pytils.utils.split_values(\"Раз, Два,Три\"))\n self.assertEquals((\"Раз\", \"Два\", \"Три\"), pytils.utils.split_values(\" Раз, Два, Три \"))\n self.assertEquals((\"Раз\", \"Два\", \"Три\"), pytils.utils.split_values(\" Раз, \\nДва,\\n Три \"))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests that tokenize_by_sentence function can handle empty sentence input
|
def test_tokenize_by_sentence_empty_sentence(self):
    text = ''
    expected = ()
    actual = tokenize_by_sentence(text)
    self.assertEqual(expected, actual)
|
[
"def test_tokenize_by_sentence_inappropriate_sentence(self):\n text = '$#&*@#$*#@)'\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_incorrect_input(self):\n bad_inputs = [[], {}, (), None, 9, 9.34, True]\n expected = ()\n for bad_input in bad_inputs:\n actual = tokenize_by_sentence(bad_input)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_ideal(self):\n text = 'She is happy. He is happy.'\n expected = (\n (('_', 's', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_')),\n (('_', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_punctuation_marks(self):\n text = 'The, first sentence - nice. The second sentence: bad!'\n expected = (\n (('_', 't', 'h', 'e', '_'), ('_', 'f', 'i', 'r', 's', 't', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'n', 'i', 'c', 'e', '_')),\n (('_', 't', 'h', 'e', '_'), ('_', 's', 'e', 'c', 'o', 'n', 'd', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'b', 'a', 'd', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_complex(self):\n text = 'Mar#y wa$nted, to swim. However, she was afraid of sharks.'\n expected = (\n (('_', 'm', 'a', 'r', 'y', '_'), ('_', 'w', 'a', 'n', 't', 'e', 'd', '_'),\n ('_', 't', 'o', '_'), ('_', 's', 'w', 'i', 'm', '_')),\n (('_', 'h', 'o', 'w', 'e', 'v', 'e', 'r', '_'), ('_', 's', 'h', 'e', '_'),\n ('_', 'w', 'a', 's', '_'), ('_', 'a', 'f', 'r', 'a', 'i', 'd', '_'),\n ('_', 'o', 'f', '_'), ('_', 's', 'h', 'a', 'r', 'k', 's', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def tokenize(self, sentence):\n ...",
"def test_sentence_splitter():\n inputfile = \"test_coleto.py\"\n sampletext = \"This is one sentence. \\\n This is another sentence. \\\n This is a third sentence. \\\n This is a fourth sentence. \\\n This is a fifth sentence. \\\n This is a sixth sentence.\"\n sentences, num_sentences = text_preprocess.sentence_splitter(inputfile,\n sampletext)\n assert num_sentences == 6, \"Number of sentences in test textshould be 6.\"",
"def test_empty_corpus(self):\n\n extractor = TokenExtractor()\n candidates = extractor.extract([ ])\n self.assertFalse(len(candidates))",
"def preprocess_sentence(sentence):\n sentence = sentence.lower()\n return word_tokenize(sentence)",
"def test_tokenize_sentences(self):\n tok = tokenize_sentences(self.docs)\n dd = defaultdict(list)\n\n self.assertEqual(type(tok), type(dd))\n\n for doc_key, value in self.docs.items():\n self.assertTrue(doc_key in key for key, value in tok.items())",
"def test_tftext_sentencepiece_tokenizer_bos_eos(self):\n tftext_sp = tensorflow_text.SentencepieceTokenizer(\n self.sentencepiece_model, add_bos=True, add_eos=True)\n opt_sp = sentencepiece_tokenizer.SentencepieceTokenizer(\n self.sentencepiece_model, add_bos=True, add_eos=True)\n\n input_text = [\n u\" \", u\"to be or not to be\", u\"ignored by length text1\",\n u\"ignored by length text2\"\n ]\n tftext_tokenized = tftext_sp.tokenize(input_text)\n opt_tokenized = opt_sp.tokenize(input_text)\n self.assertAllEqual(tftext_tokenized, opt_tokenized)",
"def test_sentence_segmentation(self):\n\n input = 'This is the first paragraph.\\n\\n\\nThis is the second paragraph.'\n re_paragraph_splitter = '\\n\\n+'\n result = self.datacleaner.sentence_segmentation(input, re_paragraph_splitter)\n self.assertEqual(result, ['This is the first paragraph.', 'This is the second paragraph.'])",
"def test_parse_word_stemming_empty(self):\n feed = \"\"\n expected = \"\"\n\n result = StemParser().stem(feed)\n self.assertEqual(expected, result)",
"def test_tokenize_words(self):\n tok = tokenize_words(self.docs)\n l = list()\n\n self.assertEqual(type(tok), type(l))",
"def tokenize_sents(self, sentences, keep_token_positions: bool = ...):\n ...",
"def sentence_tokenize(input_text):\n sent_lst = []\n sent_pipe = PARSER.create_pipe(\"sentencizer\")\n PARSER.add_pipe(sent_pipe)\n doc = PARSER(input_text)\n for sent in doc.sents:\n sent_lst.append(sent.text)\n return sent_lst",
"def tokenize(self):",
"def get_tokens(annotated_sentence):\n\n return [seq for seq in annotated_sentence.split('\\n') if seq]",
"def _process_sentence(self, sentence):\n raise ValueError(\"Please override this class!\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Tests that tokenize_by_sentence function can handle inappropriate sentence input
|
def test_tokenize_by_sentence_inappropriate_sentence(self):
    text = '$#&*@#$*#@)'
    expected = ()
    actual = tokenize_by_sentence(text)
    self.assertEqual(expected, actual)
|
[
"def test_tokenize_by_sentence_ideal(self):\n text = 'She is happy. He is happy.'\n expected = (\n (('_', 's', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_')),\n (('_', 'h', 'e', '_'), ('_', 'i', 's', '_'), ('_', 'h', 'a', 'p', 'p', 'y', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_incorrect_input(self):\n bad_inputs = [[], {}, (), None, 9, 9.34, True]\n expected = ()\n for bad_input in bad_inputs:\n actual = tokenize_by_sentence(bad_input)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_complex(self):\n text = 'Mar#y wa$nted, to swim. However, she was afraid of sharks.'\n expected = (\n (('_', 'm', 'a', 'r', 'y', '_'), ('_', 'w', 'a', 'n', 't', 'e', 'd', '_'),\n ('_', 't', 'o', '_'), ('_', 's', 'w', 'i', 'm', '_')),\n (('_', 'h', 'o', 'w', 'e', 'v', 'e', 'r', '_'), ('_', 's', 'h', 'e', '_'),\n ('_', 'w', 'a', 's', '_'), ('_', 'a', 'f', 'r', 'a', 'i', 'd', '_'),\n ('_', 'o', 'f', '_'), ('_', 's', 'h', 'a', 'r', 'k', 's', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_empty_sentence(self):\n text = ''\n\n expected = ()\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def test_tokenize_by_sentence_punctuation_marks(self):\n text = 'The, first sentence - nice. The second sentence: bad!'\n expected = (\n (('_', 't', 'h', 'e', '_'), ('_', 'f', 'i', 'r', 's', 't', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'n', 'i', 'c', 'e', '_')),\n (('_', 't', 'h', 'e', '_'), ('_', 's', 'e', 'c', 'o', 'n', 'd', '_'),\n ('_', 's', 'e', 'n', 't', 'e', 'n', 'c', 'e', '_'), ('_', 'b', 'a', 'd', '_'))\n )\n actual = tokenize_by_sentence(text)\n self.assertEqual(expected, actual)",
"def tokenize(self, sentence):\n ...",
"def preprocess_sentence(sentence):\n sentence = sentence.lower()\n return word_tokenize(sentence)",
"def _process_sentence(self, sentence):\n raise ValueError(\"Please override this class!\")",
"def test_tokenize_sentences(self):\n tok = tokenize_sentences(self.docs)\n dd = defaultdict(list)\n\n self.assertEqual(type(tok), type(dd))\n\n for doc_key, value in self.docs.items():\n self.assertTrue(doc_key in key for key, value in tok.items())",
"def test_sentence_splitter():\n inputfile = \"test_coleto.py\"\n sampletext = \"This is one sentence. \\\n This is another sentence. \\\n This is a third sentence. \\\n This is a fourth sentence. \\\n This is a fifth sentence. \\\n This is a sixth sentence.\"\n sentences, num_sentences = text_preprocess.sentence_splitter(inputfile,\n sampletext)\n assert num_sentences == 6, \"Number of sentences in test textshould be 6.\"",
"def test_tokenize_english(case, text):\n tokens, tokens_span = rd.tokenize_en(text)\n for i, (start, end) in enumerate(tokens_span):\n case.assertEqual(text[start:end], tokens[i])",
"def test_sentence_segmentation(self):\n\n input = 'This is the first paragraph.\\n\\n\\nThis is the second paragraph.'\n re_paragraph_splitter = '\\n\\n+'\n result = self.datacleaner.sentence_segmentation(input, re_paragraph_splitter)\n self.assertEqual(result, ['This is the first paragraph.', 'This is the second paragraph.'])",
"def tokenize_sents(self, sentences, keep_token_positions: bool = ...):\n ...",
"def test_english_tokenization():\n text = 'It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness.'\n tknzr = Tokenizer(Languages.ENGLISH)\n tokens = tknzr.tokenize(text)\n correct_tokens = ['It', 'was', 'the', 'best', 'of', 'times', 'it', 'was', 'the', 'worst', 'of', 'times', 'it', 'was', 'the', 'age', 'of', 'wisdom', 'it', 'was', 'the', 'age', 'of', 'foolishness']\n assert tokens == correct_tokens",
"def sentence_tokenize(input_text):\n sent_lst = []\n sent_pipe = PARSER.create_pipe(\"sentencizer\")\n PARSER.add_pipe(sent_pipe)\n doc = PARSER(input_text)\n for sent in doc.sents:\n sent_lst.append(sent.text)\n return sent_lst",
"def test_gen_sentence():\n\n # Test default length of generated sentence and first word from file\n test_sentence = text_utilities.gen_sentence(test_file)\n words = test_sentence.split(' ')\n assert len(words) == 10\n assert words[0] == 'hello'\n\n # Test specified length of sentence\n specified_length_test = text_utilities.gen_sentence(test_file, 3)\n small_length = specified_length_test.split(' ')\n assert len(small_length) == 3",
"def sentences(a, b):\n\n a, b = sent_tokenize(a), sent_tokenize(b)\n\n return compare_lists(a, b)",
"def test_tokenize_words(self):\n tok = tokenize_words(self.docs)\n l = list()\n\n self.assertEqual(type(tok), type(l))",
"def tokenize_description(self, sentence):\n lower_string = sentence.lower()\n token_list = []\n\n tok_list = re.findall(r'[\\w]+', lower_string)\n\n for word in tok_list:\n if word not in self.stop_words:\n token_list.append(stemmer.stem(word))\n\n return token_list"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Gets index of the dimension corresponding to height.
|
def get_height_dim(layout: str):
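    # e.g. for a layout string like 'NCHW', 'H' is at index 2; str.find returns -1 if 'H' is absent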
    return layout.find('H')
|
[
"def height( self, pyArgs, index, wrappedOperation ):\n return self.arrayType.dimensions( pyArgs[self.pixelsIndex] )[1]",
"def get_dimension_index(self, dim):\n if isinstance(dim, Dimension): dim = dim.name\n if isinstance(dim, int):\n if (dim < (self.ndims + len(self.vdims)) or\n dim < len(self.dimensions())):\n return dim\n else:\n return IndexError('Dimension index out of bounds')\n try:\n if dim in self.kdims+self.vdims:\n return (self.kdims+self.vdims).index(dim)\n return self.dimensions().index(dim)\n except ValueError:\n raise Exception(\"Dimension %s not found in %s.\" %\n (dim, self.__class__.__name__))",
"def height_at(self, x, z):\n\n return self.heightmap[x, z]",
"def index_at(self, x, y):\n sx, sy = self.pixel_from_screen(x, y)\n return int(sx // self.tile_width), int(sy // self.tile_height)",
"def _get_idx(self, coord, is_y):\n if is_y:\n if coord < 0 or coord > len(self.grid) * self.tileLength:\n # TODO handle off grid case\n return None\n else:\n if coord < 0 or coord > len(self.grid[0]) * self.tileLength:\n # TODO handle off grid case\n return None\n\n coord -= (self.tileLength / 2)\n if (-self.tileLength / 2) < coord < 0:\n return 0\n\n else:\n low_estimate = int(coord // self.tileLength)\n offset = coord % self.tileLength\n ret = low_estimate + \\\n 1 if offset > (self.tileLength / 2) else low_estimate\n return ret\n # if is_y:\n # return (len(self.grid) - 1) - ret\n # else:\n # return ret",
"def searchDimension(self):\n\t\t\t\treturn pow(2, self._ppmResolution-1)",
"def get_dim(self, key):\n return self.dim.get(key, None)",
"def dimension(self):\n return self.field(Field.POSITION).shape[1]",
"def getDimension(self, unit: 'int const'=0) -> \"int32_t\":\n return _coin.SoMultiTextureCoordinateElement_getDimension(self, unit)",
"def grid_index(self, x, y):\n return (y - self.min_y) * self.width + (x - self.min_x)",
"def get_dimension_index(self, dimension, value):\n # Get an arbitrary dataset to look into the dimensions\n dataset = self.first_winter['u']\n\n # Get available levels for the dimension of interest\n available_levels = list(dataset[dimension][:].data)\n\n # Get the index where it is equal to the value of interest\n index = np.where(np.isclose(available_levels, value))[0]\n if len(index) == 0:\n raise LookupError(\"{} could not be found at {} level.\".\n format(dimension, value))\n\n return np.asscalar(index), available_levels",
"def _get_index(self) -> \"size_t\" :\n return _core.ToolbarPanel__get_index(self)",
"def getDimension():\n ierr = c_int()\n api__result__ = lib.gmshModelGetDimension(\n byref(ierr))\n if ierr.value != 0:\n raise ValueError(\n \"gmshModelGetDimension returned non-zero error code: \",\n ierr.value)\n return api__result__",
"def findLayoutAt(self, y: int) -> int:\n ...",
"def dim(self):\n return self._dim",
"def cell_dimension(self):\n return self.ufl_cell().topological_dimension()",
"def get_height(self):\n\t\treturn self.y[1] - self.y[0]",
"def index_of(self, axis):\n return _ffi_api.LayoutIndexOf(self, axis) # type: ignore",
"def _get_index(self) -> \"size_t\" :\n return _core.ToolbarTab__get_index(self)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Gets index of the dimension corresponding to width.
|
def get_width_dim(layout: str):
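    # e.g. for a layout string like 'NCHW', 'W' is at index 3; str.find returns -1 if 'W' is absent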
    return layout.find('W')
|
[
"def get_dimension_index(self, dim):\n if isinstance(dim, Dimension): dim = dim.name\n if isinstance(dim, int):\n if (dim < (self.ndims + len(self.vdims)) or\n dim < len(self.dimensions())):\n return dim\n else:\n return IndexError('Dimension index out of bounds')\n try:\n if dim in self.kdims+self.vdims:\n return (self.kdims+self.vdims).index(dim)\n return self.dimensions().index(dim)\n except ValueError:\n raise Exception(\"Dimension %s not found in %s.\" %\n (dim, self.__class__.__name__))",
"def width( self, pyArgs, index, wrappedOperation ):\n return self.arrayType.dimensions( pyArgs[self.pixelsIndex] )[0]",
"def dimension(self):\n return self.field(Field.POSITION).shape[1]",
"def get_dim(self, key):\n return self.dim.get(key, None)",
"def get_width(self, index): \n\n coarse_index = self._fine_to_coarse(index)\n\n return self._fine_mesh[coarse_index]",
"def index_of(self, axis):\n return _ffi_api.LayoutIndexOf(self, axis) # type: ignore",
"def searchDimension(self):\n\t\t\t\treturn pow(2, self._ppmResolution-1)",
"def dim(self):\n return self._dim",
"def get_dimension_index(self, dimension, value):\n # Get an arbitrary dataset to look into the dimensions\n dataset = self.first_winter['u']\n\n # Get available levels for the dimension of interest\n available_levels = list(dataset[dimension][:].data)\n\n # Get the index where it is equal to the value of interest\n index = np.where(np.isclose(available_levels, value))[0]\n if len(index) == 0:\n raise LookupError(\"{} could not be found at {} level.\".\n format(dimension, value))\n\n return np.asscalar(index), available_levels",
"def getDimension(self, unit: 'int const'=0) -> \"int32_t\":\n return _coin.SoMultiTextureCoordinateElement_getDimension(self, unit)",
"def get_width(self):\n\t\treturn self.x[1] - self.x[0]",
"def getDimension():\n ierr = c_int()\n api__result__ = lib.gmshModelGetDimension(\n byref(ierr))\n if ierr.value != 0:\n raise ValueError(\n \"gmshModelGetDimension returned non-zero error code: \",\n ierr.value)\n return api__result__",
"def _get_width(self) -> \"int\" :\n return _core.Viewport__get_width(self)",
"def get_w(img):\n return len(img[1][0])",
"def _get_index(self) -> \"size_t\" :\n return _core.ToolbarPanel__get_index(self)",
"def index_at(self, x, y):\n sx, sy = self.pixel_from_screen(x, y)\n return int(sx // self.tile_width), int(sy // self.tile_height)",
"def num_dimensions(self):\n return self.numDim.value",
"def _get_width(self) -> \"int\" :\n return _core.Palette__get_width(self)",
"def grid_index(self, x, y):\n return (y - self.min_y) * self.width + (x - self.min_x)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Evaluates the given forward function on batches from the given dataloader, and prints progress along the way.
|
def _foreach_batch(dl: DataLoader,
                   forward_fn: Callable[[Any], BatchResult],
                   verbose=True, max_batches=None) -> EpochResult:
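    # Accumulate per-batch losses and correct-prediction counts over the epoch, reporting progress via tqdm.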
    losses = []
    num_correct = 0
    num_samples = len(dl.sampler)
    num_batches = len(dl.batch_sampler)
    if max_batches is not None:
        if max_batches < num_batches:
            num_batches = max_batches
            num_samples = num_batches * dl.batch_size
    if verbose:
        pbar_file = sys.stdout
    else:
        pbar_file = open(os.devnull, 'w')
    pbar_name = forward_fn.__name__
    with tqdm.tqdm(desc=pbar_name, total=num_batches,
                   file=pbar_file) as pbar:
        dl_iter = iter(dl)
        for batch_idx in range(num_batches):
            data = next(dl_iter)
            batch_res = forward_fn(data)
            pbar.set_description(f'{pbar_name} ({batch_res.loss:.3f})')
            pbar.update()
            losses.append(batch_res.loss)
            num_correct += batch_res.num_correct
        avg_loss = sum(losses) / num_batches
        accuracy = 100. * num_correct / num_samples
        pbar.set_description(f'{pbar_name} '
                             f'(Avg. Loss {avg_loss:.3f}, '
                             f'Accuracy {accuracy:.1f})')
    return EpochResult(losses=losses, accuracy=accuracy)
|
[
"def _forward_with_dataloader(\n self,\n batched_perturbed_feature_indices: Tensor,\n dataloader: torch.utils.data.DataLoader,\n input_roles: Tuple[int],\n baselines: Tuple[Union[int, float, Tensor], ...],\n feature_mask: Tuple[Tensor, ...],\n reduce: Callable,\n to_metric: Optional[Callable],\n show_progress: bool,\n feature_idx_to_mask_idx: Dict[int, List[int]],\n ):\n\n # batched_perturbed_feature_indices in shape(n_perturb, n_features)\n # n_perturb is not always the same as perturb_per_pass if not enough perturb\n perturbation_mask_list: List[Tuple[Union[Tensor, None], ...]] = [\n _create_perturbation_mask(\n perturbed_feature_indices,\n feature_mask,\n feature_idx_to_mask_idx,\n )\n for perturbed_feature_indices in batched_perturbed_feature_indices\n ]\n\n # each perturbation needs an accum state\n accum_states = [None for _ in range(len(perturbation_mask_list))]\n\n # tranverse the dataloader\n for inputs in dataloader:\n # for each batch read from the dataloader,\n # apply every perturbation based on perturbations_per_pass\n for i, perturbation_mask in enumerate(perturbation_mask_list):\n perturbed_inputs = _perturb_inputs(\n inputs, input_roles, baselines, perturbation_mask\n )\n\n # due to explicitly defined roles\n # we can keep inputs in their original order\n # regardless of if they need attr\n # instead of using additional_forward_inputs\n forward_inputs = tuple(\n _\n for _, role in zip(perturbed_inputs, input_roles)\n if role != InputRole.no_forward\n )\n\n output = _run_forward(\n self.forward_func,\n forward_inputs,\n )\n\n accum_states[i] = reduce(accum_states[i], output, perturbed_inputs)\n\n accum_results = [\n to_metric(accum) if to_metric else accum for accum in accum_states\n ]\n\n assert all(type(r) is Tensor for r in accum_results), (\n \"Accumulated metrics for attribution must be a Tensor,\"\n f\"received: {next(r for r in accum_results if type(r) is not Tensor)}\"\n )\n\n # shape(n_perturb * output_dims[0], *output_dims[1:])\n # the underneath attr method needs to support forward_func output's\n # 1st dim to grow with perturb_per_eval\n batched_accum = torch.stack(accum_results, dim=0)\n return batched_accum",
"def Track_for_loop_progress(iterator, len_iterator, message=None):\n import timeit\n import sys\n from IPython.display import clear_output\n if( iterator==0 ):\n global Track_for_loop_progress_start\n Track_for_loop_progress_start = timeit.default_timer()\n stop = timeit.default_timer()\n if( (iterator/len_iterator)<0.05 ):\n expected_time = 0.0\n else:\n time_perc = timeit.default_timer()\n expected_time = np.round((time_perc-Track_for_loop_progress_start)/((iterator+1)/len_iterator), 2)\n clear_output(wait=True)\n print(\"Current progress:\", np.round( (iterator+1)/len_iterator*100, 2), \"%\")\n print(\"Current run time:\", int((stop-Track_for_loop_progress_start)/60),\"min\", int((stop-Track_for_loop_progress_start)%60), \"s\")\n print(\"Expected run time:\", int(expected_time/60),\"min\", int(expected_time%60), \"s\")\n if( message ):\n print(message)\n sys.stdout.flush()\n if( iterator==(len_iterator-1) ):\n del Track_for_loop_progress_start",
"def fit(self, \n train_step: Callable[[MiniBatch, Dict[str, Any]], Dict[str, Any]], \n train_data: Iterator[MiniBatch],\n n_steps: int, \n metric_freq: int=100,\n snapshot_freq: int=500,\n eval_step: Callable[[MiniBatch, Dict[str, Any]], Dict[str, Any]]=None,\n eval_data: Iterator[MiniBatch]=None,\n eval_freq: int=500) -> Dict[str, Any]:\n\n self.model.train()\n with torch.no_grad():\n for metric in self._metrics.values():\n metric.reset()\n\n step, epoch, state_dict = 0, 0, {}\n \n try:\n while step < n_steps:\n start = time.time()\n epoch += 1\n print(f'======================== Epoch {epoch} ========================')\n for minibatch in train_data:\n state_dict = train_step(minibatch, state_dict)\n\n with torch.no_grad():\n for metric in self._metrics.values():\n metric.accumulate(**state_dict)\n\n # Report Metrics\n if step % metric_freq == 0:\n print(f'Step {step:3d}', end=' ')\n self._steps.append(step)\n for name, metric in self._metrics.items():\n val = metric.compute_and_reset()\n self._log_scalar(name, val.item(), step, self._metric_values)\n print()\n \n # Log Snapshot Functions to TensorBoard\n if step % snapshot_freq == 0:\n for name, snapshot_fn in self._snapshot_fns.items():\n self._tb.add_histogram(name, snapshot_fn(**state_dict), step)\n \n # Report validation Metrics\n if eval_step and step % eval_freq == 0:\n eval_start_time = time.time()\n print(f'Step {step:3d}', end=' ')\n self._eval_steps.append(step)\n for name, val in self.evaluate(eval_step, eval_data).items():\n self._log_scalar(name, val.item(), step, self._eval_metric_values)\n print(f'Eval time: {time.time() - eval_start_time: .4f}s')\n self.model.train()\n\n step += 1\n if step >= n_steps: break \n\n print(f'Epoch Time: {time.time() - start: .4f}s')\n\n except KeyboardInterrupt:\n print(f'Training terminated by Keyboard Interrupt' )\n \n self.finish()\n print('Completed training.')\n return state_dict",
"def benchmark_forward(T, fn, *inputs, desc='', verbose=True, **kwinputs):\n if verbose:\n print(desc, '- Forward pass')\n t = B.Timer(\n stmt='fn(*inputs, **kwinputs)',\n globals={'fn': fn, 'inputs': inputs, 'kwinputs': kwinputs},\n num_threads=torch.get_num_threads(),\n )\n m = t.timeit(T)\n if verbose:\n print(m)\n return t, m",
"def log(test_env, iteration, step_idx, total_reward):\n time.sleep(.3)\n print()\n print(f\"Training Episodes: {iteration}\")\n test_env.colored_print()\n print(f\"Step: {step_idx}\")\n print(f\"Return: {total_reward}\")",
"def progressCallback(progress):\n print 'Progress: %f'%progress",
"def train(\n model, epoch, dataloader, criterion, optimizer, scheduler=None, mixup=False, alpha=0.4,\n logging_frequency=50\n):\n model.train()\n total, total_loss, total_correct = 0, 0., 0.\n\n for i, (x, y) in enumerate(dataloader):\n if torch.cuda.is_available():\n x, y = x.cuda(), y.cuda()\n mixed_x, y_a, y_b, lam, mixup_criterion, mixup_acc = mixup_data(\n x, y, criterion, alpha=alpha if mixup else 0.0\n )\n optimizer.zero_grad()\n output = model(mixed_x)\n prediction = torch.argmax(output, -1)\n loss = mixup_criterion(output)\n total_loss += loss.item() * len(y)\n total_correct += mixup_acc(prediction)\n total += len(y)\n loss.backward()\n optimizer.step()\n if scheduler is not None:\n scheduler.step()\n if i % logging_frequency == 0 and i > 0:\n \"\"\" TODO:\n Add Tensorboard functionality here - mainly writer.add_scalar for\n overall loss, accuracy (i.e. over all epochs).\n \"\"\"\n log.debug(\n \"[Epoch %d, Iteration %d / %d] Training Loss: %.5f, \"\n \"Training Accuracy: %.5f [Projected Accuracy: %.5f]\"\n % (\n epoch,\n i,\n len(dataloader),\n total_loss / total,\n total_correct / len(dataloader.dataset),\n (total_correct / len(dataloader.dataset)) / (i / len(dataloader))\n )\n )\n final_loss, final_acc = total_loss / total, total_correct / total\n log.info(\n \"Reporting %.5f training loss, %.5f training accuracy for epoch %d.\" %\n (final_loss, final_acc, epoch)\n )\n return final_loss, final_acc",
"def eval_step(self, iterator):\n pass",
"def progress(transactionId, function):\n _log.info(\"%s PROGRESS %s\" % (transactionId.hex, function))",
"def on_complete(self, objfuns, parameters):\n self.numeval += np.size(objfuns, 0)\n params = zip(*parameters)[0]\n simids = zip(*parameters)[1]\n iterids = zip(*parameters)[2]\n\n # Record calculated evaluations into a history list\n for indx, item in enumerate(objfuns):\n self.log_completion(item, params[indx], simids[indx], iterids[indx])\n self.fhat.add_point(params[indx], item)\n if item < self.fbest:\n self.xbest = params[indx]\n self.fbest = item\n if self.fbest < self.fbest_global:\n self.fbest_global = self.fbest\n self.xbest_global = self.xbest\n\n print (\"Iteration: %s previous fbest: %s new fbest: %s \"%(iterids[-1], self.fbest_old, self.fbest))",
"def predict_model(model: nn.Module, dataset: Dataset, batch_size: int, attack_function: Union[Callable, None] = None,\n attack_args: Union[Callable, None] = None) -> float:\n if attack_args is None:\n attack_args = {}\n test_loader = DataLoader(dataset, batch_size=batch_size, shuffle=False)\n num_batches = int(torch.ceil(torch.tensor(len(dataset) / batch_size)).item())\n predictions = []\n targets = []\n for x, y in tqdm(iter(test_loader), total=num_batches):\n ##########################################################\n # YOUR CODE HERE\n ...\n ##########################################################\n predictions = torch.cat(predictions)\n targets = torch.cat(targets)\n accuracy = (predictions == targets).float().mean().item()\n return accuracy",
"def _forward_step(self, inputs: list):\n self._model.eval()\n with torch.no_grad():\n return self._model(inputs)",
"def pytorch_profiler(T, fn, *inputs):\n\n with torch.profiler.profile(\n activities=[\n torch.profiler.ProfilerActivity.CPU,\n torch.profiler.ProfilerActivity.CUDA,\n ],\n record_shapes=True,\n profile_memory=True,\n with_stack=True,\n ) as p:\n # benchmark_forward(T, fn, *inputs)\n fn(*inputs)\n\n print(p.key_averages().table(\n sort_by=\"self_cuda_time_total\", row_limit=-1))",
"def eval_input_fn():\n dataset = tf.data.TFRecordDataset(\"test\")\n dataset = dataset.map(_parse_function)\n dataset = dataset.batch(32)\n return dataset.make_one_shot_iterator().get_next()",
"def train(self, training_data, n_iter, eta, test_data = None,\r\n mini_batch_size = 1):\r\n training_data_size = len(training_data)\r\n \r\n if test_data:\r\n self.progress = []\r\n self.progress.append(self.test(test_data))\r\n print(self.progress[-1])\r\n \r\n for _ in range(n_iter):\r\n for i in range(0,training_data_size-mini_batch_size+1,\r\n mini_batch_size):\r\n self.backpropagate_mini_batch(training_data[i:i+mini_batch_size],\r\n mini_batch_size, eta)\r\n random.shuffle(training_data)\r\n if test_data:\r\n self.progress.append(self.test(test_data))\r\n print(self.progress[-1])",
"def run_forward_solves(self, params):\n\n print('Running forward solves')\n self.make_forward_sim(params)\n iter = self.optimizer.iteration if self.store_all_simulations else 0\n self.sim.run(name = 'forward', iter = iter)\n \n get_eps = True\n get_D = not self.use_deps\n nointerpolation = not self.geometry.use_interpolation()\n \n self.forward_fields = get_fields(self.sim.fdtd,\n monitor_name = 'opt_fields',\n field_result_name = 'forward_fields',\n get_eps = get_eps,\n get_D = get_D,\n get_H = False,\n nointerpolation = nointerpolation,\n unfold_symmetry = self.unfold_symmetry)\n fom = self.fom.get_fom(self.sim)\n\n if self.store_all_simulations:\n self.sim.remove_data_and_save() #< Remove the data from the file to save disk space. TODO: Make optional?\n\n self.fomHist.append(fom)\n print('FOM = {}'.format(fom))\n return fom",
"def eval_loop(\n cavity_model_net,\n dataloader_val,\n loss_function,\n):\n # Eval loop. Due to memory, we don't pass the whole eval set to the model\n labels_true_val = []\n labels_pred_val = []\n loss_batch_list_val = []\n cavity_model_net.eval()\n\n for batch_x_val, batch_y_val in dataloader_val:\n batch_y_pred_val = cavity_model_net(batch_x_val)\n\n loss_batch_val = loss_function(\n batch_y_pred_val, torch.argmax(batch_y_val, dim=-1)\n )\n loss_batch_list_val.append(loss_batch_val.detach().cpu().item())\n\n labels_true_val.append(torch.argmax(batch_y_val, dim=-1).detach().cpu().numpy())\n labels_pred_val.append(\n torch.argmax(batch_y_pred_val, dim=-1).detach().cpu().numpy()\n )\n\n acc_val = np.mean(\n (np.reshape(labels_true_val, -1) == np.reshape(labels_pred_val, -1))\n )\n loss_val = np.mean(loss_batch_list_val)\n\n return acc_val, loss_val",
"def _functional_test(\n preds: torch.Tensor,\n target: torch.Tensor,\n metric_functional: Callable,\n sk_metric: Callable,\n metric_args: dict = {},\n atol: float = 1e-8,\n):\n metric = partial(metric_functional, **metric_args)\n\n for i in range(NUM_BATCHES):\n lightning_result = metric(preds[i], target[i])\n sk_result = sk_metric(preds[i], target[i])\n\n # assert its the same\n assert np.allclose(lightning_result.numpy(), sk_result, atol=atol)",
"def test_forward_consistency_with_functional(self):\n if not isinstance(self.instance, FunctionalInteraction):\n self.skipTest(\"Not a functional interaction\")\n\n # set in eval mode (otherwise there are non-deterministic factors like Dropout\n self.instance.eval()\n for hs, rs, ts in self._get_test_shapes():\n h, r, t = self._get_hrt(hs, rs, ts)\n scores = self.instance(h=h, r=r, t=t)\n kwargs = self.instance._prepare_for_functional(h=h, r=r, t=t)\n scores_f = self.cls.func(**kwargs)\n assert torch.allclose(scores, scores_f)",
"def train_forecaster(self, forecaster, data):\n forecaster.fit(data)\n print(\"Fitted {} on toy data\".format(forecaster.__class__.__name__))\n visualize_predictions_quick(forecaster, src_dir + \"/../plots/tests/{}\".format(forecaster.__class__.__name__), store_id=1)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Compute an aggregate embedding vector for an input str or iterable of str
|
def transform_sentence(self, text: Union[Iterable, str]) -> np.ndarray:
def preprocess_text(raw_text: str) -> List[str]:
""" Prepare text for the model, excluding unknown words"""
if not isinstance(raw_text, list):
if not isinstance(raw_text, str):
raise TypeError('Input should be a str or a list of str, got ' + str(type(raw_text)))
raw_text = raw_text.split()
return list(filter(lambda x: x in self.model.vocab, raw_text))
tokens = preprocess_text(text)
if not tokens:
return np.zeros(self.model.vector_size)
if self.aggregation == 'average':
text_vector = np.mean(self.model[tokens], axis=0)
elif self.aggregation == 'sum':
text_vector = np.sum(self.model[tokens], axis=0)
elif self.aggregation == 'minmax':
maxi = np.max(self.model[tokens], axis=0)
mini = np.min(self.model[tokens], axis=0)
text_vector = np.concatenate([maxi, mini])
else:
raise ValueError('Unknown embeddings aggregation mode: ' + self.aggregation)
return text_vector
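
For illustration, a minimal sketch of what the three aggregation modes produce for a two-word input. The TinyKeyedVectors stand-in and its vectors are invented here; it only mimics the gensim-style interface (vocab, vector_size, indexing by a token list) that transform_sentence relies on.

import numpy as np

class TinyKeyedVectors:
    # Hypothetical stand-in for the word-embedding model; not a real library class.
    vector_size = 3
    vocab = {"cat": 0, "dog": 1}
    _vecs = {"cat": np.array([1.0, 0.0, 2.0]), "dog": np.array([0.0, 1.0, 4.0])}
    def __getitem__(self, tokens):
        # gensim-style lookup: a list of tokens yields a 2-D array of vectors
        return np.stack([self._vecs[t] for t in tokens])

model = TinyKeyedVectors()
tokens = [t for t in "cat dog unknown".split() if t in model.vocab]  # same filtering as preprocess_text
print(np.mean(model[tokens], axis=0))                  # 'average' -> [0.5 0.5 3. ]
print(np.sum(model[tokens], axis=0))                   # 'sum'     -> [1. 1. 6.]
print(np.concatenate([np.max(model[tokens], axis=0),
                      np.min(model[tokens], axis=0)])) # 'minmax'  -> [1. 1. 4. 0. 0. 2.]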
|
[
"def words_embedding(words: list, glove):\n\n word_embeddings = map(partial(get_word_vec, glove=glove), words)\n concat_words_embedding = np.concatenate(list(word_embeddings))\n return concat_words_embedding",
"def compute_vector(word, model):\n return sum([model.wv.get_vector(x) for x in [word[i:i + 3] for i in range(len(word) - 2)]])",
"def _vectorize_and_add(self, text):\n words = text.split()\n sum = np.zeros(self.word_model.vector_size)\n for w in words:\n try:\n v = self.word_model[w]\n sum += v\n except:\n pass\n return sum",
"def central_embedding(self, tokens):\n return np.average( [self.word2vec( i ) for i in tokens], axis=0 )",
"def sentence_to_avg(word_list, word_to_vec_map):\n \n # Initialize the average word vector, should have the same shape as your word vectors.\n shape = np.shape(50,)\n \n avg = np.zeros(shape)\n\n \n total = 0\n unknown_counter = 0\n for w in word_list:\n try:\n total += word_to_vec_map[w]\n except:\n unknown_counter += 1\n \n avg = total / len(word_list) - unknown_counter\n \n \n return avg",
"def avg_word_vectors(wordlist,size): \n sumvec=np.zeros(shape=(1,size))\n wordcnt=0\n for w in wordlist:\n if w in model_w2v:\n sumvec += model_w2v[w]\n wordcnt +=1\n \n if wordcnt ==0:\n return sumvec\n else:\n return sumvec / wordcnt",
"def vectorizeSentence(self, sentence):\n embeddedSentence = []\n vectorSize = self.model.vector_size\n\n for word in sentence:\n embedding = np.zeros(vectorSize)\n if(word == \"[None]\"):\n embedding = np.zeros(vectorSize)\n else:\n if(word in self.model):\n embedding = self.model[word]\n embedding=np.array(embedding)\n else:\n embedding=np.zeros(vectorSize)\n\n embeddedSentence += [embedding]\n\n return embeddedSentence",
"def infer_vector(self, text):\n raise NotImplementedError",
"def get_vector(word):\n return spacy_dictionary(word).vector",
"def buildWordVector(text, size, model, google):\n vec = np.zeros(size).reshape((1, size))\n count = 0.\n for word in text:\n try:\n if not google:\n vec += model.wv.__getitem__(word).reshape((1, size))\n else:\n vec += model.__getitem__(word).reshape((1, size))\n count += 1.\n except KeyError:\n continue\n if count != 0:\n vec /= count\n return vec",
"def words_avg_embedding(words: list, glove):\n\n word_embeddings = map(partial(get_word_vec, glove=glove), words)\n sum_words_embedding = reduce(np.add, word_embeddings)\n return sum_words_embedding / len(words)",
"def mean_terms_vector(sentence: str):\n vectors = list(terms_vectors(sentence.split()))\n # return np.mean(vectors, axis=0) if vectors else 0\n return np.mean(vectors, axis=0) if vectors else np.zeros(300, dtype=np.float32)",
"def transform(sentences):\n vec = np.array([np.mean([model[w] if (w in model.vocab) else zeros \\\n for w in sentence], axis=0) for sentence in sentences])\n return vec",
"def get_mean_word_vector(self, docs): \n n = len(docs)\n X = np.empty([n, self.word_vec_len])\n doc_vec = np.zeros(self.word_vec_len)\n for idx, doc in enumerate(docs):\n doc_vec = reduce(lambda x, y: x+y, [token.vector for token in self.nlp(doc)])\n doc_vec /= n \n X[idx, :] = doc_vec\n return X",
"def createEmbeddingFromText(input_text, embedding_model, weight_dictionary):\n ## Preprocess input text and turn into a bag of words\n processed_input_text = preprocess(input_text, lemmatize = True, remove_numbers = True, min_word_len = 1)\n processed_input_words = processed_input_text.split(\" \")\n\n ## Get weights for all available words in weight_dictionary\n processed_word_weights = {w:weight_dictionary[w] for w in processed_input_words if w in weight_dictionary.keys()}\n\n ## If no weights exist for all words in the input text, use a strict averaging\n if processed_word_weights == {}:\n processed_word_weights = {w:1 for w in processed_input_words}\n\n ## In the event that a word does not have a weight, the mean weight across all words that\n ## have weights will be used. If no words have weights, then the mean weight across all \n ## available weights is used\n try:\n mean_weight = np.mean(list(processed_word_weights.values()))\n except:\n mean_weight = np.mean(list(weight_dictionary.values()))\n\n ## Assign mean weight to all words missing one\n for word in [w for w in processed_input_words if w not in processed_word_weights.keys()]:\n processed_word_weights[word] = mean_weight\n\n ## Make an embedding for each word and multiply by the weight\n embeddings = []\n total_weight = 0.0\n for w in processed_input_words:\n sentence_ = Sentence(w)\n embedding_model.embed(sentence_)\n embeddings.append(processed_word_weights[w]*sentence_[0].embedding.numpy().astype(float))\n total_weight += processed_word_weights[w]\n embeddings = np.array(embeddings)\n \n return np.sum(embeddings, axis = 0)/total_weight",
"def get_embedding(word, nlp):\n token = nlp(word)\n return token.vector",
"def string_vectorisation(string, lexicon):\n\twords = word_tokenize(string.lower())\n\twords = [WordNetLemmatizer().lemmatize(word) for word in words]\n\tvector = np.zeros(len(lexicon))\n\n\tfor word in words:\n\t\tif word.lower() in lexicon:\n\t\t\tword_index = lexicon.index(word.lower())\n\t\t\tvector[word_index] += 1\n\treturn vector",
"def sentence_to_avg(sentence, word_to_vec_map):\n\n # Step 1: Split sentence into list of lower case words (≈ 1 line)\n words = sentence.lower().split()\n\n # Initialize the average word vector, should have the same shape as your word vectors.\n avg = np.zeros(word_to_vec_map[words[0]].shape)\n\n # Step 2: average the word vectors. You can loop over the words in the list \"words\".\n for w in words:\n avg += word_to_vec_map[w]\n\n avg = avg / len(words)\n\n\n return avg",
"def sif_embeddings(sentences, model, vocab_freq, alpha=1e-3):\n\n vlookup = vocab_freq # Gives us access to word index and count\n vectors = model # Gives us access to word vectors\n size = model.vector_size # Embedding size\n\n Z = sum(vlookup.values())\n\n output = []\n\n # Iterate all sentences\n for s in sentences:\n v = np.zeros(size, dtype=REAL) # Summary vector\n # Iterate all words\n count = 0\n for w in s.split():\n # A word must be present in the vocabulary\n if w in vectors and w in vlookup:\n v += (alpha/(alpha + (vlookup[w] / Z))) * vectors[w]\n count += 1\n if count > 0:\n v = v/count\n output.append(v)\n return np.column_stack(tuple(output)).astype(REAL)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Deletes the created test project(s) on GitHub
|
def clean_github(self):
# set url on project to be able to delete
dbsession = db_session()
dbsession.query(Project).filter(Project.id == self.pjid) \
.first().repo_url = GITHUB_URL + "/" + GITHUB_USER + "/" + REMOTE_REPO_NAME
dbsession.commit()
# Clean github repository
arg = {'project_id': self.pjid, 'repo_name': REMOTE_REPO_NAME}
self.app.delete("/" + constants.WORKSPACES + "/" + self.wsid + "/" + constants.GIT + "/delete",
headers={'Content-Type': 'application/json'},
data=json.dumps(arg))
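
As a small, self-contained sketch of the delete call this teardown issues: the route segments and payload keys come from the snippet above, while the concrete values (and the strings standing in for constants.WORKSPACES and constants.GIT) are placeholders, not the project's real configuration.

import json

# Placeholder values; the real ones come from the test fixture and constants module.
WORKSPACES, GIT = "workspaces", "git"
wsid, pjid, REMOTE_REPO_NAME = "1", "42", "tng-test-repo"

url = "/" + WORKSPACES + "/" + wsid + "/" + GIT + "/delete"
payload = json.dumps({"project_id": pjid, "repo_name": REMOTE_REPO_NAME})
# self.app.delete(url, headers={"Content-Type": "application/json"}, data=payload)
print(url)      # /workspaces/1/git/delete
print(payload)  # {"project_id": "42", "repo_name": "tng-test-repo"}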
|
[
"def test_delete_projects(self, logger, rbac_test_data, rw_conman_proxy):\n projects_test_data = rbac_test_data['projects']\n\n # Delete the projects\n for project in projects_test_data:\n logger.debug('Deleting project {}'.format(project))\n rift.auto.mano.delete_project(rw_conman_proxy, project)",
"def test_delete_multiple_projects_success(\n self, mock_config_load, mock_kfp_client, mock_custom_objects_api\n ):\n project_id_1 = util.MOCK_UUID_1\n project_id_2 = util.MOCK_UUID_2\n\n rv = TEST_CLIENT.post(\n \"/projects/deleteprojects\", json=[project_id_1, project_id_2]\n )\n result = rv.json()\n\n expected = {\"message\": \"Successfully removed projects\"}\n self.assertDictEqual(expected, result)\n self.assertEqual(rv.status_code, 200)\n\n mock_custom_objects_api.assert_any_call()\n mock_kfp_client.assert_any_call(host=\"http://ml-pipeline.kubeflow:8888\")\n mock_config_load.assert_any_call()",
"def test_api_v3_projects_project_public_id_delete(self):\n pass",
"def delete(repo):\n click.echo('Destroying repo %s' % repo.home)\n click.echo('Deleted!')",
"def test_remove_project_for_task(self):\n pass",
"def test_delete_stack_domain_project(self):\r\n\r\n self._stub_domain_admin_client()\r\n self.mock_admin_client.projects = self.m.CreateMockAnything()\r\n self.mock_admin_client.projects.delete(project='aprojectid')\r\n self.mock_admin_client.projects.delete(project='aprojectid').AndRaise(\r\n kc_exception.NotFound)\r\n self.m.ReplayAll()\r\n\r\n ctx = utils.dummy_context()\r\n ctx.trust_id = None\r\n heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)\r\n heat_ks_client.delete_stack_domain_project(project_id='aprojectid')\r\n # Second delete will raise ignored NotFound\r\n heat_ks_client.delete_stack_domain_project(project_id='aprojectid')",
"def delete_project(self):\n try:\n os.remove(self.PROJECT_FILE)\n except FileNotFoundError:\n print(\n '{warning}No project exists.{reset}'\n .format(\n warning=Fore.WARNING,\n reset=Style.RESET_ALL,\n )\n )\n sys.exit()\n except:\n print(\n '{fail}Unable to delete the project.{reset}'\n .format(\n fail=Fore.FAIL,\n reset=Style.RESET_ALL,\n )\n )\n sys.exit()",
"def test_delete(self):\n\n n = ProjectCode.objects.count()\n\n # Get the first project code\n code = ProjectCode.objects.first()\n\n # Delete it\n self.delete(\n reverse('api-project-code-detail', kwargs={'pk': code.pk}),\n expected_code=204\n )\n\n # Check it is gone\n self.assertEqual(ProjectCode.objects.count(), n - 1)",
"def test_superuser_can_delete_project(self):\n self.assertEqual(Project.objects.count(), 2)\n self.client.login(username=self.super_username, password=self.password)\n url = '{}{}/'.format(self.url, self.project2.id)\n response = self.client.delete(url, format='json')\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(Project.objects.count(), 1)",
"def test_delete_stack_domain_project(self):\n\n self._stub_domain_admin_client()\n self.mock_admin_client.projects = self.m.CreateMockAnything()\n dummy = self.m.CreateMockAnything()\n dummy.id = 'aproject123'\n dummy.domain_id = 'adomain123'\n dummy.delete().AndReturn(None)\n self.mock_admin_client.projects.get(project='aprojectid').AndReturn(\n dummy)\n self.m.ReplayAll()\n\n ctx = utils.dummy_context()\n ctx.trust_id = None\n heat_ks_client = heat_keystoneclient.KeystoneClient(ctx)\n heat_ks_client.delete_stack_domain_project(project_id='aprojectid')",
"def test_delete_project_success(\n self,\n mock_config_load,\n mock_kfp_client,\n mock_custom_objects_api,\n ):\n project_id = util.MOCK_UUID_1\n\n rv = TEST_CLIENT.delete(f\"/projects/{project_id}\")\n result = rv.json()\n\n expected = {\"message\": \"Project deleted\"}\n self.assertDictEqual(expected, result)\n\n mock_custom_objects_api.assert_any_call()\n mock_kfp_client.assert_any_call(host=\"http://ml-pipeline.kubeflow:8888\")\n mock_config_load.assert_any_call()",
"def test_project_create_list_delete(self):\n\n print(\"Run 'test_project_create_list_delete'\")\n\n perunid = self.__uuid()\n\n denbi_project = self.ks.projects_create(perunid)\n\n # check internal project list\n denbi_project_map = self.ks.denbi_project_map\n self.assertTrue(perunid in denbi_project_map,\n \"Project with PerunId '\" + perunid + \"' does not exists in local project map.\")\n\n # check keystone project list\n denbi_project_map = self.ks.projects_map()\n self.assertTrue(perunid in denbi_project_map, \"Project with PerunId '\" + perunid + \"' does not exists.\")\n\n # delete previous created project\n self.ks.projects_delete(perunid)\n\n # project should still exists but marked as deleted\n self.assertTrue(perunid in denbi_project_map, \"Project with PerunId '\" + perunid + \"' does not exists.\")\n tmp = denbi_project_map[perunid]\n self.assertTrue(tmp['scratched'],\n \"Project with PerunId '\" + perunid + \"' not marked as deleted (but should be).\")\n\n # terminate previous marked project\n self.ks.projects_terminate(denbi_project['perun_id'])\n\n # check internal project list\n denbi_project_map = self.ks.denbi_project_map\n self.assertFalse(perunid in denbi_project_map,\n \"Project with PerunId '\" + perunid + \"' does exists in local project map.\")\n\n # check keystone project list\n denbi_project_map = self.ks.projects_map()\n self.assertFalse(perunid in denbi_project_map, \"Project with PerunId '\" + perunid + \"' does exists.\")",
"def project_function(testdir_session):\n project = Project(testdir_session)\n yield project\n project.remove()",
"def delete_project(conn, id):\n sql = 'DELETE FROM projects WHERE id=?'\n cur = conn.cursor()\n cur.execute(sql, (id,))\n conn.commit()",
"def test_delete(self):\r\n p = Project(slug=\"rm\")\r\n p.name = \"RM me\"\r\n p.source_language = self.language_en\r\n p.save()\r\n Gtranslate.objects.create(project=p)\r\n p.delete()\r\n self.assertEquals(Gtranslate.objects.all().count(), 0)",
"def test_Project_delete(self):\n my_model = ProjectF.create()\n\n my_model.delete()\n\n # check if deleted\n self.assertTrue(my_model.pk is None)",
"def delete_project(proj):\n remove(join(mcdatapath(), proj.name+'.db'))",
"def test_repo_delete_git_hook(self):\n pass",
"def test_delete_multiple_projects_at_least_one_project_error(self):\n rv = TEST_CLIENT.post(\"/projects/deleteprojects\", json=[])\n result = rv.json()\n\n expected = {\n \"message\": \"inform at least one project\",\n \"code\": \"MissingRequiredProjectId\",\n }\n self.assertDictEqual(expected, result)\n self.assertEqual(rv.status_code, 400)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Create a stack of orthographic plots with optional overlays. Use mask_image and/or threshold_image to preprocess the images to be overlaid and to display the overlays in a given range. See the wiki examples. Example >>> import ants >>> mni = ants.image_read(ants.get_data('mni')) >>> ch2 = ants.image_read(ants.get_data('ch2')) >>> ants.plot_ortho_stack([mni,mni,mni])
|
def plot_ortho_stack(
images,
overlays=None,
reorient=True,
# xyz arguments
xyz=None,
xyz_lines=False,
xyz_color="red",
xyz_alpha=0.6,
xyz_linewidth=2,
xyz_pad=5,
# base image arguments
cmap="Greys_r",
alpha=1,
# overlay arguments
overlay_cmap="jet",
overlay_alpha=0.9,
# background arguments
black_bg=True,
bg_thresh_quant=0.01,
bg_val_quant=0.99,
# scale/crop/domain arguments
crop=False,
scale=False,
domain_image_map=None,
# title arguments
title=None,
titlefontsize=24,
title_dx=0,
title_dy=0,
    # 4th panel text arguments
text=None,
textfontsize=24,
textfontcolor="white",
text_dx=0,
text_dy=0,
# save & size arguments
filename=None,
dpi=500,
figsize=1.0,
colpad=0,
rowpad=0,
transpose=False,
transparent=True,
orient_labels=True,
):
def mirror_matrix(x):
return x[::-1, :]
def rotate270_matrix(x):
return mirror_matrix(x.T)
def reorient_slice(x, axis):
return rotate270_matrix(x)
# need this hack because of a weird NaN warning from matplotlib with overlays
warnings.simplefilter("ignore")
n_images = len(images)
# handle `image` argument
for i in range(n_images):
if isinstance(images[i], str):
images[i] = iio2.image_read(images[i])
if not isinstance(images[i], iio.ANTsImage):
raise ValueError("image argument must be an ANTsImage")
if images[i].dimension != 3:
raise ValueError("Input image must have 3 dimensions!")
if overlays is None:
overlays = [None] * n_images
# handle `overlay` argument
for i in range(n_images):
if overlays[i] is not None:
if isinstance(overlays[i], str):
overlays[i] = iio2.image_read(overlays[i])
if not isinstance(overlays[i], iio.ANTsImage):
raise ValueError("overlay argument must be an ANTsImage")
if overlays[i].components > 1:
raise ValueError("overlays[i] cannot have more than one voxel component")
if overlays[i].dimension != 3:
raise ValueError("Overlay image must have 3 dimensions!")
if not iio.image_physical_space_consistency(images[i], overlays[i]):
overlays[i] = reg.resample_image_to_target(
overlays[i], images[i], interp_type="linear"
)
for i in range(1, n_images):
if not iio.image_physical_space_consistency(images[0], images[i]):
images[i] = reg.resample_image_to_target(
                images[i], images[0], interp_type="linear"
)
# reorient images
if reorient != False:
if reorient == True:
reorient = "RPI"
for i in range(n_images):
images[i] = images[i].reorient_image2(reorient)
if overlays[i] is not None:
overlays[i] = overlays[i].reorient_image2(reorient)
# handle `slices` argument
if xyz is None:
xyz = [int(s / 2) for s in images[0].shape]
for i in range(3):
if xyz[i] is None:
xyz[i] = int(images[0].shape[i] / 2)
# resample image if spacing is very unbalanced
    spacing = list(images[0].spacing)
if (max(spacing) / min(spacing)) > 3.0:
new_spacing = (1, 1, 1)
for i in range(n_images):
images[i] = images[i].resample_image(tuple(new_spacing))
if overlays[i] is not None:
overlays[i] = overlays[i].resample_image(tuple(new_spacing))
xyz = [
int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)
]
# potentially crop image
if crop:
for i in range(n_images):
plotmask = images[i].get_mask(cleanup=0)
if plotmask.max() == 0:
plotmask += 1
images[i] = images[i].crop_image(plotmask)
if overlays[i] is not None:
overlays[i] = overlays[i].crop_image(plotmask)
# pad images
for i in range(n_images):
if i == 0:
images[i], lowpad, uppad = images[i].pad_image(return_padvals=True)
else:
images[i] = images[i].pad_image()
if overlays[i] is not None:
overlays[i] = overlays[i].pad_image()
xyz = [v + l for v, l in zip(xyz, lowpad)]
# handle `domain_image_map` argument
if domain_image_map is not None:
if isinstance(domain_image_map, iio.ANTsImage):
tx = tio2.new_ants_transform(
precision="float", transform_type="AffineTransform", dimension=3
)
for i in range(n_images):
images[i] = tio.apply_ants_transform_to_image(
tx, images[i], domain_image_map
)
if overlays[i] is not None:
overlays[i] = tio.apply_ants_transform_to_image(
tx, overlays[i], domain_image_map, interpolation="linear"
)
elif isinstance(domain_image_map, (list, tuple)):
# expect an image and transformation
if len(domain_image_map) != 2:
raise ValueError("domain_image_map list or tuple must have length == 2")
dimg = domain_image_map[0]
if not isinstance(dimg, iio.ANTsImage):
raise ValueError("domain_image_map first entry should be ANTsImage")
tx = domain_image_map[1]
for i in range(n_images):
images[i] = reg.apply_transforms(dimg, images[i], transform_list=tx)
if overlays[i] is not None:
overlays[i] = reg.apply_transforms(
dimg, overlays[i], transform_list=tx, interpolator="linear"
)
# potentially find dynamic range
if scale == True:
vmins = []
vmaxs = []
for i in range(n_images):
vmin, vmax = images[i].quantile((0.05, 0.95))
vmins.append(vmin)
vmaxs.append(vmax)
elif isinstance(scale, (list, tuple)):
if len(scale) != 2:
raise ValueError(
"scale argument must be boolean or list/tuple with two values"
)
vmins = []
vmaxs = []
for i in range(n_images):
vmin, vmax = images[i].quantile(scale)
vmins.append(vmin)
vmaxs.append(vmax)
else:
vmin = None
vmax = None
if not transpose:
nrow = n_images
ncol = 3
else:
nrow = 3
ncol = n_images
fig = plt.figure(figsize=((ncol + 1) * 2.5 * figsize, (nrow + 1) * 2.5 * figsize))
if title is not None:
basey = 0.93
basex = 0.5
fig.suptitle(
title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy
)
if (colpad > 0) and (rowpad > 0):
bothgridpad = max(colpad, rowpad)
colpad = 0
rowpad = 0
else:
bothgridpad = 0.0
gs = gridspec.GridSpec(
nrow,
ncol,
wspace=bothgridpad,
hspace=0.0,
top=1.0 - 0.5 / (nrow + 1),
bottom=0.5 / (nrow + 1) + colpad,
left=0.5 / (ncol + 1) + rowpad,
right=1 - 0.5 / (ncol + 1),
)
    # convert images to numpy arrays and record overlay intensity ranges
    vminols = []
    vmaxols = []
for i in range(n_images):
images[i] = images[i].numpy()
if overlays[i] is not None:
            vminols.append(overlays[i].min())
            vmaxols.append(overlays[i].max())
overlays[i] = overlays[i].numpy()
if overlays[i].dtype not in ["uint8", "uint32"]:
overlays[i][np.abs(overlays[i]) == 0] = np.nan
####################
####################
for i in range(n_images):
yz_slice = reorient_slice(images[i][xyz[0], :, :], 0)
if not transpose:
ax = plt.subplot(gs[i, 0])
else:
ax = plt.subplot(gs[0, i])
ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlays[i] is not None:
yz_overlay = reorient_slice(overlays[i][xyz[0], :, :], 0)
ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap,
vmin=vminols[i], vmax=vmaxols[i])
if xyz_lines:
# add lines
l = mlines.Line2D(
[yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],
[xyz_pad, yz_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, yz_slice.shape[1] - xyz_pad],
[yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
if orient_labels:
ax.text(
0.5,
0.98,
"S",
horizontalalignment="center",
verticalalignment="top",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.5,
0.02,
"I",
horizontalalignment="center",
verticalalignment="bottom",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.98,
0.5,
"A",
horizontalalignment="right",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.02,
0.5,
"P",
horizontalalignment="left",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.axis("off")
####################
####################
xz_slice = reorient_slice(images[i][:, xyz[1], :], 1)
if not transpose:
ax = plt.subplot(gs[i, 1])
else:
ax = plt.subplot(gs[1, i])
ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlays[i] is not None:
xz_overlay = reorient_slice(overlays[i][:, xyz[1], :], 1)
ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap,
vmin=vminols[i], vmax=vmaxols[i])
if xyz_lines:
# add lines
l = mlines.Line2D(
[xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],
[xyz_pad, xz_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xz_slice.shape[1] - xyz_pad],
[xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
if orient_labels:
ax.text(
0.5,
0.98,
"A",
horizontalalignment="center",
verticalalignment="top",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.5,
0.02,
"P",
horizontalalignment="center",
verticalalignment="bottom",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.98,
0.5,
"L",
horizontalalignment="right",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.02,
0.5,
"R",
horizontalalignment="left",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.axis("off")
####################
####################
xy_slice = reorient_slice(images[i][:, :, xyz[2]], 2)
if not transpose:
ax = plt.subplot(gs[i, 2])
else:
ax = plt.subplot(gs[2, i])
ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlays[i] is not None:
xy_overlay = reorient_slice(overlays[i][:, :, xyz[2]], 2)
ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap,
vmin=vminols[i], vmax=vmaxols[i])
if xyz_lines:
# add lines
l = mlines.Line2D(
[xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],
[xyz_pad, xy_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xy_slice.shape[1] - xyz_pad],
[xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
if orient_labels:
ax.text(
0.5,
0.98,
"A",
horizontalalignment="center",
verticalalignment="top",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.5,
0.02,
"P",
horizontalalignment="center",
verticalalignment="bottom",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.98,
0.5,
"L",
horizontalalignment="right",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.02,
0.5,
"R",
horizontalalignment="left",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.axis("off")
####################
####################
if filename is not None:
plt.savefig(filename, dpi=dpi, transparent=transparent)
plt.close(fig)
else:
plt.show()
# turn warnings back to default
warnings.simplefilter("default")
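
The docstring example, expanded into a short usage sketch. It assumes ANTsPy and its bundled sample data are available; the threshold value and the output path are arbitrary illustrative choices, and threshold_image is just one of the preprocessing helpers the docstring mentions.

import ants

# Example from the docstring: stack the same template three times.
mni = ants.image_read(ants.get_data("mni"))
ants.plot_ortho_stack([mni, mni, mni])

# With overlays: a simple binary overlay built with threshold_image, plus a saved figure.
seg = ants.threshold_image(mni, 80, mni.max())
ants.plot_ortho_stack(
    [mni, mni, mni],
    overlays=[seg, seg, seg],
    filename="/tmp/ortho_stack.png",
)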
|
[
"def plot_ortho(\n image,\n overlay=None,\n reorient=True,\n blend=False,\n # xyz arguments\n xyz=None,\n xyz_lines=True,\n xyz_color=\"red\",\n xyz_alpha=0.6,\n xyz_linewidth=2,\n xyz_pad=5,\n orient_labels=True,\n # base image arguments\n alpha=1,\n cmap=\"Greys_r\",\n # overlay arguments\n overlay_cmap=\"jet\",\n overlay_alpha=0.9,\n cbar=False,\n cbar_length=0.8,\n cbar_dx=0.0,\n cbar_vertical=True,\n # background arguments\n black_bg=True,\n bg_thresh_quant=0.01,\n bg_val_quant=0.99,\n # scale/crop/domain arguments\n crop=False,\n scale=False,\n domain_image_map=None,\n # title arguments\n title=None,\n titlefontsize=24,\n title_dx=0,\n title_dy=0,\n # 4th panel text arguemnts\n text=None,\n textfontsize=24,\n textfontcolor=\"white\",\n text_dx=0,\n text_dy=0,\n # save & size arguments\n filename=None,\n dpi=500,\n figsize=1.0,\n flat=False,\n transparent=True,\n resample=False,\n):\n\n def mirror_matrix(x):\n return x[::-1, :]\n\n def rotate270_matrix(x):\n return mirror_matrix(x.T)\n\n def reorient_slice(x, axis):\n return rotate270_matrix(x)\n\n # need this hack because of a weird NaN warning from matplotlib with overlays\n warnings.simplefilter(\"ignore\")\n\n # handle `image` argument\n if isinstance(image, str):\n image = iio2.image_read(image)\n if not isinstance(image, iio.ANTsImage):\n raise ValueError(\"image argument must be an ANTsImage\")\n if image.dimension != 3:\n raise ValueError(\"Input image must have 3 dimensions!\")\n\n # handle `overlay` argument\n if overlay is not None:\n vminol = overlay.min()\n vmaxol = overlay.max()\n if isinstance(overlay, str):\n overlay = iio2.image_read(overlay)\n if not isinstance(overlay, iio.ANTsImage):\n raise ValueError(\"overlay argument must be an ANTsImage\")\n if overlay.components > 1:\n raise ValueError(\"overlay cannot have more than one voxel component\")\n if overlay.dimension != 3:\n raise ValueError(\"Overlay image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(image, overlay):\n overlay = reg.resample_image_to_target(overlay, image, interp_type=\"linear\")\n\n if blend:\n if alpha == 1:\n alpha = 0.5\n image = image * alpha + overlay * (1 - alpha)\n overlay = None\n alpha = 1.0\n\n if image.pixeltype not in {\"float\", \"double\"}:\n scale = False # turn off scaling if image is discrete\n\n # reorient images\n if reorient != False:\n if reorient == True:\n reorient = \"RPI\"\n image = image.reorient_image2(\"RPI\")\n if overlay is not None:\n overlay = overlay.reorient_image2(\"RPI\")\n\n # handle `slices` argument\n if xyz is None:\n xyz = [int(s / 2) for s in image.shape]\n for i in range(3):\n if xyz[i] is None:\n xyz[i] = int(image.shape[i] / 2)\n\n # resample image if spacing is very unbalanced\n spacing = [s for i, s in enumerate(image.spacing)]\n if (max(spacing) / min(spacing)) > 3.0 and resample:\n new_spacing = (1, 1, 1)\n image = image.resample_image(tuple(new_spacing))\n if overlay is not None:\n overlay = overlay.resample_image(tuple(new_spacing))\n xyz = [\n int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)\n ]\n\n # potentially crop image\n if crop:\n plotmask = image.get_mask(cleanup=0)\n if plotmask.max() == 0:\n plotmask += 1\n image = image.crop_image(plotmask)\n if overlay is not None:\n overlay = overlay.crop_image(plotmask)\n\n # pad images\n image, lowpad, uppad = image.pad_image(return_padvals=True)\n xyz = [v + l for v, l in zip(xyz, lowpad)]\n if overlay is not None:\n overlay = overlay.pad_image()\n\n # handle `domain_image_map` argument\n 
if domain_image_map is not None:\n if isinstance(domain_image_map, iio.ANTsImage):\n tx = tio2.new_ants_transform(\n precision=\"float\",\n transform_type=\"AffineTransform\",\n dimension=image.dimension,\n )\n image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)\n if overlay is not None:\n overlay = tio.apply_ants_transform_to_image(\n tx, overlay, domain_image_map, interpolation=\"linear\"\n )\n elif isinstance(domain_image_map, (list, tuple)):\n # expect an image and transformation\n if len(domain_image_map) != 2:\n raise ValueError(\"domain_image_map list or tuple must have length == 2\")\n\n dimg = domain_image_map[0]\n if not isinstance(dimg, iio.ANTsImage):\n raise ValueError(\"domain_image_map first entry should be ANTsImage\")\n\n tx = domain_image_map[1]\n image = reg.apply_transforms(dimg, image, transform_list=tx)\n if overlay is not None:\n overlay = reg.apply_transforms(\n dimg, overlay, transform_list=tx, interpolator=\"linear\"\n )\n\n ## single-channel images ##\n if image.components == 1:\n\n # potentially find dynamic range\n if scale == True:\n vmin, vmax = image.quantile((0.05, 0.95))\n elif isinstance(scale, (list, tuple)):\n if len(scale) != 2:\n raise ValueError(\n \"scale argument must be boolean or list/tuple with two values\"\n )\n vmin, vmax = image.quantile(scale)\n else:\n vmin = None\n vmax = None\n\n if not flat:\n nrow = 2\n ncol = 2\n else:\n nrow = 1\n ncol = 3\n\n fig = plt.figure(figsize=(9 * figsize, 9 * figsize))\n if title is not None:\n basey = 0.88 if not flat else 0.66\n basex = 0.5\n fig.suptitle(\n title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy\n )\n\n gs = gridspec.GridSpec(\n nrow,\n ncol,\n wspace=0.0,\n hspace=0.0,\n top=1.0 - 0.5 / (nrow + 1),\n bottom=0.5 / (nrow + 1),\n left=0.5 / (ncol + 1),\n right=1 - 0.5 / (ncol + 1),\n )\n\n # pad image to have isotropic array dimensions\n image = image.numpy()\n if overlay is not None:\n overlay = overlay.numpy()\n if overlay.dtype not in [\"uint8\", \"uint32\"]:\n overlay[np.abs(overlay) == 0] = np.nan\n\n yz_slice = reorient_slice(image[xyz[0], :, :], 0)\n ax = plt.subplot(gs[0, 0])\n ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n yz_overlay = reorient_slice(overlay[xyz[0], :, :], 0)\n ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol )\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],\n [xyz_pad, yz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice.shape[1] - xyz_pad],\n [yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"S\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"I\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"A\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"P\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n 
transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n xz_slice = reorient_slice(image[:, xyz[1], :], 1)\n ax = plt.subplot(gs[0, 1])\n ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xz_overlay = reorient_slice(overlay[:, xyz[1], :], 1)\n ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol )\n\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],\n [xyz_pad, xz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice.shape[1] - xyz_pad],\n [xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"S\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"I\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n xy_slice = reorient_slice(image[:, :, xyz[2]], 2)\n if not flat:\n ax = plt.subplot(gs[1, 1])\n else:\n ax = plt.subplot(gs[0, 2])\n im = ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xy_overlay = reorient_slice(overlay[:, :, xyz[2]], 2)\n im = ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol)\n\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],\n [xyz_pad, xy_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice.shape[1] - xyz_pad],\n [xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"A\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"P\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n if not flat:\n # empty corner\n ax = plt.subplot(gs[1, 0])\n if text is not None:\n # add text\n left, width = 0.25, 0.5\n bottom, height = 0.25, 0.5\n right = left + width\n top = bottom + height\n ax.text(\n 0.5 * (left + right) + text_dx,\n 0.5 * (bottom + top) + text_dy,\n text,\n horizontalalignment=\"center\",\n verticalalignment=\"center\",\n fontsize=textfontsize,\n color=textfontcolor,\n 
transform=ax.transAxes,\n )\n # ax.text(0.5, 0.5)\n ax.imshow(np.zeros(image.shape[:-1]), cmap=\"Greys_r\")\n ax.axis(\"off\")\n\n if cbar:\n cbar_start = (1 - cbar_length) / 2\n if cbar_vertical:\n cax = fig.add_axes([0.9 + cbar_dx, cbar_start, 0.03, cbar_length])\n cbar_orient = \"vertical\"\n else:\n cax = fig.add_axes([cbar_start, 0.08 + cbar_dx, cbar_length, 0.03])\n cbar_orient = \"horizontal\"\n fig.colorbar(im, cax=cax, orientation=cbar_orient)\n\n ## multi-channel images ##\n elif image.components > 1:\n raise ValueError(\"Multi-channel images not currently supported!\")\n\n if filename is not None:\n plt.savefig(filename, dpi=dpi, transparent=transparent)\n plt.close(fig)\n else:\n plt.show()\n\n # turn warnings back to default\n warnings.simplefilter(\"default\")",
"def imageStack_2_subplots(image_stack, axis=0):\n image_stack = np.rollaxis(image_stack, axis)\n N_subplots = image_stack.shape[0]\n R = math.floor(math.sqrt(N_subplots))\n C = math.ceil(N_subplots / R)\n fig, axes = plt.subplots(R, C)\n axes = axes.ravel()\n for ax, img in zip(axes, image_stack):\n ax.imshow(img)\n return fig, axes",
"def plot_ortho_double(\n image,\n image2,\n overlay=None,\n overlay2=None,\n reorient=True,\n # xyz arguments\n xyz=None,\n xyz_lines=True,\n xyz_color=\"red\",\n xyz_alpha=0.6,\n xyz_linewidth=2,\n xyz_pad=5,\n # base image arguments\n cmap=\"Greys_r\",\n alpha=1,\n cmap2=\"Greys_r\",\n alpha2=1,\n # overlay arguments\n overlay_cmap=\"jet\",\n overlay_alpha=0.9,\n overlay_cmap2=\"jet\",\n overlay_alpha2=0.9,\n # background arguments\n black_bg=True,\n bg_thresh_quant=0.01,\n bg_val_quant=0.99,\n # scale/crop/domain arguments\n crop=False,\n scale=False,\n crop2=False,\n scale2=True,\n domain_image_map=None,\n # title arguments\n title=None,\n titlefontsize=24,\n title_dx=0,\n title_dy=0,\n # 4th panel text arguemnts\n text=None,\n textfontsize=24,\n textfontcolor=\"white\",\n text_dx=0,\n text_dy=0,\n # save & size arguments\n filename=None,\n dpi=500,\n figsize=1.0,\n flat=True,\n transpose=False,\n transparent=True,\n):\n\n def mirror_matrix(x):\n return x[::-1, :]\n\n def rotate270_matrix(x):\n return mirror_matrix(x.T)\n\n def reorient_slice(x, axis):\n return rotate270_matrix(x)\n\n # need this hack because of a weird NaN warning from matplotlib with overlays\n warnings.simplefilter(\"ignore\")\n\n # handle `image` argument\n if isinstance(image, str):\n image = iio2.image_read(image)\n if not isinstance(image, iio.ANTsImage):\n raise ValueError(\"image argument must be an ANTsImage\")\n if image.dimension != 3:\n raise ValueError(\"Input image must have 3 dimensions!\")\n\n if isinstance(image2, str):\n image2 = iio2.image_read(image2)\n if not isinstance(image2, iio.ANTsImage):\n raise ValueError(\"image2 argument must be an ANTsImage\")\n if image2.dimension != 3:\n raise ValueError(\"Input image2 must have 3 dimensions!\")\n\n # handle `overlay` argument\n if overlay is not None:\n if isinstance(overlay, str):\n overlay = iio2.image_read(overlay)\n if not isinstance(overlay, iio.ANTsImage):\n raise ValueError(\"overlay argument must be an ANTsImage\")\n if overlay.components > 1:\n raise ValueError(\"overlay cannot have more than one voxel component\")\n if overlay.dimension != 3:\n raise ValueError(\"Overlay image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(image, overlay):\n overlay = reg.resample_image_to_target(overlay, image, interp_type=\"linear\")\n\n if overlay2 is not None:\n if isinstance(overlay2, str):\n overlay2 = iio2.image_read(overlay2)\n if not isinstance(overlay2, iio.ANTsImage):\n raise ValueError(\"overlay2 argument must be an ANTsImage\")\n if overlay2.components > 1:\n raise ValueError(\"overlay2 cannot have more than one voxel component\")\n if overlay2.dimension != 3:\n raise ValueError(\"Overlay2 image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(image2, overlay2):\n overlay2 = reg.resample_image_to_target(\n overlay2, image2, interp_type=\"linear\"\n )\n\n if not iio.image_physical_space_consistency(image, image2):\n image2 = reg.resample_image_to_target(image2, image, interp_type=\"linear\")\n\n if image.pixeltype not in {\"float\", \"double\"}:\n scale = False # turn off scaling if image is discrete\n\n if image2.pixeltype not in {\"float\", \"double\"}:\n scale2 = False # turn off scaling if image is discrete\n\n # reorient images\n if reorient != False:\n if reorient == True:\n reorient = \"RPI\"\n image = image.reorient_image2(reorient)\n image2 = image2.reorient_image2(reorient)\n if overlay is not None:\n overlay = overlay.reorient_image2(reorient)\n if overlay2 is not None:\n overlay2 
= overlay2.reorient_image2(reorient)\n\n # handle `slices` argument\n if xyz is None:\n xyz = [int(s / 2) for s in image.shape]\n for i in range(3):\n if xyz[i] is None:\n xyz[i] = int(image.shape[i] / 2)\n\n # resample image if spacing is very unbalanced\n spacing = [s for i, s in enumerate(image.spacing)]\n if (max(spacing) / min(spacing)) > 3.0:\n new_spacing = (1, 1, 1)\n image = image.resample_image(tuple(new_spacing))\n image2 = image2.resample_image_to_target(tuple(new_spacing))\n if overlay is not None:\n overlay = overlay.resample_image(tuple(new_spacing))\n if overlay2 is not None:\n overlay2 = overlay2.resample_image(tuple(new_spacing))\n xyz = [\n int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)\n ]\n\n # pad images\n image, lowpad, uppad = image.pad_image(return_padvals=True)\n image2, lowpad2, uppad2 = image2.pad_image(return_padvals=True)\n xyz = [v + l for v, l in zip(xyz, lowpad)]\n if overlay is not None:\n overlay = overlay.pad_image()\n if overlay2 is not None:\n overlay2 = overlay2.pad_image()\n\n # handle `domain_image_map` argument\n if domain_image_map is not None:\n if isinstance(domain_image_map, iio.ANTsImage):\n tx = tio2.new_ants_transform(\n precision=\"float\",\n transform_type=\"AffineTransform\",\n dimension=image.dimension,\n )\n image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)\n image2 = tio.apply_ants_transform_to_image(tx, image2, domain_image_map)\n if overlay is not None:\n overlay = tio.apply_ants_transform_to_image(\n tx, overlay, domain_image_map, interpolation=\"linear\"\n )\n if overlay2 is not None:\n overlay2 = tio.apply_ants_transform_to_image(\n tx, overlay2, domain_image_map, interpolation=\"linear\"\n )\n elif isinstance(domain_image_map, (list, tuple)):\n # expect an image and transformation\n if len(domain_image_map) != 2:\n raise ValueError(\"domain_image_map list or tuple must have length == 2\")\n\n dimg = domain_image_map[0]\n if not isinstance(dimg, iio.ANTsImage):\n raise ValueError(\"domain_image_map first entry should be ANTsImage\")\n\n tx = domain_image_map[1]\n image = reg.apply_transforms(dimg, image, transform_list=tx)\n if overlay is not None:\n overlay = reg.apply_transforms(\n dimg, overlay, transform_list=tx, interpolator=\"linear\"\n )\n\n image2 = reg.apply_transforms(dimg, image2, transform_list=tx)\n if overlay2 is not None:\n overlay2 = reg.apply_transforms(\n dimg, overlay2, transform_list=tx, interpolator=\"linear\"\n )\n\n ## single-channel images ##\n if image.components == 1:\n\n # potentially crop image\n if crop:\n plotmask = image.get_mask(cleanup=0)\n if plotmask.max() == 0:\n plotmask += 1\n image = image.crop_image(plotmask)\n if overlay is not None:\n overlay = overlay.crop_image(plotmask)\n\n if crop2:\n plotmask2 = image2.get_mask(cleanup=0)\n if plotmask2.max() == 0:\n plotmask2 += 1\n image2 = image2.crop_image(plotmask2)\n if overlay2 is not None:\n overlay2 = overlay2.crop_image(plotmask2)\n\n # potentially find dynamic range\n if scale == True:\n vmin, vmax = image.quantile((0.05, 0.95))\n elif isinstance(scale, (list, tuple)):\n if len(scale) != 2:\n raise ValueError(\n \"scale argument must be boolean or list/tuple with two values\"\n )\n vmin, vmax = image.quantile(scale)\n else:\n vmin = None\n vmax = None\n\n if scale2 == True:\n vmin2, vmax2 = image2.quantile((0.05, 0.95))\n elif isinstance(scale2, (list, tuple)):\n if len(scale2) != 2:\n raise ValueError(\n \"scale2 argument must be boolean or list/tuple with two values\"\n )\n vmin2, vmax2 = 
image2.quantile(scale2)\n else:\n vmin2 = None\n vmax2 = None\n\n if not flat:\n nrow = 2\n ncol = 4\n else:\n if not transpose:\n nrow = 2\n ncol = 3\n else:\n nrow = 3\n ncol = 2\n\n fig = plt.figure(\n figsize=((ncol + 1) * 2.5 * figsize, (nrow + 1) * 2.5 * figsize)\n )\n if title is not None:\n basey = 0.88 if not flat else 0.66\n basex = 0.5\n fig.suptitle(\n title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy\n )\n\n gs = gridspec.GridSpec(\n nrow,\n ncol,\n wspace=0.0,\n hspace=0.0,\n top=1.0 - 0.5 / (nrow + 1),\n bottom=0.5 / (nrow + 1),\n left=0.5 / (ncol + 1),\n right=1 - 0.5 / (ncol + 1),\n )\n\n # pad image to have isotropic array dimensions\n image = image.numpy()\n if overlay is not None:\n overlay = overlay.numpy()\n if overlay.dtype not in [\"uint8\", \"uint32\"]:\n overlay[np.abs(overlay) == 0] = np.nan\n\n image2 = image2.numpy()\n if overlay2 is not None:\n overlay2 = overlay2.numpy()\n if overlay2.dtype not in [\"uint8\", \"uint32\"]:\n overlay2[np.abs(overlay2) == 0] = np.nan\n\n ####################\n ####################\n yz_slice = reorient_slice(image[xyz[0], :, :], 0)\n ax = plt.subplot(gs[0, 0])\n ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n yz_overlay = reorient_slice(overlay[xyz[0], :, :], 0)\n ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],\n [xyz_pad, yz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice.shape[1] - xyz_pad],\n [yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n #######\n yz_slice2 = reorient_slice(image2[xyz[0], :, :], 0)\n if not flat:\n ax = plt.subplot(gs[0, 1])\n else:\n if not transpose:\n ax = plt.subplot(gs[1, 0])\n else:\n ax = plt.subplot(gs[0, 1])\n ax.imshow(yz_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)\n if overlay2 is not None:\n yz_overlay2 = reorient_slice(overlay2[xyz[0], :, :], 0)\n ax.imshow(yz_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice2.shape[0] - xyz[1], yz_slice2.shape[0] - xyz[1]],\n [xyz_pad, yz_slice2.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice2.shape[1] - xyz_pad],\n [yz_slice2.shape[1] - xyz[2], yz_slice2.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n ####################\n ####################\n\n xz_slice = reorient_slice(image[:, xyz[1], :], 1)\n if not flat:\n ax = plt.subplot(gs[0, 2])\n else:\n if not transpose:\n ax = plt.subplot(gs[0, 1])\n else:\n ax = plt.subplot(gs[1, 0])\n ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xz_overlay = reorient_slice(overlay[:, xyz[1], :], 1)\n ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],\n [xyz_pad, xz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice.shape[1] - xyz_pad],\n [xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],\n 
color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n #######\n xz_slice2 = reorient_slice(image2[:, xyz[1], :], 1)\n if not flat:\n ax = plt.subplot(gs[0, 3])\n else:\n ax = plt.subplot(gs[1, 1])\n ax.imshow(xz_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)\n if overlay is not None:\n xz_overlay2 = reorient_slice(overlay2[:, xyz[1], :], 1)\n ax.imshow(xz_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice2.shape[0] - xyz[0], xz_slice2.shape[0] - xyz[0]],\n [xyz_pad, xz_slice2.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice2.shape[1] - xyz_pad],\n [xz_slice2.shape[1] - xyz[2], xz_slice2.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n ####################\n ####################\n xy_slice = reorient_slice(image[:, :, xyz[2]], 2)\n if not flat:\n ax = plt.subplot(gs[1, 2])\n else:\n if not transpose:\n ax = plt.subplot(gs[0, 2])\n else:\n ax = plt.subplot(gs[2, 0])\n ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xy_overlay = reorient_slice(overlay[:, :, xyz[2]], 2)\n ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],\n [xyz_pad, xy_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice.shape[1] - xyz_pad],\n [xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n #######\n xy_slice2 = reorient_slice(image2[:, :, xyz[2]], 2)\n if not flat:\n ax = plt.subplot(gs[1, 3])\n else:\n if not transpose:\n ax = plt.subplot(gs[1, 2])\n else:\n ax = plt.subplot(gs[2, 1])\n ax.imshow(xy_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)\n if overlay is not None:\n xy_overlay2 = reorient_slice(overlay2[:, :, xyz[2]], 2)\n ax.imshow(xy_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice2.shape[0] - xyz[0], xy_slice2.shape[0] - xyz[0]],\n [xyz_pad, xy_slice2.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice2.shape[1] - xyz_pad],\n [xy_slice2.shape[1] - xyz[1], xy_slice2.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n ####################\n ####################\n\n if not flat:\n # empty corner\n ax = plt.subplot(gs[1, :2])\n if text is not None:\n # add text\n left, width = 0.25, 0.5\n bottom, height = 0.25, 0.5\n right = left + width\n top = bottom + height\n ax.text(\n 0.5 * (left + right) + text_dx,\n 0.5 * (bottom + top) + text_dy,\n text,\n horizontalalignment=\"center\",\n verticalalignment=\"center\",\n fontsize=textfontsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n # ax.text(0.5, 0.5)\n img_shape = list(image.shape[:-1])\n img_shape[1] *= 2\n ax.imshow(np.zeros(img_shape), cmap=\"Greys_r\")\n ax.axis(\"off\")\n\n ## multi-channel images ##\n elif image.components > 1:\n raise ValueError(\"Multi-channel images not currently supported!\")\n\n if filename is not None:\n plt.savefig(filename, dpi=dpi, 
transparent=transparent)\n plt.close(fig)\n else:\n plt.show()\n\n # turn warnings back to default\n warnings.simplefilter(\"default\")",
"def stack_plot(self, pvlim=None, **kwargs):\n from linetools.analysis import plots as ltap\n if pvlim is not None:\n vlim = pvlim\n else:\n vlim = self.vlim\n fig = ltap.stack_plot(self.list_of_abslines(), vlim=vlim, **kwargs)\n if fig is not None:\n return fig",
"def test_image_stack():\n\n directory = os.path.join(get_config()['test_dir'], 'dark_monitor')\n files = [os.path.join(directory, 'test_image_{}.fits'.format(str(i + 1))) for i in range(3)]\n\n image_stack, exptimes = pipeline_tools.image_stack(files)\n truth = np.zeros((3, 10, 10))\n truth[0, :, :] = 5.\n truth[1, :, :] = 10.\n truth[2, :, :] = 15.\n\n assert np.all(image_stack == truth)\n assert exptimes == [[10.5], [10.5], [10.5]]",
"def nh_ort1_make_stacks():\n \n do_force = False # Boolean: Do we force reloading of all of the images from FITS, or just restore from pkl?\n # Pkl (aka False) is faster. But if we have made changes to the core algorithms, must\n # reload from disk (aka True).\n \n stretch_percent = 90 \n stretch = astropy.visualization.PercentileInterval(stretch_percent) # PI(90) scales to 5th..95th %ile.\n \n reqids_haz = ['K1LR_HAZ00', 'K1LR_HAZ01', 'K1LR_HAZ02', 'K1LR_HAZ03', 'K1LR_HAZ04']\n reqid_field = 'K1LR_MU69ApprField_115d_L2_2017264'\n \n dir_data = '/Users/throop/Data/ORT1/throop/backplaned/'\n\n zoom = 4\n \n # Set the edge padding large enough s.t. all output stacks will be the same size.\n # This value is easy to compute: loop over all stacks, and take max of stack.calc_padding()[0]\n \n padding = 61 \n # Start up SPICE if needed\n \n if (sp.ktotal('ALL') == 0):\n sp.furnsh('kernels_kem_prime.tm')\n \n # Set the RA/Dec of MU69. We could look this up from SPICE but it changes slowly, so just keep it fixed for now.\n \n radec_mu69 = (4.794979838984583, -0.3641418801015417)\n \n # Load and stack the field images\n \n stack_field = image_stack(os.path.join(dir_data, reqid_field), do_force=do_force)\n stack_field.align(method = 'wcs', center = radec_mu69)\n img_field = stack_field.flatten(zoom=zoom, padding=padding)\n \n if do_force:\n stack_field.save()\n\n hbt.figsize((12,12))\n hbt.set_fontsize(15)\n \n for reqid in reqids_haz:\n stack_haz = image_stack(os.path.join(dir_data, reqid), do_force=do_force)\n stack_haz.align(method = 'wcs', center = radec_mu69)\n img_haz = stack_haz.flatten(zoom=zoom, padding=padding)\n\n if do_force:\n stack_haz.save()\n \n # Make the plot\n \n diff = img_haz - img_field\n diff_trim = hbt.trim_image(diff)\n plt.imshow(stretch(diff_trim))\n plt.title(f\"{reqid} - field, zoom = {zoom}\")\n\n # Save the stacked image as a FITS file\n \n file_out = os.path.join(dir_data, reqid, \"stack_n{}_z{}.fits\".format(stack_haz.size[0], zoom))\n hdu = fits.PrimaryHDU(stretch(diff_trim))\n hdu.writeto(file_out, overwrite=True)\n print(f'Wrote: {file_out}') \n \n # Save the stack as a PNG\n \n file_out_plot_stack = file_out.replace('.fits', '.png')\n plt.savefig(file_out_plot_stack, bbox_inches='tight')\n print(\"Wrote: {}\".format(file_out_plot_stack))\n\n # Display it \n # This must be done *after* the plt.savefig()\n \n plt.show()\n \n # Make a radial profile\n \n pos = np.array(np.shape(diff))/2\n (radius, profile) = get_radial_profile_circular(diff, pos=pos, width=1)\n \n hbt.figsize((10,8))\n hbt.set_fontsize(15)\n plt.plot(radius, profile)\n plt.xlim((0, 50*zoom))\n plt.ylim((-1,np.amax(profile)))\n plt.xlabel('Radius [pixels]')\n plt.title(f'Ring Radial Profile, {reqid}, zoom={zoom}')\n plt.ylabel('Median DN')\n plt.show()\n\n# =============================================================================\n# Calculate how many DN MU69 should be at encounter (K-20d, etc.)\n# Or alternatively, convert all of my DN values, to I/F values\n# =============================================================================\n\n # Convert DN values in array, to I/F values\n \n RSOLAR_LORRI_1X1 = 221999.98 # Diffuse sensitivity, LORRI 1X1. Units are (DN/s/pixel)/(erg/cm^2/s/A/sr)\n RSOLAR_LORRI_4X4 = 3800640.0 # Diffuse sensitivity, LORRI 1X1. Units are (DN/s/pixel)/(erg/cm^2/s/A/sr)\n \n C = profile # Get the DN values of the ring. 
Typical value is 1 DN.\n \n # Define the solar flux, from Hal's paper.\n \n FSOLAR_LORRI = 176.\t \t # We want to be sure to use LORRI value, not MVIC value!\n F_solar = FSOLAR_LORRI # Flux from Hal's paper\n \n RSOLAR = RSOLAR_LORRI_4X4\n \n # Calculate the MU69-Sun distance, in AU (or look it up). \n \n km2au = 1 / (u.au/u.km).to('1')\n \n et = stack_haz.t['et'][0]\n (st,lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'New Horizons')\n r_nh_mu69 = sp.vnorm(st[0:3]) * km2au # NH distance, in AU\n \n (st,lt) = sp.spkezr('MU69', et, 'J2000', 'LT', 'Sun')\n r_sun_mu69 = sp.vnorm(st[0:3]) * km2au # NH distance, in AU\n \n pixscale_km = (r_nh_mu69/km2au * (0.3*hbt.d2r / 256)) / zoom # km per pix (assuming 4x4)\n \n TEXP = stack_haz.t['exptime'][0]\n \n I = C / TEXP / RSOLAR # Could use RSOLAR, RJUPITER, or RPLUTO. All v similar, except for spectrum assumed.\n \n # Apply Hal's conversion formula from p. 7, to compute I/F and print it.\n \n IoF = math.pi * I * r_sun_mu69**2 / F_solar # Equation from Hal's paper\n \n plt.plot(radius * pixscale_km, IoF)\n plt.xlim((0, 50000))\n plt.ylim((-1e-7, 4e-7))\n# plt.ylim((0,np.amax(IoF)))\n# plt.yscale('log')\n plt.xlabel('Radius [km]')\n plt.title(f'Ring Radial Profile, {reqid}, zoom={zoom}')\n plt.ylabel('Median I/F')\n file_out_plot_profile = file_out.replace('.fits', '_profile.png')\n plt.savefig(file_out_plot_profile, bbox_inches='tight')\n plt.show()\n print(f'Wrote: {file_out_plot_profile}')\n \n # Write it to a table\n t = Table([radius, radius * pixscale_km, profile, IoF], names = ['RadiusPixels', 'RadiusKM', 'DN/pix', 'I/F'])\n file_out_table = file_out.replace('.fits', '_profile.txt')\n t.write(file_out_table, format='ascii', overwrite=True)\n print(\"Wrote: {}\".format(file_out_table))",
"def MakeZStack(image_list, xy_coords = None):\n #xy_coords = [y1,y2,x1,x2]\n if xy_coords == None:\n x_1 = 0\n y_1 = 0\n y_2,x_2 = image_list[0].shape\n else:\n y_1,y_2,x_1,x_2 = xy_coords\n xdims = x_2-x_1\n ydims = y_2-y_1\n zdims = len(image_list)\n stack = np.zeros((ydims,xdims,zdims))\n for i in range(len(image_list)):\n thumb = image_list[i][y_1:y_2,x_1:x_2]\n stack[:,:,i] = thumb\n return stack",
"def plot_stack(Timeseries, stack, cumDef, cumTime):\n self = Timeseries\n\n fig = plt.figure()\n ax = fig.add_subplot(111)\n im = plt.imshow(stack,cmap=plt.cm.jet)\n cb = plt.colorbar()\n cb.set_label('cm / {0:.2f}yr'.format(self.Set.Timespan))\n plt.title('simple stack')\n\n # Subplots --> cumulative deformation, cumulative time, average rate\n fig = plt.figure(figsize=(17,11))\n titlestr = '{0} Stack {1} Interferograms {2} : {3} '.format(self.Set.Track,\n self.Set.Nig,\n self.Set.Dates[0],\n self.Set.Dates[-1])\n plt.suptitle(titlestr, fontweight='bold', fontsize=12)\n ax = fig.add_subplot(131)\n im = plt.imshow(cumDef,cmap=plt.cm.jet)\n cb = plt.colorbar()\n cb.set_label('cm')\n plt.title('cumulative deformation')\n\n ax = fig.add_subplot(132)\n im = plt.imshow(cumTime,cmap=plt.cm.jet)\n cb = plt.colorbar()\n cb.set_label('years')\n plt.title('cumulative time')\n\n ax = fig.add_subplot(133)\n im = plt.imshow(stack,cmap=plt.cm.jet)\n cb = plt.colorbar()\n cb.set_label('cm / {0:.2f}yr'.format(self.Set.Timespan))\n plt.title('average velocity')\n\n plt.show()",
"def makeStackedImages(topImage, bottomImage):\n twidth = topImage.getWidth()\n theight = topImage.getHeight()\n bwidth = bottomImage.getWidth()\n bheight = bottomImage.getHeight()\n height = theight + bheight\n if twidth < bwidth:\n width = bwidth\n else:\n width = twidth\n resultImage = cImage.EmptyImage(width, height)\n for i in range(twidth):\n for j in range(theight):\n tpixel = topImage.getPixel(i, j)\n resultImage.setPixel(i, j, tpixel)\n\n for i in range(bwidth):\n for j in range(bheight):\n bpixel = bottomImage.getPixel(i, j)\n resultImage.setPixel(i, j + theight, bpixel)\n\n return resultImage",
"def display_coronal_with_overlay(temporal_slice, coronal_slice, images, masks, label, window_min, window_max):\n img = images[temporal_slice][:,coronal_slice,:]\n msk = masks[temporal_slice][:,coronal_slice,:]==label\n\n overlay_img = overlay_binary_segmentation_contours(img, msk, window_min, window_max) \n # Flip the image so that corresponds to correct radiological view.\n plt.imshow(np.flipud(sitk.GetArrayFromImage(overlay_img)))\n plt.axis('off')\n plt.show()",
"def stack2pred(stack_index=None,stack_band=None,index_sel=None,band_sel=None,outfile=None):\n\n if (stack_index is not None and index_sel is not None and stack_band is not None and band_sel is not None):\n \n count_new_stack = len(index_sel+band_sel)\n\n with rio.open(stack_index) as src1:\n img_indices = src1.read(index_sel)\n\n img_indices[np.isnan(img_indices)]=999\n\n with rio.open(stack_band) as src2:\n img_bands = src2.read(band_sel)\n meta = src2.meta\n meta.update(count=count_new_stack,dtype='float32',nodata=999)\n\n out_img = np.vstack((img_indices,img_bands))\n\n with rio.open(outfile,'w',**meta) as dst:\n for i in range(len(out_img)):\n dst.write_band(i+1,out_img[i])\n \n elif (stack_index == None and index_sel == None):\n \n count_new_stack = len(band_sel)\n with rio.open(stack_band) as src2:\n img_bands = src2.read(band_sel)\n meta = src2.meta\n meta.update(count=count_new_stack)\n \n with rio.open(outfile,'w',**meta) as dst:\n for i in range(len(img_bands)):\n dst.write_band(i+1,img_bands[i])\n \n elif (stack_band == None and band_sel == None):\n count_new_stack = len(index_sel)\n\n with rio.open(stack_index) as src1:\n img_indices = src1.read(index_sel)\n\n meta = src1.meta\n meta.update(count=count_new_stack)\n \n with rio.open(outfile,'w',**meta) as dst:\n for i in range(len(img_indices)):\n dst.write_band(i+1,img_indices[i])",
"def display_images_predictions3(image_array, pred_array1, pred_array2, num_images=4, image_list=False, random_images=False, overlay = True):\n ts = image_array\n pred1 = pred_array1\n pred2 = pred_array2\n samples, x, y, z = ts.shape\n print (\"samples, max, min \", samples, pred1.max(), pred1.min())\n pred1r = np.round(pred1)\n pred2r = np.round(pred2)\n\n display_list = []\n if image_list == False:\n if random_images == True:\n display_list = random.sample(range(0, samples), num_images)\n else :\n display_list = [i for i in range (num_images)]\n else:\n display_list = image_list\n\n for i in display_list:\n f, axs = plt.subplots(1,3,figsize=(15,15))\n plt.subplot(131),plt.imshow(ts[i].reshape(x, y))\n plt.title('Image '+str(i)), plt.xticks([]), plt.yticks([])\n if overlay == True:\n plt.subplot(132),plt.imshow(ts[i].reshape(x, y)), plt.imshow(pred1r[i].reshape(x, y), 'binary', interpolation='none', alpha=0.3)\n else : \n plt.subplot(132),plt.imshow(pred1r[i].reshape(x, y))\n plt.title('Pred 1'), plt.xticks([]), plt.yticks([])\n if overlay == True:\n plt.subplot(133),plt.imshow(ts[i].reshape(x, y)), plt.imshow(pred2r[i].reshape(x, y), 'binary', interpolation='none', alpha=0.3)\n else : \n plt.subplot(133),plt.imshow(pred2r[i].reshape(x, y))\n plt.title('Pred 2'), plt.xticks([]), plt.yticks([])\n plt.show()",
"def stack_tiffs(bands_list, scene, filename):\n\n file_list = [\n landsat8.get_band_filename(scene, band) for band in bands_list\n ]\n # only metadata of first file is read to copy to the merge tiff\n with rasterio.open(file_list[0]) as src0:\n meta = src0.meta\n meta.update(count=len(file_list))\n\n with rasterio.open(filename, 'w', **meta) as dst:\n for i, layer in enumerate(file_list, start=1):\n with rasterio.open(layer) as src1:\n dst.write_band(i, src1.read(1))\n print(f'Successfully created: {filename}')",
"def repeat_stack(image:'torch.Tensor', repeat:int=1, grid_kwargs:dict={}) -> 'torch.Tensor':\n return torch.stack([to_grid(image, **grid_kwargs)]*repeat)",
"def offset_mosaic(input_prefix,\n output_prefix,\n filter_list=['w2','m2','w1','uu','bb','vv'],\n min_exp_w2=170, min_exp_m2=230, min_exp_w1=200,\n min_exp_uu=0, min_exp_bb=0, min_exp_vv=0,\n restack_id=False, mask_file=None, use_scattered_light=False):\n\n # make dictionary with the minimum exposure times\n min_exp = {'w2':min_exp_w2, 'm2':min_exp_m2, 'w1':min_exp_w1,\n 'uu':min_exp_uu, 'bb':min_exp_bb, 'vv':min_exp_vv}\n\n # set a file tag for using images corrected for scattered light\n sl_tag = ''\n if use_scattered_light:\n sl_tag = '_sl'\n \n\n # go through each filter to build images\n\n for filt in filter_list:\n\n # ------------------------\n # find unique target IDs, and stack those first\n # ------------------------\n\n # open the images\n with fits.open(input_prefix + filt + '_sk_all'+sl_tag+'.fits') as hdu_sk, fits.open(input_prefix + filt + '_ex_all.fits') as hdu_ex:\n\n # delete the 0th extensions (no images there, and they break later steps)\n del hdu_sk[0]\n del hdu_ex[0]\n \n # remove extensions with exposures shorter than minimum\n exp_time = np.array( [hdu_sk[i].header['EXPOSURE'] for i in range(len(hdu_sk))] )\n remove_ind = np.where(exp_time < min_exp[filt])[0]\n for ind in sorted(remove_ind, reverse=True):\n del hdu_sk[ind]\n del hdu_ex[ind]\n\n\n\n # all of the target IDs\n target_ids = np.array( [hdu_sk[i].header['TARG_ID'] for i in range(len(hdu_sk))] )\n # chop it down to just the unique ones\n target_ids = np.unique(target_ids)\n\n \n for targ in target_ids:\n\n print('')\n print('##### stacking target ID ' + str(targ) + ', filter ' + filt + ' #####')\n print('')\n\n # prefix for saving the files for this target ID\n file_prefix = output_prefix + str(targ) + '_' + filt\n\n # check if this one is done already (by looking for a count rate image)\n if os.path.isfile(file_prefix + '_cr'+sl_tag+'.fits') and (restack_id == False):\n print(str(targ)+' is already done')\n print('')\n continue\n \n \n # temp file to hold snapshots with current target ID\n temp_hdu_sk = fits.HDUList()\n temp_hdu_ex = fits.HDUList()\n\n # append matching snapshots\n [temp_hdu_sk.append(fits.ImageHDU(data=hdu_sk[i].data, header=hdu_sk[i].header)) for i in range(len(hdu_sk)) if hdu_sk[i].header['TARG_ID'] == targ]\n [temp_hdu_ex.append(fits.ImageHDU(data=hdu_ex[i].data, header=hdu_ex[i].header)) for i in range(len(hdu_sk)) if hdu_sk[i].header['TARG_ID'] == targ]\n\n # turn exposure maps into 0s and 1s\n temp_hdu_ex_adj = copy.deepcopy(temp_hdu_ex)\n temp_hdu_ex_adj = exp_to_ones(temp_hdu_ex_adj)\n\n # mask areas with foreground stars, etc.\n if mask_file is not None:\n temp_hdu_ex_adj = mask_image(temp_hdu_ex_adj, mask_file)\n\n # write out to files\n temp_hdu_sk.writeto('targ_temp_sk.fits', overwrite=True)\n temp_hdu_ex_adj.writeto('targ_temp_ex.fits', overwrite=True)\n \n # find the coordinates of the overlapping area\n overlap_x, overlap_y = find_overlap('targ_temp_ex.fits')\n\n # find the biweight of the overlapping areas\n biweight_cps = calc_overlap_val(temp_hdu_sk, temp_hdu_ex, overlap_x, overlap_y)\n\n # apply to the counts images\n hdu_sk_corr, _, hdu_delta_counts = correct_sk(temp_hdu_sk, temp_hdu_ex, biweight_cps)\n \n # write out to files\n hdu_sk_corr.writeto(file_prefix + '_sk_all'+sl_tag+'.fits', overwrite=True)\n hdu_delta_counts.writeto(file_prefix + '_sk_off_all'+sl_tag+'.fits', overwrite=True)\n temp_hdu_ex.writeto(file_prefix + '_ex_all'+sl_tag+'.fits', overwrite=True)\n \n # stack with uvotimsum\n cmd = 'uvotimsum ' + file_prefix + '_sk_all'+sl_tag+'.fits ' + \\\n 
file_prefix + '_sk'+sl_tag+'.fits exclude=none clobber=yes'\n subprocess.run(cmd, shell=True)\n cmd = 'uvotimsum ' + file_prefix + '_sk_off_all'+sl_tag+'.fits ' + \\\n file_prefix + '_sk_off'+sl_tag+'.fits exclude=none clobber=yes'\n subprocess.run(cmd, shell=True)\n cmd = 'uvotimsum ' + file_prefix + '_ex_all'+sl_tag+'.fits ' + \\\n file_prefix + '_ex'+sl_tag+'.fits method=EXPMAP exclude=none clobber=yes'\n subprocess.run(cmd, shell=True)\n\n # make a count rate image too\n with fits.open(file_prefix + '_sk'+sl_tag+'.fits') as h_sk, fits.open(file_prefix + '_ex'+sl_tag+'.fits') as h_ex:\n cr_hdu = fits.PrimaryHDU(data=h_sk[1].data/h_ex[1].data, header=h_sk[1].header)\n cr_hdu.writeto(file_prefix + '_cr'+sl_tag+'.fits', overwrite=True)\n \n # delete temporary files\n subprocess.run('rm targ_temp_*.fits', shell=True)\n \n \n # ------------------------\n # combine the stacks\n # ------------------------\n\n\n # output file names\n output_file_sk = output_prefix + filt + '_sk'+sl_tag+'.fits'\n output_file_sk_all = output_prefix + filt + '_sk_all'+sl_tag+'.fits'\n output_file_sk_off = output_prefix + filt + '_sk_off'+sl_tag+'.fits'\n output_file_sk_off_all = output_prefix + filt + '_sk_off_all'+sl_tag+'.fits'\n output_file_ex = output_prefix + filt + '_ex'+sl_tag+'.fits'\n output_file_ex_all = output_prefix + filt + '_ex_all'+sl_tag+'.fits'\n output_file_cr = output_prefix + filt + '_cr'+sl_tag+'.fits'\n\n # start out the stacking with the first target ID\n subprocess.run('cp '+ output_prefix + str(target_ids[0]) +'_'+ filt + '_sk'+sl_tag+'.fits ' + output_file_sk, shell=True)\n subprocess.run('cp '+ output_prefix + str(target_ids[0]) +'_'+ filt + '_sk_off'+sl_tag+'.fits ' + output_file_sk_off, shell=True)\n subprocess.run('cp '+ output_prefix + str(target_ids[0]) +'_'+ filt + '_ex'+sl_tag+'.fits ' + output_file_ex, shell=True)\n subprocess.run('cp '+ output_prefix + str(target_ids[0]) +'_'+ filt + '_sk'+sl_tag+'.fits ' + output_file_sk_all, shell=True)\n subprocess.run('cp '+ output_prefix + str(target_ids[0]) +'_'+ filt + '_sk_off'+sl_tag+'.fits ' + output_file_sk_off_all, shell=True)\n subprocess.run('cp '+ output_prefix + str(target_ids[0]) +'_'+ filt + '_ex'+sl_tag+'.fits ' + output_file_ex_all, shell=True)\n # make a count rate image too\n with fits.open(output_file_sk) as h_sk, fits.open(output_file_ex) as h_ex:\n cr_hdu = fits.PrimaryHDU(data=h_sk[1].data/h_ex[1].data, header=h_sk[1].header)\n cr_hdu.writeto(output_file_cr, overwrite=True)\n\n \n # keep track of which target IDs still need to be appended to the image\n remaining_ids = copy.copy(target_ids[1:])\n\n\n # keep going while there are still IDs to append\n while len(remaining_ids) > 0:\n\n # file names for the target IDs\n remaining_id_files_sk = [output_prefix + str(t) + '_' + filt + '_sk'+sl_tag+'.fits' for t in remaining_ids]\n remaining_id_files_sk_off = [output_prefix + str(t) + '_' + filt + '_sk_off'+sl_tag+'.fits' for t in remaining_ids]\n remaining_id_files_ex = [output_prefix + str(t) + '_' + filt + '_ex'+sl_tag+'.fits' for t in remaining_ids]\n \n # find the target ID that has the best overlap with current mosaic\n # (returns index and the overlapping pixels)\n best_ind, overlap_x, overlap_y = most_overlap(output_file_ex, remaining_id_files_ex)\n\n # make an HDU with the counts (sk) image for the mosaic and best ID\n with fits.open(output_file_sk) as hdu_mosaic_sk, fits.open(remaining_id_files_sk[best_ind]) as hdu_best_sk:\n temp_hdu_sk = fits.HDUList()\n temp_hdu_sk.append(fits.ImageHDU(data=hdu_mosaic_sk[1].data, 
header=hdu_mosaic_sk[1].header))\n temp_hdu_sk.append(fits.ImageHDU(data=hdu_best_sk[1].data, header=hdu_best_sk[1].header))\n # make an HDU with the counts offset image for the mosaic and best ID\n with fits.open(output_file_sk_off) as hdu_mosaic_sk_off, fits.open(remaining_id_files_sk_off[best_ind]) as hdu_best_sk_off:\n temp_hdu_sk_off = fits.HDUList()\n temp_hdu_sk_off.append(fits.ImageHDU(data=hdu_mosaic_sk_off[1].data, header=hdu_mosaic_sk_off[1].header))\n temp_hdu_sk_off.append(fits.ImageHDU(data=hdu_best_sk_off[1].data, header=hdu_best_sk_off[1].header))\n # make an HDU with the exposure image for the mosaic and best ID\n with fits.open(output_file_ex) as hdu_mosaic_ex, fits.open(remaining_id_files_ex[best_ind]) as hdu_best_ex:\n temp_hdu_ex = fits.HDUList()\n temp_hdu_ex.append(fits.ImageHDU(data=hdu_mosaic_ex[1].data, header=hdu_mosaic_ex[1].header))\n temp_hdu_ex.append(fits.ImageHDU(data=hdu_best_ex[1].data, header=hdu_best_ex[1].header))\n \n # find the biweight of the overlapping areas\n biweight_cps = calc_overlap_val(temp_hdu_sk, temp_hdu_ex, overlap_x, overlap_y)\n\n # apply to the counts images\n hdu_sk_corr, delta_cps, hdu_delta_counts = correct_sk(temp_hdu_sk, temp_hdu_ex, biweight_cps)\n\n # save those changes to the individual target ID segments\n with fits.open(output_file_sk_all) as hdu_sk_all, fits.open(output_file_sk_off_all) as hdu_sk_off_all, fits.open(output_file_ex_all) as hdu_ex_all:\n # apply offset to existing segments\n for h in range(1,len(hdu_sk_all)):\n hdu_sk_all[h].data = (hdu_sk_all[h].data/hdu_ex_all[h].data + delta_cps[0]) * hdu_ex_all[h].data\n hdu_sk_all[h].data[hdu_ex_all[h].data == 0] = 0\n hdu_sk_off_all[h].data = hdu_sk_off_all[h].data + (delta_cps[0] * hdu_ex_all[h].data)\n # append new corrected segment\n hdu_sk_all.append(fits.ImageHDU(data=hdu_sk_corr[1].data, header=hdu_sk_corr[1].header))\n hdu_sk_off_all.append(fits.ImageHDU(data=hdu_delta_counts[1].data + temp_hdu_sk_off[1].data,\n header=hdu_delta_counts[1].header))\n hdu_ex_all.append(fits.ImageHDU(data=temp_hdu_ex[1].data, header=temp_hdu_ex[1].header))\n # write out to files\n hdu_sk_all.writeto(output_file_sk_all, overwrite=True)\n hdu_sk_off_all.writeto(output_file_sk_off_all, overwrite=True)\n hdu_ex_all.writeto(output_file_ex_all, overwrite=True)\n \n \n # stack with uvotimsum\n cmd = 'uvotimsum ' + output_file_sk_all + ' ' + output_file_sk + ' exclude=none clobber=yes'\n subprocess.run(cmd, shell=True)\n cmd = 'uvotimsum ' + output_file_sk_off_all + ' ' + output_file_sk_off + ' exclude=none clobber=yes'\n subprocess.run(cmd, shell=True)\n cmd = 'uvotimsum ' + output_file_ex_all + ' ' + output_file_ex + ' method=EXPMAP exclude=none clobber=yes'\n subprocess.run(cmd, shell=True)\n\n # make a count rate image too\n with fits.open(output_file_sk) as h_sk, fits.open(output_file_ex) as h_ex:\n cr_hdu = fits.PrimaryHDU(data=h_sk[1].data/h_ex[1].data, header=h_sk[1].header)\n cr_hdu.writeto(output_file_cr, overwrite=True)\n \n # finally, remove this index from the remaining IDs list\n remaining_ids = np.delete(remaining_ids, best_ind)",
"def generate_panoramic_images(self, number_of_panoramas):\n assert self.homographies is not None\n\n # compute bounding boxes of all warped input images in the coordinate system of the middle\n # image (as given by the homographies)\n self.bounding_boxes = np.zeros((self.frames_for_panoramas.size, 2, 2))\n for i in range(self.frames_for_panoramas.size):\n self.bounding_boxes[i] = compute_bounding_box(self.homographies[i], self.w, self.h)\n\n # change our reference coordinate system to the panoramas\n # all panoramas share the same coordinate system\n global_offset = np.min(self.bounding_boxes, axis=(0, 1))\n self.bounding_boxes -= global_offset\n\n slice_centers = np.linspace(0, self.w, number_of_panoramas + 2, endpoint=True, dtype=np.int)[1:-1]\n warped_slice_centers = np.zeros((number_of_panoramas, self.frames_for_panoramas.size))\n # every slice is a different panorama, it indicates the slices of the input images from\n # which the panorama will be concatenated\n for i in range(slice_centers.size):\n slice_center_2d = np.array([slice_centers[i], self.h // 2])[None, :]\n # homography warps the slice center to the coordinate system of the middle image\n warped_centers = [apply_homography(slice_center_2d, h) for h in self.homographies]\n # we are actually only interested in the x coordinate of each slice center in the panoramas' coordinate system\n warped_slice_centers[i] = np.array(warped_centers)[:, :, 0].squeeze() - global_offset[0]\n\n panorama_size = np.max(self.bounding_boxes, axis=(0, 1)).astype(np.int) + 1\n\n # boundary between input images in the panorama\n x_strip_boundary = ((warped_slice_centers[:, :-1] + warped_slice_centers[:, 1:]) / 2)\n x_strip_boundary = np.hstack([np.zeros((number_of_panoramas, 1)),\n x_strip_boundary,\n np.ones((number_of_panoramas, 1)) * panorama_size[0]])\n x_strip_boundary = x_strip_boundary.round().astype(np.int)\n\n self.panoramas = np.zeros((number_of_panoramas, panorama_size[1], panorama_size[0], 3), dtype=np.float64)\n for i, frame_index in enumerate(self.frames_for_panoramas):\n # warp every input image once, and populate all panoramas\n image = sol4_utils.read_image(self.files[frame_index], 2)\n warped_image = warp_image(image, self.homographies[i])\n x_offset, y_offset = self.bounding_boxes[i][0].astype(np.int)\n y_bottom = y_offset + warped_image.shape[0]\n\n for panorama_index in range(number_of_panoramas):\n # take strip of warped image and paste to current panorama\n boundaries = x_strip_boundary[panorama_index, i:i + 2]\n image_strip = warped_image[:, boundaries[0] - x_offset: boundaries[1] - x_offset]\n x_end = boundaries[0] + image_strip.shape[1]\n self.panoramas[panorama_index, y_offset:y_bottom, boundaries[0]:x_end] = image_strip\n\n # crop out areas not recorded from enough angles\n # assert will fail if there is overlap in field of view between the left most image and the right most image\n crop_left = int(self.bounding_boxes[0][1, 0])\n crop_right = int(self.bounding_boxes[-1][0, 0])\n assert crop_left < crop_right, 'for testing your code with a few images do not crop.'\n print(crop_left, crop_right)\n self.panoramas = self.panoramas[:, :, crop_left:crop_right, :]",
"def stack(greyordinates, axis=0):\n new_bm, slices = cifti.combine([go.brain_model_axis for go in greyordinates])\n new_arr = np.stack([go.data[..., slc] for go, slc in zip(greyordinates, slices)], axis=axis)\n ref_axes = set([go.other_axes for go in greyordinates if go.other_axes is not None])\n if len(ref_axes) == 0:\n other_axes = None\n elif len(ref_axes) == 1:\n other_axes = list(ref_axes[0])\n other_axes.insert(axis, ScalarAxis([f'stacked_{idx + 1}' for idx in range(len(greyordinates))]))\n else:\n raise ValueError(\"Failed to merge greyordinates as their other axes did not match\")\n return GreyOrdinates(new_arr, new_bm, other_axes)",
"def animate_zstacks(img_list, frames=None, titles=None, vmin=None, vmax=None, cmaps=None, interval=200, gif_name=None, bgcolor=None, **kwargs):\n def update_frame(f):\n for i, img in enumerate(img_list):\n imxy[i].set_data(img[:,:,f])\n return imxy\n\n if not all([img.shape[2] == img_list[0].shape[2] for img in img_list]):\n print('Error: all images must have same length in z-dimension.')\n\n if not frames:\n frames = list(range(img_list[0].shape[2]))\n if not vmin:\n vmin = [np.amin(img) for img in img_list]\n if not vmax:\n vmax = [np.amax(img) for img in img_list]\n if not cmaps:\n cmaps = ['binary_r'] * len(img_list)\n\n fig, ax = plt.subplots(1, len(img_list), figsize=(3.*len(img_list), 3.))\n imxy = []\n for i, img in enumerate(img_list):\n if bgcolor:\n ax[i].set_facecolor(bgcolor)\n imxy.append(ax[i].imshow(img[:,:,frames[0]], vmin=vmin[i], vmax=vmax[i], cmap=cmaps[i], **kwargs))\n if titles:\n for i, title in enumerate(titles):\n ax[i].set_title(title)\n anim = FuncAnimation(fig, update_frame, frames=frames, interval=interval, blit=False)\n if gif_name:\n # Writer = animation.writers['imagemagick']\n # writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)\n anim.save(gif_name, writer='imagemagick', fps=8, dpi=300)\n # plt.show()\n plt.close()\n return True",
"def save_image_stack(image_stack, path, indices=None,\n options = None):\n options = utils.Options(options)\n\n if os.path.isfile(path):\n dirpath = os.path.dirname(path)\n prefix, ext = os.path.splitext(path)\n ext = ext.lower()\n if not ext: ext = '.raw'\n elif os.path.isdir(path):\n dirpath = path\n ext = 'dir'\n prefix = 'image_'\n elif not os.path.exists(path):\n base, ext = os.path.splitext(path)\n ext = ext.lower()\n dirpath = path if not ext else os.path.dirname(path)\n if ext:\n prefix = base\n else:\n ext = 'dir'\n prefix = 'image_'\n\n if dirpath and not os.path.exists(dirpath):\n os.makedirs(dirpath)\n\n if ext in tif_extensions + raw_extensions + data_extensions:\n pathinfo_txt = path + '_PATHINFO.txt'\n else:\n pathinfo_txt = os.path.join(dirpath, 'PATHINFO.txt')\n\n image_stack.pathinfo.save(pathinfo_txt)\n images = image_stack.images\n\n if ext in raw_extensions:\n f = open(path, 'wb')\n images.tofile(f)\n f.close()\n elif ext=='dir':\n for i in range(images.shape[0]):\n f = open(os.path.join(dirpath, prefix + '%.5i.raw' % (i)), 'wb')\n images[i].tofile(f)\n f.close()\n elif ext in tif_extensions:\n tif = TIFF.open(path, mode='w')\n compression = options.get(tiff_compression = 'none')\n buf = StringIO()\n image_stack.pathinfo.save(buf)\n tif.SetField('ImageDescription', buf.getvalue ())\n tif.write_image(images, compression=compression)\n tif.close()\n elif ext=='.data':\n f = open(path, 'w')\n voxel_sizes = numpy.array(image_stack.get_voxel_sizes()) * 1e6 #um\n t = (voxel_sizes[2], voxel_sizes[1], voxel_sizes[0],voxel_sizes[0]*voxel_sizes[1]*voxel_sizes[2],)\n fmt = 4*'%14.6e' + '\\n'\n f.write(fmt % t)\n if indices is None:\n indices = numpy.ndindex(*images.shape)\n center = numpy.array(images.shape)//2\n maxvalue = images.max()\n use_value_resolution = options.get(use_value_resolution = False)\n if use_value_resolution:\n value_resolution = image_stack.pathinfo.get_value_resolution() or 0\n print 'Using Value Resolution:', value_resolution\n else:\n value_resolution = 0\n for index in indices:\n value = images[index]\n if value > value_resolution:\n position = (index - center) * voxel_sizes\n t = (position[2], position[1], position[0], value,)\n f.write (fmt % t)\n f.close()\n else:\n raise NotImplementedError(`path, ext`)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Create a pair of orthographic plots with overlays. Use mask_image and/or threshold_image to preprocess images to be overlaid and display the overlays in a given range. See the wiki examples. Example >>> import ants >>> mni = ants.image_read(ants.get_data('mni')) >>> ch2 = ants.image_read(ants.get_data('ch2')) >>> ants.plot_ortho_double(mni, ch2)
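A minimal follow-on sketch, assuming the usual ants.threshold_image(image, low_thresh, high_thresh) signature and illustrative threshold values, for restricting an overlay to a range before plotting: >>> seg = ants.threshold_image(mni, low_thresh=mni.mean(), high_thresh=mni.max()) >>> ants.plot_ortho_double(mni, ch2, overlay=seg, overlay2=seg, overlay_alpha=0.5)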
|
def plot_ortho_double(
image,
image2,
overlay=None,
overlay2=None,
reorient=True,
# xyz arguments
xyz=None,
xyz_lines=True,
xyz_color="red",
xyz_alpha=0.6,
xyz_linewidth=2,
xyz_pad=5,
# base image arguments
cmap="Greys_r",
alpha=1,
cmap2="Greys_r",
alpha2=1,
# overlay arguments
overlay_cmap="jet",
overlay_alpha=0.9,
overlay_cmap2="jet",
overlay_alpha2=0.9,
# background arguments
black_bg=True,
bg_thresh_quant=0.01,
bg_val_quant=0.99,
# scale/crop/domain arguments
crop=False,
scale=False,
crop2=False,
scale2=True,
domain_image_map=None,
# title arguments
title=None,
titlefontsize=24,
title_dx=0,
title_dy=0,
# 4th panel text arguments
text=None,
textfontsize=24,
textfontcolor="white",
text_dx=0,
text_dy=0,
# save & size arguments
filename=None,
dpi=500,
figsize=1.0,
flat=True,
transpose=False,
transparent=True,
):
def mirror_matrix(x):
return x[::-1, :]
def rotate270_matrix(x):
return mirror_matrix(x.T)
def reorient_slice(x, axis):
return rotate270_matrix(x)
# need this hack because of a weird NaN warning from matplotlib with overlays
warnings.simplefilter("ignore")
# handle `image` argument
if isinstance(image, str):
image = iio2.image_read(image)
if not isinstance(image, iio.ANTsImage):
raise ValueError("image argument must be an ANTsImage")
if image.dimension != 3:
raise ValueError("Input image must have 3 dimensions!")
if isinstance(image2, str):
image2 = iio2.image_read(image2)
if not isinstance(image2, iio.ANTsImage):
raise ValueError("image2 argument must be an ANTsImage")
if image2.dimension != 3:
raise ValueError("Input image2 must have 3 dimensions!")
# handle `overlay` argument
if overlay is not None:
if isinstance(overlay, str):
overlay = iio2.image_read(overlay)
if not isinstance(overlay, iio.ANTsImage):
raise ValueError("overlay argument must be an ANTsImage")
if overlay.components > 1:
raise ValueError("overlay cannot have more than one voxel component")
if overlay.dimension != 3:
raise ValueError("Overlay image must have 3 dimensions!")
if not iio.image_physical_space_consistency(image, overlay):
overlay = reg.resample_image_to_target(overlay, image, interp_type="linear")
if overlay2 is not None:
if isinstance(overlay2, str):
overlay2 = iio2.image_read(overlay2)
if not isinstance(overlay2, iio.ANTsImage):
raise ValueError("overlay2 argument must be an ANTsImage")
if overlay2.components > 1:
raise ValueError("overlay2 cannot have more than one voxel component")
if overlay2.dimension != 3:
raise ValueError("Overlay2 image must have 3 dimensions!")
if not iio.image_physical_space_consistency(image2, overlay2):
overlay2 = reg.resample_image_to_target(
overlay2, image2, interp_type="linear"
)
if not iio.image_physical_space_consistency(image, image2):
image2 = reg.resample_image_to_target(image2, image, interp_type="linear")
if image.pixeltype not in {"float", "double"}:
scale = False # turn off scaling if image is discrete
if image2.pixeltype not in {"float", "double"}:
scale2 = False # turn off scaling if image is discrete
# reorient images
if reorient != False:
if reorient == True:
reorient = "RPI"
image = image.reorient_image2(reorient)
image2 = image2.reorient_image2(reorient)
if overlay is not None:
overlay = overlay.reorient_image2(reorient)
if overlay2 is not None:
overlay2 = overlay2.reorient_image2(reorient)
# handle `slices` argument
if xyz is None:
xyz = [int(s / 2) for s in image.shape]
for i in range(3):
if xyz[i] is None:
xyz[i] = int(image.shape[i] / 2)
# resample image if spacing is very unbalanced
spacing = [s for i, s in enumerate(image.spacing)]
if (max(spacing) / min(spacing)) > 3.0:
new_spacing = (1, 1, 1)
image = image.resample_image(tuple(new_spacing))
image2 = image2.resample_image(tuple(new_spacing))
if overlay is not None:
overlay = overlay.resample_image(tuple(new_spacing))
if overlay2 is not None:
overlay2 = overlay2.resample_image(tuple(new_spacing))
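# rescale the requested slice indices into the new (isotropic) voxel grid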
xyz = [
int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)
]
# pad images
image, lowpad, uppad = image.pad_image(return_padvals=True)
image2, lowpad2, uppad2 = image2.pad_image(return_padvals=True)
xyz = [v + l for v, l in zip(xyz, lowpad)]
if overlay is not None:
overlay = overlay.pad_image()
if overlay2 is not None:
overlay2 = overlay2.pad_image()
# handle `domain_image_map` argument
if domain_image_map is not None:
if isinstance(domain_image_map, iio.ANTsImage):
tx = tio2.new_ants_transform(
precision="float",
transform_type="AffineTransform",
dimension=image.dimension,
)
image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)
image2 = tio.apply_ants_transform_to_image(tx, image2, domain_image_map)
if overlay is not None:
overlay = tio.apply_ants_transform_to_image(
tx, overlay, domain_image_map, interpolation="linear"
)
if overlay2 is not None:
overlay2 = tio.apply_ants_transform_to_image(
tx, overlay2, domain_image_map, interpolation="linear"
)
elif isinstance(domain_image_map, (list, tuple)):
# expect an image and transformation
if len(domain_image_map) != 2:
raise ValueError("domain_image_map list or tuple must have length == 2")
dimg = domain_image_map[0]
if not isinstance(dimg, iio.ANTsImage):
raise ValueError("domain_image_map first entry should be ANTsImage")
tx = domain_image_map[1]
image = reg.apply_transforms(dimg, image, transform_list=tx)
if overlay is not None:
overlay = reg.apply_transforms(
dimg, overlay, transform_list=tx, interpolator="linear"
)
image2 = reg.apply_transforms(dimg, image2, transform_list=tx)
if overlay2 is not None:
overlay2 = reg.apply_transforms(
dimg, overlay2, transform_list=tx, interpolator="linear"
)
## single-channel images ##
if image.components == 1:
# potentially crop image
if crop:
plotmask = image.get_mask(cleanup=0)
if plotmask.max() == 0:
plotmask += 1
image = image.crop_image(plotmask)
if overlay is not None:
overlay = overlay.crop_image(plotmask)
if crop2:
plotmask2 = image2.get_mask(cleanup=0)
if plotmask2.max() == 0:
plotmask2 += 1
image2 = image2.crop_image(plotmask2)
if overlay2 is not None:
overlay2 = overlay2.crop_image(plotmask2)
# potentially find dynamic range
if scale == True:
vmin, vmax = image.quantile((0.05, 0.95))
elif isinstance(scale, (list, tuple)):
if len(scale) != 2:
raise ValueError(
"scale argument must be boolean or list/tuple with two values"
)
vmin, vmax = image.quantile(scale)
else:
vmin = None
vmax = None
if scale2 == True:
vmin2, vmax2 = image2.quantile((0.05, 0.95))
elif isinstance(scale2, (list, tuple)):
if len(scale2) != 2:
raise ValueError(
"scale2 argument must be boolean or list/tuple with two values"
)
vmin2, vmax2 = image2.quantile(scale2)
else:
vmin2 = None
vmax2 = None
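# layout: flat=False uses a 2x4 grid (sagittal/coronal pairs on top, axial pair bottom-right, text panel bottom-left); flat=True packs the six slices into 2x3, or 3x2 when transpose=True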
if not flat:
nrow = 2
ncol = 4
else:
if not transpose:
nrow = 2
ncol = 3
else:
nrow = 3
ncol = 2
fig = plt.figure(
figsize=((ncol + 1) * 2.5 * figsize, (nrow + 1) * 2.5 * figsize)
)
if title is not None:
basey = 0.88 if not flat else 0.66
basex = 0.5
fig.suptitle(
title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy
)
gs = gridspec.GridSpec(
nrow,
ncol,
wspace=0.0,
hspace=0.0,
top=1.0 - 0.5 / (nrow + 1),
bottom=0.5 / (nrow + 1),
left=0.5 / (ncol + 1),
right=1 - 0.5 / (ncol + 1),
)
# pad image to have isotropic array dimensions
image = image.numpy()
if overlay is not None:
overlay = overlay.numpy()
if overlay.dtype not in ["uint8", "uint32"]:
overlay[np.abs(overlay) == 0] = np.nan
image2 = image2.numpy()
if overlay2 is not None:
overlay2 = overlay2.numpy()
if overlay2.dtype not in ["uint8", "uint32"]:
overlay2[np.abs(overlay2) == 0] = np.nan
####################
####################
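# sagittal (YZ) slices through xyz[0]: image first, then image2 in its paired panel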
yz_slice = reorient_slice(image[xyz[0], :, :], 0)
ax = plt.subplot(gs[0, 0])
ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
yz_overlay = reorient_slice(overlay[xyz[0], :, :], 0)
ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap)
if xyz_lines:
# add lines
l = mlines.Line2D(
[yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],
[xyz_pad, yz_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, yz_slice.shape[1] - xyz_pad],
[yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
ax.axis("off")
#######
yz_slice2 = reorient_slice(image2[xyz[0], :, :], 0)
if not flat:
ax = plt.subplot(gs[0, 1])
else:
if not transpose:
ax = plt.subplot(gs[1, 0])
else:
ax = plt.subplot(gs[0, 1])
ax.imshow(yz_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)
if overlay2 is not None:
yz_overlay2 = reorient_slice(overlay2[xyz[0], :, :], 0)
ax.imshow(yz_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)
if xyz_lines:
# add lines
l = mlines.Line2D(
[yz_slice2.shape[0] - xyz[1], yz_slice2.shape[0] - xyz[1]],
[xyz_pad, yz_slice2.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, yz_slice2.shape[1] - xyz_pad],
[yz_slice2.shape[1] - xyz[2], yz_slice2.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
ax.axis("off")
####################
####################
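# coronal (XZ) slices through xyz[1]: image first, then image2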
xz_slice = reorient_slice(image[:, xyz[1], :], 1)
if not flat:
ax = plt.subplot(gs[0, 2])
else:
if not transpose:
ax = plt.subplot(gs[0, 1])
else:
ax = plt.subplot(gs[1, 0])
ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
xz_overlay = reorient_slice(overlay[:, xyz[1], :], 1)
ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap)
if xyz_lines:
# add lines
l = mlines.Line2D(
[xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],
[xyz_pad, xz_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xz_slice.shape[1] - xyz_pad],
[xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
ax.axis("off")
#######
xz_slice2 = reorient_slice(image2[:, xyz[1], :], 1)
if not flat:
ax = plt.subplot(gs[0, 3])
else:
ax = plt.subplot(gs[1, 1])
ax.imshow(xz_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)
if overlay2 is not None:
xz_overlay2 = reorient_slice(overlay2[:, xyz[1], :], 1)
ax.imshow(xz_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)
if xyz_lines:
# add lines
l = mlines.Line2D(
[xz_slice2.shape[0] - xyz[0], xz_slice2.shape[0] - xyz[0]],
[xyz_pad, xz_slice2.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xz_slice2.shape[1] - xyz_pad],
[xz_slice2.shape[1] - xyz[2], xz_slice2.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
ax.axis("off")
####################
####################
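# axial (XY) slices through xyz[2]: image first, then image2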
xy_slice = reorient_slice(image[:, :, xyz[2]], 2)
if not flat:
ax = plt.subplot(gs[1, 2])
else:
if not transpose:
ax = plt.subplot(gs[0, 2])
else:
ax = plt.subplot(gs[2, 0])
ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
xy_overlay = reorient_slice(overlay[:, :, xyz[2]], 2)
ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap)
if xyz_lines:
# add lines
l = mlines.Line2D(
[xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],
[xyz_pad, xy_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xy_slice.shape[1] - xyz_pad],
[xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
ax.axis("off")
#######
xy_slice2 = reorient_slice(image2[:, :, xyz[2]], 2)
if not flat:
ax = plt.subplot(gs[1, 3])
else:
if not transpose:
ax = plt.subplot(gs[1, 2])
else:
ax = plt.subplot(gs[2, 1])
ax.imshow(xy_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)
if overlay2 is not None:
xy_overlay2 = reorient_slice(overlay2[:, :, xyz[2]], 2)
ax.imshow(xy_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)
if xyz_lines:
# add lines
l = mlines.Line2D(
[xy_slice2.shape[0] - xyz[0], xy_slice2.shape[0] - xyz[0]],
[xyz_pad, xy_slice2.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xy_slice2.shape[1] - xyz_pad],
[xy_slice2.shape[1] - xyz[1], xy_slice2.shape[1] - xyz[1]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
ax.axis("off")
####################
####################
if not flat:
# empty corner
ax = plt.subplot(gs[1, :2])
if text is not None:
# add text
left, width = 0.25, 0.5
bottom, height = 0.25, 0.5
right = left + width
top = bottom + height
ax.text(
0.5 * (left + right) + text_dx,
0.5 * (bottom + top) + text_dy,
text,
horizontalalignment="center",
verticalalignment="center",
fontsize=textfontsize,
color=textfontcolor,
transform=ax.transAxes,
)
# ax.text(0.5, 0.5)
img_shape = list(image.shape[:-1])
img_shape[1] *= 2
ax.imshow(np.zeros(img_shape), cmap="Greys_r")
ax.axis("off")
## multi-channel images ##
elif image.components > 1:
raise ValueError("Multi-channel images not currently supported!")
if filename is not None:
plt.savefig(filename, dpi=dpi, transparent=transparent)
plt.close(fig)
else:
plt.show()
# turn warnings back to default
warnings.simplefilter("default")
|
[
"def plot_ortho(\n image,\n overlay=None,\n reorient=True,\n blend=False,\n # xyz arguments\n xyz=None,\n xyz_lines=True,\n xyz_color=\"red\",\n xyz_alpha=0.6,\n xyz_linewidth=2,\n xyz_pad=5,\n orient_labels=True,\n # base image arguments\n alpha=1,\n cmap=\"Greys_r\",\n # overlay arguments\n overlay_cmap=\"jet\",\n overlay_alpha=0.9,\n cbar=False,\n cbar_length=0.8,\n cbar_dx=0.0,\n cbar_vertical=True,\n # background arguments\n black_bg=True,\n bg_thresh_quant=0.01,\n bg_val_quant=0.99,\n # scale/crop/domain arguments\n crop=False,\n scale=False,\n domain_image_map=None,\n # title arguments\n title=None,\n titlefontsize=24,\n title_dx=0,\n title_dy=0,\n # 4th panel text arguemnts\n text=None,\n textfontsize=24,\n textfontcolor=\"white\",\n text_dx=0,\n text_dy=0,\n # save & size arguments\n filename=None,\n dpi=500,\n figsize=1.0,\n flat=False,\n transparent=True,\n resample=False,\n):\n\n def mirror_matrix(x):\n return x[::-1, :]\n\n def rotate270_matrix(x):\n return mirror_matrix(x.T)\n\n def reorient_slice(x, axis):\n return rotate270_matrix(x)\n\n # need this hack because of a weird NaN warning from matplotlib with overlays\n warnings.simplefilter(\"ignore\")\n\n # handle `image` argument\n if isinstance(image, str):\n image = iio2.image_read(image)\n if not isinstance(image, iio.ANTsImage):\n raise ValueError(\"image argument must be an ANTsImage\")\n if image.dimension != 3:\n raise ValueError(\"Input image must have 3 dimensions!\")\n\n # handle `overlay` argument\n if overlay is not None:\n vminol = overlay.min()\n vmaxol = overlay.max()\n if isinstance(overlay, str):\n overlay = iio2.image_read(overlay)\n if not isinstance(overlay, iio.ANTsImage):\n raise ValueError(\"overlay argument must be an ANTsImage\")\n if overlay.components > 1:\n raise ValueError(\"overlay cannot have more than one voxel component\")\n if overlay.dimension != 3:\n raise ValueError(\"Overlay image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(image, overlay):\n overlay = reg.resample_image_to_target(overlay, image, interp_type=\"linear\")\n\n if blend:\n if alpha == 1:\n alpha = 0.5\n image = image * alpha + overlay * (1 - alpha)\n overlay = None\n alpha = 1.0\n\n if image.pixeltype not in {\"float\", \"double\"}:\n scale = False # turn off scaling if image is discrete\n\n # reorient images\n if reorient != False:\n if reorient == True:\n reorient = \"RPI\"\n image = image.reorient_image2(\"RPI\")\n if overlay is not None:\n overlay = overlay.reorient_image2(\"RPI\")\n\n # handle `slices` argument\n if xyz is None:\n xyz = [int(s / 2) for s in image.shape]\n for i in range(3):\n if xyz[i] is None:\n xyz[i] = int(image.shape[i] / 2)\n\n # resample image if spacing is very unbalanced\n spacing = [s for i, s in enumerate(image.spacing)]\n if (max(spacing) / min(spacing)) > 3.0 and resample:\n new_spacing = (1, 1, 1)\n image = image.resample_image(tuple(new_spacing))\n if overlay is not None:\n overlay = overlay.resample_image(tuple(new_spacing))\n xyz = [\n int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)\n ]\n\n # potentially crop image\n if crop:\n plotmask = image.get_mask(cleanup=0)\n if plotmask.max() == 0:\n plotmask += 1\n image = image.crop_image(plotmask)\n if overlay is not None:\n overlay = overlay.crop_image(plotmask)\n\n # pad images\n image, lowpad, uppad = image.pad_image(return_padvals=True)\n xyz = [v + l for v, l in zip(xyz, lowpad)]\n if overlay is not None:\n overlay = overlay.pad_image()\n\n # handle `domain_image_map` argument\n 
if domain_image_map is not None:\n if isinstance(domain_image_map, iio.ANTsImage):\n tx = tio2.new_ants_transform(\n precision=\"float\",\n transform_type=\"AffineTransform\",\n dimension=image.dimension,\n )\n image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)\n if overlay is not None:\n overlay = tio.apply_ants_transform_to_image(\n tx, overlay, domain_image_map, interpolation=\"linear\"\n )\n elif isinstance(domain_image_map, (list, tuple)):\n # expect an image and transformation\n if len(domain_image_map) != 2:\n raise ValueError(\"domain_image_map list or tuple must have length == 2\")\n\n dimg = domain_image_map[0]\n if not isinstance(dimg, iio.ANTsImage):\n raise ValueError(\"domain_image_map first entry should be ANTsImage\")\n\n tx = domain_image_map[1]\n image = reg.apply_transforms(dimg, image, transform_list=tx)\n if overlay is not None:\n overlay = reg.apply_transforms(\n dimg, overlay, transform_list=tx, interpolator=\"linear\"\n )\n\n ## single-channel images ##\n if image.components == 1:\n\n # potentially find dynamic range\n if scale == True:\n vmin, vmax = image.quantile((0.05, 0.95))\n elif isinstance(scale, (list, tuple)):\n if len(scale) != 2:\n raise ValueError(\n \"scale argument must be boolean or list/tuple with two values\"\n )\n vmin, vmax = image.quantile(scale)\n else:\n vmin = None\n vmax = None\n\n if not flat:\n nrow = 2\n ncol = 2\n else:\n nrow = 1\n ncol = 3\n\n fig = plt.figure(figsize=(9 * figsize, 9 * figsize))\n if title is not None:\n basey = 0.88 if not flat else 0.66\n basex = 0.5\n fig.suptitle(\n title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy\n )\n\n gs = gridspec.GridSpec(\n nrow,\n ncol,\n wspace=0.0,\n hspace=0.0,\n top=1.0 - 0.5 / (nrow + 1),\n bottom=0.5 / (nrow + 1),\n left=0.5 / (ncol + 1),\n right=1 - 0.5 / (ncol + 1),\n )\n\n # pad image to have isotropic array dimensions\n image = image.numpy()\n if overlay is not None:\n overlay = overlay.numpy()\n if overlay.dtype not in [\"uint8\", \"uint32\"]:\n overlay[np.abs(overlay) == 0] = np.nan\n\n yz_slice = reorient_slice(image[xyz[0], :, :], 0)\n ax = plt.subplot(gs[0, 0])\n ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n yz_overlay = reorient_slice(overlay[xyz[0], :, :], 0)\n ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol )\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],\n [xyz_pad, yz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice.shape[1] - xyz_pad],\n [yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"S\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"I\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"A\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"P\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n 
transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n xz_slice = reorient_slice(image[:, xyz[1], :], 1)\n ax = plt.subplot(gs[0, 1])\n ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xz_overlay = reorient_slice(overlay[:, xyz[1], :], 1)\n ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol )\n\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],\n [xyz_pad, xz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice.shape[1] - xyz_pad],\n [xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"S\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"I\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n xy_slice = reorient_slice(image[:, :, xyz[2]], 2)\n if not flat:\n ax = plt.subplot(gs[1, 1])\n else:\n ax = plt.subplot(gs[0, 2])\n im = ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xy_overlay = reorient_slice(overlay[:, :, xyz[2]], 2)\n im = ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol)\n\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],\n [xyz_pad, xy_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice.shape[1] - xyz_pad],\n [xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"A\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"P\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n if not flat:\n # empty corner\n ax = plt.subplot(gs[1, 0])\n if text is not None:\n # add text\n left, width = 0.25, 0.5\n bottom, height = 0.25, 0.5\n right = left + width\n top = bottom + height\n ax.text(\n 0.5 * (left + right) + text_dx,\n 0.5 * (bottom + top) + text_dy,\n text,\n horizontalalignment=\"center\",\n verticalalignment=\"center\",\n fontsize=textfontsize,\n color=textfontcolor,\n 
transform=ax.transAxes,\n )\n # ax.text(0.5, 0.5)\n ax.imshow(np.zeros(image.shape[:-1]), cmap=\"Greys_r\")\n ax.axis(\"off\")\n\n if cbar:\n cbar_start = (1 - cbar_length) / 2\n if cbar_vertical:\n cax = fig.add_axes([0.9 + cbar_dx, cbar_start, 0.03, cbar_length])\n cbar_orient = \"vertical\"\n else:\n cax = fig.add_axes([cbar_start, 0.08 + cbar_dx, cbar_length, 0.03])\n cbar_orient = \"horizontal\"\n fig.colorbar(im, cax=cax, orientation=cbar_orient)\n\n ## multi-channel images ##\n elif image.components > 1:\n raise ValueError(\"Multi-channel images not currently supported!\")\n\n if filename is not None:\n plt.savefig(filename, dpi=dpi, transparent=transparent)\n plt.close(fig)\n else:\n plt.show()\n\n # turn warnings back to default\n warnings.simplefilter(\"default\")",
"def plot_ortho_stack(\n images,\n overlays=None,\n reorient=True,\n # xyz arguments\n xyz=None,\n xyz_lines=False,\n xyz_color=\"red\",\n xyz_alpha=0.6,\n xyz_linewidth=2,\n xyz_pad=5,\n # base image arguments\n cmap=\"Greys_r\",\n alpha=1,\n # overlay arguments\n overlay_cmap=\"jet\",\n overlay_alpha=0.9,\n # background arguments\n black_bg=True,\n bg_thresh_quant=0.01,\n bg_val_quant=0.99,\n # scale/crop/domain arguments\n crop=False,\n scale=False,\n domain_image_map=None,\n # title arguments\n title=None,\n titlefontsize=24,\n title_dx=0,\n title_dy=0,\n # 4th panel text arguemnts\n text=None,\n textfontsize=24,\n textfontcolor=\"white\",\n text_dx=0,\n text_dy=0,\n # save & size arguments\n filename=None,\n dpi=500,\n figsize=1.0,\n colpad=0,\n rowpad=0,\n transpose=False,\n transparent=True,\n orient_labels=True,\n):\n\n def mirror_matrix(x):\n return x[::-1, :]\n\n def rotate270_matrix(x):\n return mirror_matrix(x.T)\n\n def reorient_slice(x, axis):\n return rotate270_matrix(x)\n\n # need this hack because of a weird NaN warning from matplotlib with overlays\n warnings.simplefilter(\"ignore\")\n\n n_images = len(images)\n\n # handle `image` argument\n for i in range(n_images):\n if isinstance(images[i], str):\n images[i] = iio2.image_read(images[i])\n if not isinstance(images[i], iio.ANTsImage):\n raise ValueError(\"image argument must be an ANTsImage\")\n if images[i].dimension != 3:\n raise ValueError(\"Input image must have 3 dimensions!\")\n\n if overlays is None:\n overlays = [None] * n_images\n # handle `overlay` argument\n for i in range(n_images):\n if overlays[i] is not None:\n if isinstance(overlays[i], str):\n overlays[i] = iio2.image_read(overlays[i])\n if not isinstance(overlays[i], iio.ANTsImage):\n raise ValueError(\"overlay argument must be an ANTsImage\")\n if overlays[i].components > 1:\n raise ValueError(\"overlays[i] cannot have more than one voxel component\")\n if overlays[i].dimension != 3:\n raise ValueError(\"Overlay image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(images[i], overlays[i]):\n overlays[i] = reg.resample_image_to_target(\n overlays[i], images[i], interp_type=\"linear\"\n )\n\n for i in range(1, n_images):\n if not iio.image_physical_space_consistency(images[0], images[i]):\n images[i] = reg.resample_image_to_target(\n images[0], images[i], interp_type=\"linear\"\n )\n\n # reorient images\n if reorient != False:\n if reorient == True:\n reorient = \"RPI\"\n\n for i in range(n_images):\n images[i] = images[i].reorient_image2(reorient)\n\n if overlays[i] is not None:\n overlays[i] = overlays[i].reorient_image2(reorient)\n\n # handle `slices` argument\n if xyz is None:\n xyz = [int(s / 2) for s in images[0].shape]\n for i in range(3):\n if xyz[i] is None:\n xyz[i] = int(images[0].shape[i] / 2)\n\n # resample image if spacing is very unbalanced\n spacing = [s for i, s in enumerate(images[0].spacing)]\n if (max(spacing) / min(spacing)) > 3.0:\n new_spacing = (1, 1, 1)\n for i in range(n_images):\n images[i] = images[i].resample_image(tuple(new_spacing))\n if overlays[i] is not None:\n overlays[i] = overlays[i].resample_image(tuple(new_spacing))\n xyz = [\n int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)\n ]\n\n # potentially crop image\n if crop:\n for i in range(n_images):\n plotmask = images[i].get_mask(cleanup=0)\n if plotmask.max() == 0:\n plotmask += 1\n images[i] = images[i].crop_image(plotmask)\n if overlays[i] is not None:\n overlays[i] = overlays[i].crop_image(plotmask)\n\n # pad 
images\n for i in range(n_images):\n if i == 0:\n images[i], lowpad, uppad = images[i].pad_image(return_padvals=True)\n else:\n images[i] = images[i].pad_image()\n if overlays[i] is not None:\n overlays[i] = overlays[i].pad_image()\n xyz = [v + l for v, l in zip(xyz, lowpad)]\n\n # handle `domain_image_map` argument\n if domain_image_map is not None:\n if isinstance(domain_image_map, iio.ANTsImage):\n tx = tio2.new_ants_transform(\n precision=\"float\", transform_type=\"AffineTransform\", dimension=3\n )\n for i in range(n_images):\n images[i] = tio.apply_ants_transform_to_image(\n tx, images[i], domain_image_map\n )\n\n if overlays[i] is not None:\n overlays[i] = tio.apply_ants_transform_to_image(\n tx, overlays[i], domain_image_map, interpolation=\"linear\"\n )\n elif isinstance(domain_image_map, (list, tuple)):\n # expect an image and transformation\n if len(domain_image_map) != 2:\n raise ValueError(\"domain_image_map list or tuple must have length == 2\")\n\n dimg = domain_image_map[0]\n if not isinstance(dimg, iio.ANTsImage):\n raise ValueError(\"domain_image_map first entry should be ANTsImage\")\n\n tx = domain_image_map[1]\n for i in range(n_images):\n images[i] = reg.apply_transforms(dimg, images[i], transform_list=tx)\n if overlays[i] is not None:\n overlays[i] = reg.apply_transforms(\n dimg, overlays[i], transform_list=tx, interpolator=\"linear\"\n )\n\n # potentially find dynamic range\n if scale == True:\n vmins = []\n vmaxs = []\n for i in range(n_images):\n vmin, vmax = images[i].quantile((0.05, 0.95))\n vmins.append(vmin)\n vmaxs.append(vmax)\n elif isinstance(scale, (list, tuple)):\n if len(scale) != 2:\n raise ValueError(\n \"scale argument must be boolean or list/tuple with two values\"\n )\n vmins = []\n vmaxs = []\n for i in range(n_images):\n vmin, vmax = images[i].quantile(scale)\n vmins.append(vmin)\n vmaxs.append(vmax)\n else:\n vmin = None\n vmax = None\n\n if not transpose:\n nrow = n_images\n ncol = 3\n else:\n nrow = 3\n ncol = n_images\n\n fig = plt.figure(figsize=((ncol + 1) * 2.5 * figsize, (nrow + 1) * 2.5 * figsize))\n if title is not None:\n basey = 0.93\n basex = 0.5\n fig.suptitle(\n title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy\n )\n\n if (colpad > 0) and (rowpad > 0):\n bothgridpad = max(colpad, rowpad)\n colpad = 0\n rowpad = 0\n else:\n bothgridpad = 0.0\n\n gs = gridspec.GridSpec(\n nrow,\n ncol,\n wspace=bothgridpad,\n hspace=0.0,\n top=1.0 - 0.5 / (nrow + 1),\n bottom=0.5 / (nrow + 1) + colpad,\n left=0.5 / (ncol + 1) + rowpad,\n right=1 - 0.5 / (ncol + 1),\n )\n\n # pad image to have isotropic array dimensions\n vminols=[]\n vmaxols=[]\n for i in range(n_images):\n images[i] = images[i].numpy()\n if overlays[i] is not None:\n vminols.append( overlays[i].min() )\n vmaxols.append( overlays[i].max() )\n overlays[i] = overlays[i].numpy()\n if overlays[i].dtype not in [\"uint8\", \"uint32\"]:\n overlays[i][np.abs(overlays[i]) == 0] = np.nan\n\n ####################\n ####################\n for i in range(n_images):\n yz_slice = reorient_slice(images[i][xyz[0], :, :], 0)\n if not transpose:\n ax = plt.subplot(gs[i, 0])\n else:\n ax = plt.subplot(gs[0, i])\n ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlays[i] is not None:\n yz_overlay = reorient_slice(overlays[i][xyz[0], :, :], 0)\n ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap,\n vmin=vminols[i], vmax=vmaxols[i])\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],\n 
[xyz_pad, yz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice.shape[1] - xyz_pad],\n [yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"S\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"I\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"A\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"P\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n ####################\n ####################\n\n xz_slice = reorient_slice(images[i][:, xyz[1], :], 1)\n if not transpose:\n ax = plt.subplot(gs[i, 1])\n else:\n ax = plt.subplot(gs[1, i])\n ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlays[i] is not None:\n xz_overlay = reorient_slice(overlays[i][:, xyz[1], :], 1)\n ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap,\n vmin=vminols[i], vmax=vmaxols[i])\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],\n [xyz_pad, xz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice.shape[1] - xyz_pad],\n [xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"A\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"P\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n ####################\n ####################\n xy_slice = reorient_slice(images[i][:, :, xyz[2]], 2)\n if not transpose:\n ax = plt.subplot(gs[i, 2])\n else:\n ax = plt.subplot(gs[2, i])\n ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlays[i] is not None:\n xy_overlay = reorient_slice(overlays[i][:, :, xyz[2]], 2)\n ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap,\n vmin=vminols[i], vmax=vmaxols[i])\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],\n [xyz_pad, xy_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice.shape[1] - xyz_pad],\n [xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n 
linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"A\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"P\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n ####################\n ####################\n\n if filename is not None:\n plt.savefig(filename, dpi=dpi, transparent=transparent)\n plt.close(fig)\n else:\n plt.show()\n\n # turn warnings back to default\n warnings.simplefilter(\"default\")",
"def plot_image(\n light_profile,\n grid,\n mask=None,\n extract_array_from_mask=False,\n zoom_around_mask=False,\n positions=None,\n as_subplot=False,\n units=\"arcsec\",\n kpc_per_arcsec=None,\n figsize=(7, 7),\n aspect=\"square\",\n cmap=\"jet\",\n norm=\"linear\",\n norm_min=None,\n norm_max=None,\n linthresh=0.05,\n linscale=0.01,\n cb_ticksize=10,\n cb_fraction=0.047,\n cb_pad=0.01,\n cb_tick_values=None,\n cb_tick_labels=None,\n title=\"Image\",\n titlesize=16,\n xlabelsize=16,\n ylabelsize=16,\n xyticksize=16,\n mask_pointsize=10,\n position_pointsize=10.0,\n grid_pointsize=1,\n output_path=None,\n output_format=\"show\",\n output_filename=\"image\",\n):\n image = light_profile.profile_image_from_grid(\n grid=grid, bypass_decorator=False\n )\n\n array_plotters.plot_array(\n array=image,\n mask=mask,\n extract_array_from_mask=extract_array_from_mask,\n zoom_around_mask=zoom_around_mask,\n positions=positions,\n as_subplot=as_subplot,\n units=units,\n kpc_per_arcsec=kpc_per_arcsec,\n figsize=figsize,\n aspect=aspect,\n cmap=cmap,\n norm=norm,\n norm_min=norm_min,\n norm_max=norm_max,\n linthresh=linthresh,\n linscale=linscale,\n cb_ticksize=cb_ticksize,\n cb_fraction=cb_fraction,\n cb_pad=cb_pad,\n cb_tick_values=cb_tick_values,\n cb_tick_labels=cb_tick_labels,\n title=title,\n titlesize=titlesize,\n xlabelsize=xlabelsize,\n ylabelsize=ylabelsize,\n xyticksize=xyticksize,\n mask_pointsize=mask_pointsize,\n position_pointsize=position_pointsize,\n grid_pointsize=grid_pointsize,\n output_path=output_path,\n output_format=output_format,\n output_filename=output_filename,\n )",
"def ortho(enable=pythoncom.Empty):\r\n return _base._rsf.ortho(enable)",
"def overlay(img1, img2, title=None, interpolation=None, sizeThreshold=128):\r\n # Check for type of images and convert to np.array\r\n if isinstance(img1, sitk.Image):\r\n img1 = sitk.GetArrayFromImage(img1)\r\n if isinstance(img2, sitk.Image):\r\n img2 = sitk.GetArrayFromImage(img2)\r\n if type(img1) is not type(img2) is not np.ndarray:\r\n raise NotImplementedError('Please provide images as np.array or '\r\n 'sitk.Image.')\r\n # Check for size of images\r\n if not img1.ndim == img2.ndim == 2:\r\n raise NotImplementedError('Only supports 2D images.')\r\n\r\n if interpolation:\r\n plt.imshow(img1, cmap='summer', interpolation=interpolation)\r\n plt.imshow(img2, cmap='autumn', alpha=0.5, interpolation=interpolation)\r\n elif max(max(img1.shape), max(img2.shape)) > sizeThreshold:\r\n plt.imshow(img1, cmap='summer')\r\n plt.imshow(img2, cmap='autumn', alpha=0.5)\r\n else:\r\n plt.imshow(img1, cmap='summer', interpolation='nearest')\r\n plt.imshow(img2, cmap='autumn', alpha=0.5, interpolation='nearest')\r\n plt.title(title)\r\n plt.axis('off')\r\n plt.show()",
"def display_coronal_with_overlay(temporal_slice, coronal_slice, images, masks, label, window_min, window_max):\n img = images[temporal_slice][:,coronal_slice,:]\n msk = masks[temporal_slice][:,coronal_slice,:]==label\n\n overlay_img = overlay_binary_segmentation_contours(img, msk, window_min, window_max) \n # Flip the image so that corresponds to correct radiological view.\n plt.imshow(np.flipud(sitk.GetArrayFromImage(overlay_img)))\n plt.axis('off')\n plt.show()",
"def ortho(self, left: 'double', right: 'double', bottom: 'double', top: 'double', nearval: 'double', farval: 'double') -> \"void\":\n return _coin.SbDPViewVolume_ortho(self, left, right, bottom, top, nearval, farval)",
"def get_orthorectified_for_ortho_bounds(self, bounds):\n\n ortho_bounds, nominal_pixel_bounds = self.extract_pixel_bounds(bounds)\n # extract the values - ensure that things are within proper image bounds\n pixel_bounds = self.get_real_pixel_bounds(nominal_pixel_bounds)\n pixel_array = self.reader[\n pixel_bounds[0]:pixel_bounds[1], pixel_bounds[2]:pixel_bounds[3], self.index]\n row_arr = numpy.arange(pixel_bounds[0], pixel_bounds[1])\n col_arr = numpy.arange(pixel_bounds[2], pixel_bounds[3])\n return self.get_orthorectified_from_array(ortho_bounds, row_arr, col_arr, pixel_array)",
"def plot_prediction_overlay(tile: np.ndarray, prediction: np.ndarray):\n plt.figure()\n plt.imshow(tile)\n plt.show()",
"def plot_orb_match(img1, img2, kp1, kp2, matches, name, num_drawn=None):\r\n draw = matches[:] if num_drawn is None else matches[:num_drawn]\r\n img3 = cv.drawMatches(img1, kp1, img2, kp2, draw, None, flags=2)\r\n fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(20, 10))\r\n ax.imshow(img3)\r\n ax.set_title(name)\r\n plt.show()",
"def plot_img(im1, im2):\r\n for i in range(5):\r\n plt.subplot(2, 5, i+1)\r\n plt.imshow(im1[i].reshape(28,28), cmap='gray') # reshape image 1 and show\r\n frame = plt.gca() # eliminate the axises\r\n frame.axes.get_xaxis().set_visible(False)\r\n frame.axes.get_yaxis().set_visible(False)\r\n\r\n for i in range(5):\r\n plt.subplot(2, 5, i+1+5)\r\n plt.imshow(im2[i].reshape(28,28), cmap='gray') # reshape image 2 and show\r\n frame = plt.gca() # eliminate the axises\r\n frame.axes.get_xaxis().set_visible(False)\r\n frame.axes.get_yaxis().set_visible(False)\r\n \r\n plt.subplots_adjust(wspace =0, hspace=0, top=0.6)\r\n plt.savefig(\"p5_comparison.eps\")\r\n plt.show()",
"def do_ortho_by_tile(self, raster_list, tile_name, tmp_srtm_dir):\n all_cmd = []\n output_files_list = []\n print \"Start orthorectification :\",tile_name\n for i in range(len(raster_list)):\n raster, tile_origin = raster_list[i]\n manifest = raster.get_manifest()\n\n for image in raster.get_images_list():\n image_ok = image.replace(\".tiff\", \"_OrthoReady.tiff\")\n current_date = Utils.get_date_from_s1_raster(image)\n current_polar = Utils.get_polar_from_s1_raster(image)\n current_platform = Utils.get_platform_from_s1_raster(image)\n current_orbit_direction = Utils.get_orbit_direction(manifest)\n current_relative_orbit = Utils.get_relative_orbit(manifest)\n out_utm_zone = tile_name[0:2]\n out_utm_northern = (tile_name[2] >= 'N')\n working_directory = os.path.join(self.cfg.output_preprocess,\\\n tile_name)\n if os.path.exists(working_directory) == False:\n os.makedirs(working_directory)\n\n in_epsg = 4326\n out_epsg = 32600+int(out_utm_zone)\n if not out_utm_northern:\n out_epsg = out_epsg+100\n\n conv_result = Utils.convert_coord([tile_origin[0]], in_epsg, out_epsg)\n (x_coord, y_coord,dummy) = conv_result[0]\n conv_result = Utils.convert_coord([tile_origin[2]], in_epsg, out_epsg)\n (lrx, lry,dummy) = conv_result[0]\n \n if not out_utm_northern and y_coord < 0:\n y_coord = y_coord+10000000.\n lry = lry+10000000.\n\n ortho_image_name = current_platform\\\n +\"_\"+tile_name\\\n +\"_\"+current_polar\\\n +\"_\"+current_orbit_direction\\\n +'_{:0>3d}'.format(current_relative_orbit)\\\n +\"_\"+current_date\\\n +\".tif\"\n\n if not os.path.exists(os.path.join(working_directory,ortho_image_name)) and not os.path.exists(os.path.join(working_directory,ortho_image_name[:-11]+\"txxxxxx.tif\")): \n cmd = 'export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={};'.format(self.cfg.OTBThreads)+\"otbcli_OrthoRectification -opt.ram \"\\\n +str(self.cfg.ram_per_process)\\\n +\" -progress false -io.in \"+image_ok\\\n +\" -io.out \\\"\"+os.path.join(working_directory,\\\n ortho_image_name)\\\n +\"?&writegeom=false&gdal:co:COMPRESS=DEFLATE\\\" -interpolator nn -outputs.spacingx \"\\\n +str(self.cfg.out_spatial_res)\\\n +\" -outputs.spacingy -\"+str(self.cfg.out_spatial_res)\\\n +\" -outputs.sizex \"\\\n +str(int(round(abs(lrx-x_coord)/self.cfg.out_spatial_res)))\\\n +\" -outputs.sizey \"\\\n +str(int(round(abs(lry-y_coord)/self.cfg.out_spatial_res)))\\\n +\" -opt.gridspacing \"+str(self.cfg.grid_spacing)\\\n +\" -map utm -map.utm.zone \"+str(out_utm_zone)\\\n +\" -map.utm.northhem \"+str(out_utm_northern).lower()\\\n +\" -outputs.ulx \"+str(x_coord)\\\n +\" -outputs.uly \"+str(y_coord)\\\n +\" -elev.dem \"+tmp_srtm_dir+\" -elev.geoid \"+self.cfg.GeoidFile\n\n all_cmd.append(cmd)\n output_files_list.append(os.path.join(working_directory,\\\n ortho_image_name))\n\n self.run_processing(all_cmd, title=\"Orthorectification\")\n\n # Writing the metadata\n for f in os.listdir(working_directory):\n fullpath = os.path.join(working_directory, f)\n if os.path.isfile(fullpath) and f.startswith('s1') and f.endswith('.tif'):\n dst = gdal.Open(fullpath, gdal.GA_Update)\n oin = f.split('_')\n\n dst.SetMetadataItem('S2_TILE_CORRESPONDING_CODE', tile_name)\n dst.SetMetadataItem('PROCESSED_DATETIME', str(datetime.datetime.now().strftime('%Y:%m:%d')))\n dst.SetMetadataItem('ORTHORECTIFIED', 'true')\n dst.SetMetadataItem('CALIBRATION', str(self.cfg.calibration_type))\n dst.SetMetadataItem('SPATIAL_RESOLUTION', str(self.cfg.out_spatial_res))\n dst.SetMetadataItem('IMAGE_TYPE', 'GRD')\n dst.SetMetadataItem('FLYING_UNIT_CODE', oin[0])\n 
dst.SetMetadataItem('POLARIZATION', oin[2])\n dst.SetMetadataItem('ORBIT', oin[4])\n dst.SetMetadataItem('ORBIT_DIRECTION', oin[3])\n if oin[5][9] == 'x':\n date = oin[5][0:4]+':'+oin[5][4:6]+':'+oin[5][6:8]+' 00:00:00'\n else:\n date = oin[5][0:4]+':'+oin[5][4:6]+':'+oin[5][6:8]+' '+oin[5][9:11]+':'+oin[5][11:13]+':'+oin[5][13:15]\n dst.SetMetadataItem('ACQUISITION_DATETIME', date)\n\n return output_files_list",
"def plot_two_images(im1, im2):\n ax1=plt.subplot(1,2,1)\n plt.imshow(im1)\n plt.axis('off')\n ax2=plt.subplot(1,2,2)\n plt.imshow(im2)\n plt.axis('off')\n return ax1, ax2",
"def _plot_dipole_mri_orthoview(\n dipole,\n trans,\n subject,\n subjects_dir=None,\n coord_frame=\"head\",\n idx=\"gof\",\n show_all=True,\n ax=None,\n block=False,\n show=True,\n color=None,\n highlight_color=\"r\",\n title=None,\n width=None,\n):\n import matplotlib.pyplot as plt\n from mpl_toolkits.mplot3d import Axes3D\n\n _import_nibabel(\"plotting MRI slices\")\n\n _check_option(\"coord_frame\", coord_frame, [\"head\", \"mri\"])\n\n if idx == \"gof\":\n idx = np.argmax(dipole.gof)\n elif idx == \"amplitude\":\n idx = np.argmax(np.abs(dipole.amplitude))\n else:\n idx = _ensure_int(idx, \"idx\", 'an int or one of [\"gof\", \"amplitude\"]')\n\n vox, ori, pos, data = _get_dipole_loc(\n dipole, trans, subject, subjects_dir, coord_frame\n )\n\n dims = len(data) # Symmetric size assumed.\n dd = dims // 2\n if ax is None:\n fig, ax = plt.subplots(1, subplot_kw=dict(projection=\"3d\"))\n else:\n _validate_type(ax, Axes3D, \"ax\", \"Axes3D\", extra='when mode is \"orthoview\"')\n fig = ax.get_figure()\n\n gridx, gridy = np.meshgrid(\n np.linspace(-dd, dd, dims), np.linspace(-dd, dd, dims), indexing=\"ij\"\n )\n params = {\n \"ax\": ax,\n \"data\": data,\n \"idx\": idx,\n \"dipole\": dipole,\n \"vox\": vox,\n \"gridx\": gridx,\n \"gridy\": gridy,\n \"ori\": ori,\n \"coord_frame\": coord_frame,\n \"show_all\": show_all,\n \"pos\": pos,\n \"color\": color,\n \"highlight_color\": highlight_color,\n \"title\": title,\n \"width\": width,\n }\n _plot_dipole(**params)\n ax.view_init(elev=30, azim=-140)\n\n callback_func = partial(_dipole_changed, params=params)\n fig.canvas.mpl_connect(\"scroll_event\", callback_func)\n fig.canvas.mpl_connect(\"key_press_event\", callback_func)\n\n plt_show(show, block=block)\n return fig",
"def isOrtho(*args, **kwargs):\n \n pass",
"def ortho(self, left: 'float', right: 'float', bottom: 'float', top: 'float', nearval: 'float', farval: 'float') -> \"void\":\n return _coin.SbViewVolume_ortho(self, left, right, bottom, top, nearval, farval)",
"def tomPlot2D(data, x, y, dr):\n pfnormlog = 20*np.log10(np.abs(data))\n pfnormlog = np.transpose(np.squeeze(pfnormlog)) #reduce to 2-dimensions, transposed to match plot in paper\n \n fig1 = plt.figure()\n plt.imshow(pfnormlog, extent=[x[0]*1e3,x[-1]*1e3,y[-1]*1e3,y[0]*1e3], vmin=-dr, vmax=0, cmap='gray')\n plt.xlabel('x(mm)')\n plt.ylabel('y(mm)')\n plt.colorbar() \n\n fig2, (ax1, ax2) = plt.subplots(1,2)\n xPlotInd = int(round(len(x)/2))\n ax1.plot(y*1e3, data[xPlotInd,:])\n yPlotInd = int(round(len(y)/2))\n ax2.plot(x*1e3, data[:,yPlotInd])\n ax1.set_xlabel('y(mm)')\n xval = round(x[xPlotInd]*1e3)\n ax1.set_title('x = '+ str(xval) + ' mm')\n ax2.set_xlabel('x(mm)')\n yval = round(y[yPlotInd]*1e3)\n ax2.set_title('y = ' + str(yval)+ ' mm')\n \n fig1.savefig('pfnormlog-py.png')\n fig2.savefig('pfnormlog-cross_sections-py.png')\n \n return",
"def draw_match(pyramid, template, image_array_list):\n # Convert the image to color, so that we can put the red rectangles\n im = pyramid[0].convert(\"RGB\")\n draw = ImageDraw.Draw(im)\n\n # current image\n curr_im = 0\n # list of points with correlation > threshold\n pointslist = []\n # size of the template\n (i, j) = template.size\n for image in image_array_list:\n # get the coordinates of high correlation points\n pointslist = np.nonzero(image)\n # Resizes the red box dimensions according to the image size\n i /= 0.75 ** curr_im\n j /= 0.75 ** curr_im\n\n # draw each rectangle centered on a correlation point\n for p in range(len(pointslist[0])):\n # resizes the points coordinates according to the size\n # of the current image\n x = pointslist[1][p] / (0.75) ** curr_im\n y = pointslist[0][p] / (0.75) ** curr_im\n draw.rectangle([(x-i/2, y-j/2), (x+i/2, y+j/2)], outline=\"red\")\n curr_im += 1\n del draw\n im.show()\n # im.save(im_path+\"output/\"+im_name[im_num], \"PNG\")",
"def borderless_image(image, cmap=\"hot\", fignum=100, filename=None):\n plt.figure(fignum)\n plt.imshow(image, cmap=cmap)\n plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)\n plt.margins(0, 0)\n plt.gca().xaxis.set_major_locator(plt.NullLocator())\n plt.gca().yaxis.set_major_locator(plt.NullLocator())\n plt.show()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Plot an orthographic view of a 3D image. Use mask_image and/or threshold_image to preprocess images to be overlaid and display the overlays in a given range. See the wiki examples.
|
def plot_ortho(
image,
overlay=None,
reorient=True,
blend=False,
# xyz arguments
xyz=None,
xyz_lines=True,
xyz_color="red",
xyz_alpha=0.6,
xyz_linewidth=2,
xyz_pad=5,
orient_labels=True,
# base image arguments
alpha=1,
cmap="Greys_r",
# overlay arguments
overlay_cmap="jet",
overlay_alpha=0.9,
cbar=False,
cbar_length=0.8,
cbar_dx=0.0,
cbar_vertical=True,
# background arguments
black_bg=True,
bg_thresh_quant=0.01,
bg_val_quant=0.99,
# scale/crop/domain arguments
crop=False,
scale=False,
domain_image_map=None,
# title arguments
title=None,
titlefontsize=24,
title_dx=0,
title_dy=0,
    # 4th panel text arguments
text=None,
textfontsize=24,
textfontcolor="white",
text_dx=0,
text_dy=0,
# save & size arguments
filename=None,
dpi=500,
figsize=1.0,
flat=False,
transparent=True,
resample=False,
):
def mirror_matrix(x):
return x[::-1, :]
def rotate270_matrix(x):
return mirror_matrix(x.T)
def reorient_slice(x, axis):
return rotate270_matrix(x)
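    # helpers: transpose each extracted 2D slice and flip it vertically so the
    # slices display in the expected on-screen orientation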
# need this hack because of a weird NaN warning from matplotlib with overlays
warnings.simplefilter("ignore")
# handle `image` argument
if isinstance(image, str):
image = iio2.image_read(image)
if not isinstance(image, iio.ANTsImage):
raise ValueError("image argument must be an ANTsImage")
if image.dimension != 3:
raise ValueError("Input image must have 3 dimensions!")
# handle `overlay` argument
    if overlay is not None:
        if isinstance(overlay, str):
            overlay = iio2.image_read(overlay)
        if not isinstance(overlay, iio.ANTsImage):
            raise ValueError("overlay argument must be an ANTsImage")
        # record the overlay intensity range before zero values are masked out later
        vminol = overlay.min()
        vmaxol = overlay.max()
if overlay.components > 1:
raise ValueError("overlay cannot have more than one voxel component")
if overlay.dimension != 3:
raise ValueError("Overlay image must have 3 dimensions!")
if not iio.image_physical_space_consistency(image, overlay):
overlay = reg.resample_image_to_target(overlay, image, interp_type="linear")
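    # optionally alpha-blend the overlay into the base image; the fused image is
    # then plotted on its own and the separate overlay is discarded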
if blend:
if alpha == 1:
alpha = 0.5
image = image * alpha + overlay * (1 - alpha)
overlay = None
alpha = 1.0
if image.pixeltype not in {"float", "double"}:
scale = False # turn off scaling if image is discrete
# reorient images
    if reorient != False:
        if reorient == True:
            reorient = "RPI"
        image = image.reorient_image2(reorient)
        if overlay is not None:
            overlay = overlay.reorient_image2(reorient)
# handle `slices` argument
if xyz is None:
xyz = [int(s / 2) for s in image.shape]
for i in range(3):
if xyz[i] is None:
xyz[i] = int(image.shape[i] / 2)
# resample image if spacing is very unbalanced
    spacing = list(image.spacing)
if (max(spacing) / min(spacing)) > 3.0 and resample:
new_spacing = (1, 1, 1)
image = image.resample_image(tuple(new_spacing))
if overlay is not None:
overlay = overlay.resample_image(tuple(new_spacing))
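        # map the requested slice indices from the original voxel grid onto the
        # resampled grid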
xyz = [
int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)
]
# potentially crop image
if crop:
plotmask = image.get_mask(cleanup=0)
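        # if the automatic mask comes back empty, keep the full field of view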
if plotmask.max() == 0:
plotmask += 1
image = image.crop_image(plotmask)
if overlay is not None:
overlay = overlay.crop_image(plotmask)
# pad images
image, lowpad, uppad = image.pad_image(return_padvals=True)
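    # shift the requested slice indices by the lower padding added to each axis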
xyz = [v + l for v, l in zip(xyz, lowpad)]
if overlay is not None:
overlay = overlay.pad_image()
# handle `domain_image_map` argument
if domain_image_map is not None:
if isinstance(domain_image_map, iio.ANTsImage):
tx = tio2.new_ants_transform(
precision="float",
transform_type="AffineTransform",
dimension=image.dimension,
)
image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)
if overlay is not None:
overlay = tio.apply_ants_transform_to_image(
tx, overlay, domain_image_map, interpolation="linear"
)
elif isinstance(domain_image_map, (list, tuple)):
# expect an image and transformation
if len(domain_image_map) != 2:
raise ValueError("domain_image_map list or tuple must have length == 2")
dimg = domain_image_map[0]
if not isinstance(dimg, iio.ANTsImage):
raise ValueError("domain_image_map first entry should be ANTsImage")
tx = domain_image_map[1]
image = reg.apply_transforms(dimg, image, transform_list=tx)
if overlay is not None:
overlay = reg.apply_transforms(
dimg, overlay, transform_list=tx, interpolator="linear"
)
## single-channel images ##
if image.components == 1:
# potentially find dynamic range
if scale == True:
vmin, vmax = image.quantile((0.05, 0.95))
elif isinstance(scale, (list, tuple)):
if len(scale) != 2:
raise ValueError(
"scale argument must be boolean or list/tuple with two values"
)
vmin, vmax = image.quantile(scale)
else:
vmin = None
vmax = None
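        # panel layout: a 2x2 grid with a text/blank corner, or a flat 1x3 row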
if not flat:
nrow = 2
ncol = 2
else:
nrow = 1
ncol = 3
fig = plt.figure(figsize=(9 * figsize, 9 * figsize))
if title is not None:
basey = 0.88 if not flat else 0.66
basex = 0.5
fig.suptitle(
title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy
)
gs = gridspec.GridSpec(
nrow,
ncol,
wspace=0.0,
hspace=0.0,
top=1.0 - 0.5 / (nrow + 1),
bottom=0.5 / (nrow + 1),
left=0.5 / (ncol + 1),
right=1 - 0.5 / (ncol + 1),
)
        # convert to numpy arrays; zero-valued overlay voxels become NaN so they render as transparent
image = image.numpy()
if overlay is not None:
overlay = overlay.numpy()
if overlay.dtype not in ["uint8", "uint32"]:
overlay[np.abs(overlay) == 0] = np.nan
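        # first panel: slice along the first array axis at xyz[0]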
yz_slice = reorient_slice(image[xyz[0], :, :], 0)
ax = plt.subplot(gs[0, 0])
ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
yz_overlay = reorient_slice(overlay[xyz[0], :, :], 0)
ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol )
if xyz_lines:
# add lines
l = mlines.Line2D(
[yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],
[xyz_pad, yz_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, yz_slice.shape[1] - xyz_pad],
[yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
if orient_labels:
ax.text(
0.5,
0.98,
"S",
horizontalalignment="center",
verticalalignment="top",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.5,
0.02,
"I",
horizontalalignment="center",
verticalalignment="bottom",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.98,
0.5,
"A",
horizontalalignment="right",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.02,
0.5,
"P",
horizontalalignment="left",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.axis("off")
xz_slice = reorient_slice(image[:, xyz[1], :], 1)
ax = plt.subplot(gs[0, 1])
ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
xz_overlay = reorient_slice(overlay[:, xyz[1], :], 1)
ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol )
if xyz_lines:
# add lines
l = mlines.Line2D(
[xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],
[xyz_pad, xz_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xz_slice.shape[1] - xyz_pad],
[xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
if orient_labels:
ax.text(
0.5,
0.98,
"S",
horizontalalignment="center",
verticalalignment="top",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.5,
0.02,
"I",
horizontalalignment="center",
verticalalignment="bottom",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.98,
0.5,
"L",
horizontalalignment="right",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.02,
0.5,
"R",
horizontalalignment="left",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.axis("off")
xy_slice = reorient_slice(image[:, :, xyz[2]], 2)
if not flat:
ax = plt.subplot(gs[1, 1])
else:
ax = plt.subplot(gs[0, 2])
im = ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)
if overlay is not None:
xy_overlay = reorient_slice(overlay[:, :, xyz[2]], 2)
im = ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap, vmin=vminol, vmax=vmaxol)
if xyz_lines:
# add lines
l = mlines.Line2D(
[xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],
[xyz_pad, xy_slice.shape[0] - xyz_pad],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
l = mlines.Line2D(
[xyz_pad, xy_slice.shape[1] - xyz_pad],
[xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],
color=xyz_color,
alpha=xyz_alpha,
linewidth=xyz_linewidth,
)
ax.add_line(l)
if orient_labels:
ax.text(
0.5,
0.98,
"A",
horizontalalignment="center",
verticalalignment="top",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.5,
0.02,
"P",
horizontalalignment="center",
verticalalignment="bottom",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.98,
0.5,
"L",
horizontalalignment="right",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.text(
0.02,
0.5,
"R",
horizontalalignment="left",
verticalalignment="center",
fontsize=20 * figsize,
color=textfontcolor,
transform=ax.transAxes,
)
ax.axis("off")
if not flat:
# empty corner
ax = plt.subplot(gs[1, 0])
if text is not None:
# add text
left, width = 0.25, 0.5
bottom, height = 0.25, 0.5
right = left + width
top = bottom + height
ax.text(
0.5 * (left + right) + text_dx,
0.5 * (bottom + top) + text_dy,
text,
horizontalalignment="center",
verticalalignment="center",
fontsize=textfontsize,
color=textfontcolor,
transform=ax.transAxes,
)
# ax.text(0.5, 0.5)
ax.imshow(np.zeros(image.shape[:-1]), cmap="Greys_r")
ax.axis("off")
if cbar:
cbar_start = (1 - cbar_length) / 2
if cbar_vertical:
cax = fig.add_axes([0.9 + cbar_dx, cbar_start, 0.03, cbar_length])
cbar_orient = "vertical"
else:
cax = fig.add_axes([cbar_start, 0.08 + cbar_dx, cbar_length, 0.03])
cbar_orient = "horizontal"
fig.colorbar(im, cax=cax, orientation=cbar_orient)
## multi-channel images ##
elif image.components > 1:
raise ValueError("Multi-channel images not currently supported!")
if filename is not None:
plt.savefig(filename, dpi=dpi, transparent=transparent)
plt.close(fig)
else:
plt.show()
# turn warnings back to default
warnings.simplefilter("default")
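
# A minimal usage sketch (not part of the source above, added for illustration):
# it assumes the ANTsPy package layout where `ants.image_read`, `ants.get_ants_data`,
# `ants.threshold_image`, and this `plot_ortho` are exposed at the top level, and
# uses the bundled "mni" sample volume; names and arguments here are assumptions.
#
#   import ants
#   mni = ants.image_read(ants.get_ants_data("mni"))
#   seg = ants.threshold_image(mni, "Otsu", 3)   # 3-class segmentation used as an overlay
#   ants.plot_ortho(mni, overlay=seg, xyz_lines=True, flat=False)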
|
[
"def plot_ortho_stack(\n images,\n overlays=None,\n reorient=True,\n # xyz arguments\n xyz=None,\n xyz_lines=False,\n xyz_color=\"red\",\n xyz_alpha=0.6,\n xyz_linewidth=2,\n xyz_pad=5,\n # base image arguments\n cmap=\"Greys_r\",\n alpha=1,\n # overlay arguments\n overlay_cmap=\"jet\",\n overlay_alpha=0.9,\n # background arguments\n black_bg=True,\n bg_thresh_quant=0.01,\n bg_val_quant=0.99,\n # scale/crop/domain arguments\n crop=False,\n scale=False,\n domain_image_map=None,\n # title arguments\n title=None,\n titlefontsize=24,\n title_dx=0,\n title_dy=0,\n # 4th panel text arguemnts\n text=None,\n textfontsize=24,\n textfontcolor=\"white\",\n text_dx=0,\n text_dy=0,\n # save & size arguments\n filename=None,\n dpi=500,\n figsize=1.0,\n colpad=0,\n rowpad=0,\n transpose=False,\n transparent=True,\n orient_labels=True,\n):\n\n def mirror_matrix(x):\n return x[::-1, :]\n\n def rotate270_matrix(x):\n return mirror_matrix(x.T)\n\n def reorient_slice(x, axis):\n return rotate270_matrix(x)\n\n # need this hack because of a weird NaN warning from matplotlib with overlays\n warnings.simplefilter(\"ignore\")\n\n n_images = len(images)\n\n # handle `image` argument\n for i in range(n_images):\n if isinstance(images[i], str):\n images[i] = iio2.image_read(images[i])\n if not isinstance(images[i], iio.ANTsImage):\n raise ValueError(\"image argument must be an ANTsImage\")\n if images[i].dimension != 3:\n raise ValueError(\"Input image must have 3 dimensions!\")\n\n if overlays is None:\n overlays = [None] * n_images\n # handle `overlay` argument\n for i in range(n_images):\n if overlays[i] is not None:\n if isinstance(overlays[i], str):\n overlays[i] = iio2.image_read(overlays[i])\n if not isinstance(overlays[i], iio.ANTsImage):\n raise ValueError(\"overlay argument must be an ANTsImage\")\n if overlays[i].components > 1:\n raise ValueError(\"overlays[i] cannot have more than one voxel component\")\n if overlays[i].dimension != 3:\n raise ValueError(\"Overlay image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(images[i], overlays[i]):\n overlays[i] = reg.resample_image_to_target(\n overlays[i], images[i], interp_type=\"linear\"\n )\n\n for i in range(1, n_images):\n if not iio.image_physical_space_consistency(images[0], images[i]):\n images[i] = reg.resample_image_to_target(\n images[0], images[i], interp_type=\"linear\"\n )\n\n # reorient images\n if reorient != False:\n if reorient == True:\n reorient = \"RPI\"\n\n for i in range(n_images):\n images[i] = images[i].reorient_image2(reorient)\n\n if overlays[i] is not None:\n overlays[i] = overlays[i].reorient_image2(reorient)\n\n # handle `slices` argument\n if xyz is None:\n xyz = [int(s / 2) for s in images[0].shape]\n for i in range(3):\n if xyz[i] is None:\n xyz[i] = int(images[0].shape[i] / 2)\n\n # resample image if spacing is very unbalanced\n spacing = [s for i, s in enumerate(images[0].spacing)]\n if (max(spacing) / min(spacing)) > 3.0:\n new_spacing = (1, 1, 1)\n for i in range(n_images):\n images[i] = images[i].resample_image(tuple(new_spacing))\n if overlays[i] is not None:\n overlays[i] = overlays[i].resample_image(tuple(new_spacing))\n xyz = [\n int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)\n ]\n\n # potentially crop image\n if crop:\n for i in range(n_images):\n plotmask = images[i].get_mask(cleanup=0)\n if plotmask.max() == 0:\n plotmask += 1\n images[i] = images[i].crop_image(plotmask)\n if overlays[i] is not None:\n overlays[i] = overlays[i].crop_image(plotmask)\n\n # pad 
images\n for i in range(n_images):\n if i == 0:\n images[i], lowpad, uppad = images[i].pad_image(return_padvals=True)\n else:\n images[i] = images[i].pad_image()\n if overlays[i] is not None:\n overlays[i] = overlays[i].pad_image()\n xyz = [v + l for v, l in zip(xyz, lowpad)]\n\n # handle `domain_image_map` argument\n if domain_image_map is not None:\n if isinstance(domain_image_map, iio.ANTsImage):\n tx = tio2.new_ants_transform(\n precision=\"float\", transform_type=\"AffineTransform\", dimension=3\n )\n for i in range(n_images):\n images[i] = tio.apply_ants_transform_to_image(\n tx, images[i], domain_image_map\n )\n\n if overlays[i] is not None:\n overlays[i] = tio.apply_ants_transform_to_image(\n tx, overlays[i], domain_image_map, interpolation=\"linear\"\n )\n elif isinstance(domain_image_map, (list, tuple)):\n # expect an image and transformation\n if len(domain_image_map) != 2:\n raise ValueError(\"domain_image_map list or tuple must have length == 2\")\n\n dimg = domain_image_map[0]\n if not isinstance(dimg, iio.ANTsImage):\n raise ValueError(\"domain_image_map first entry should be ANTsImage\")\n\n tx = domain_image_map[1]\n for i in range(n_images):\n images[i] = reg.apply_transforms(dimg, images[i], transform_list=tx)\n if overlays[i] is not None:\n overlays[i] = reg.apply_transforms(\n dimg, overlays[i], transform_list=tx, interpolator=\"linear\"\n )\n\n # potentially find dynamic range\n if scale == True:\n vmins = []\n vmaxs = []\n for i in range(n_images):\n vmin, vmax = images[i].quantile((0.05, 0.95))\n vmins.append(vmin)\n vmaxs.append(vmax)\n elif isinstance(scale, (list, tuple)):\n if len(scale) != 2:\n raise ValueError(\n \"scale argument must be boolean or list/tuple with two values\"\n )\n vmins = []\n vmaxs = []\n for i in range(n_images):\n vmin, vmax = images[i].quantile(scale)\n vmins.append(vmin)\n vmaxs.append(vmax)\n else:\n vmin = None\n vmax = None\n\n if not transpose:\n nrow = n_images\n ncol = 3\n else:\n nrow = 3\n ncol = n_images\n\n fig = plt.figure(figsize=((ncol + 1) * 2.5 * figsize, (nrow + 1) * 2.5 * figsize))\n if title is not None:\n basey = 0.93\n basex = 0.5\n fig.suptitle(\n title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy\n )\n\n if (colpad > 0) and (rowpad > 0):\n bothgridpad = max(colpad, rowpad)\n colpad = 0\n rowpad = 0\n else:\n bothgridpad = 0.0\n\n gs = gridspec.GridSpec(\n nrow,\n ncol,\n wspace=bothgridpad,\n hspace=0.0,\n top=1.0 - 0.5 / (nrow + 1),\n bottom=0.5 / (nrow + 1) + colpad,\n left=0.5 / (ncol + 1) + rowpad,\n right=1 - 0.5 / (ncol + 1),\n )\n\n # pad image to have isotropic array dimensions\n vminols=[]\n vmaxols=[]\n for i in range(n_images):\n images[i] = images[i].numpy()\n if overlays[i] is not None:\n vminols.append( overlays[i].min() )\n vmaxols.append( overlays[i].max() )\n overlays[i] = overlays[i].numpy()\n if overlays[i].dtype not in [\"uint8\", \"uint32\"]:\n overlays[i][np.abs(overlays[i]) == 0] = np.nan\n\n ####################\n ####################\n for i in range(n_images):\n yz_slice = reorient_slice(images[i][xyz[0], :, :], 0)\n if not transpose:\n ax = plt.subplot(gs[i, 0])\n else:\n ax = plt.subplot(gs[0, i])\n ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlays[i] is not None:\n yz_overlay = reorient_slice(overlays[i][xyz[0], :, :], 0)\n ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap,\n vmin=vminols[i], vmax=vmaxols[i])\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],\n 
[xyz_pad, yz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice.shape[1] - xyz_pad],\n [yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"S\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"I\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"A\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"P\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n ####################\n ####################\n\n xz_slice = reorient_slice(images[i][:, xyz[1], :], 1)\n if not transpose:\n ax = plt.subplot(gs[i, 1])\n else:\n ax = plt.subplot(gs[1, i])\n ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlays[i] is not None:\n xz_overlay = reorient_slice(overlays[i][:, xyz[1], :], 1)\n ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap,\n vmin=vminols[i], vmax=vmaxols[i])\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],\n [xyz_pad, xz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice.shape[1] - xyz_pad],\n [xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"A\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"P\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n ####################\n ####################\n xy_slice = reorient_slice(images[i][:, :, xyz[2]], 2)\n if not transpose:\n ax = plt.subplot(gs[i, 2])\n else:\n ax = plt.subplot(gs[2, i])\n ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlays[i] is not None:\n xy_overlay = reorient_slice(overlays[i][:, :, xyz[2]], 2)\n ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap,\n vmin=vminols[i], vmax=vmaxols[i])\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],\n [xyz_pad, xy_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice.shape[1] - xyz_pad],\n [xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n 
linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n if orient_labels:\n ax.text(\n 0.5,\n 0.98,\n \"A\",\n horizontalalignment=\"center\",\n verticalalignment=\"top\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.5,\n 0.02,\n \"P\",\n horizontalalignment=\"center\",\n verticalalignment=\"bottom\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.98,\n 0.5,\n \"L\",\n horizontalalignment=\"right\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.text(\n 0.02,\n 0.5,\n \"R\",\n horizontalalignment=\"left\",\n verticalalignment=\"center\",\n fontsize=20 * figsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n ax.axis(\"off\")\n\n ####################\n ####################\n\n if filename is not None:\n plt.savefig(filename, dpi=dpi, transparent=transparent)\n plt.close(fig)\n else:\n plt.show()\n\n # turn warnings back to default\n warnings.simplefilter(\"default\")",
"def plot_ortho_double(\n image,\n image2,\n overlay=None,\n overlay2=None,\n reorient=True,\n # xyz arguments\n xyz=None,\n xyz_lines=True,\n xyz_color=\"red\",\n xyz_alpha=0.6,\n xyz_linewidth=2,\n xyz_pad=5,\n # base image arguments\n cmap=\"Greys_r\",\n alpha=1,\n cmap2=\"Greys_r\",\n alpha2=1,\n # overlay arguments\n overlay_cmap=\"jet\",\n overlay_alpha=0.9,\n overlay_cmap2=\"jet\",\n overlay_alpha2=0.9,\n # background arguments\n black_bg=True,\n bg_thresh_quant=0.01,\n bg_val_quant=0.99,\n # scale/crop/domain arguments\n crop=False,\n scale=False,\n crop2=False,\n scale2=True,\n domain_image_map=None,\n # title arguments\n title=None,\n titlefontsize=24,\n title_dx=0,\n title_dy=0,\n # 4th panel text arguemnts\n text=None,\n textfontsize=24,\n textfontcolor=\"white\",\n text_dx=0,\n text_dy=0,\n # save & size arguments\n filename=None,\n dpi=500,\n figsize=1.0,\n flat=True,\n transpose=False,\n transparent=True,\n):\n\n def mirror_matrix(x):\n return x[::-1, :]\n\n def rotate270_matrix(x):\n return mirror_matrix(x.T)\n\n def reorient_slice(x, axis):\n return rotate270_matrix(x)\n\n # need this hack because of a weird NaN warning from matplotlib with overlays\n warnings.simplefilter(\"ignore\")\n\n # handle `image` argument\n if isinstance(image, str):\n image = iio2.image_read(image)\n if not isinstance(image, iio.ANTsImage):\n raise ValueError(\"image argument must be an ANTsImage\")\n if image.dimension != 3:\n raise ValueError(\"Input image must have 3 dimensions!\")\n\n if isinstance(image2, str):\n image2 = iio2.image_read(image2)\n if not isinstance(image2, iio.ANTsImage):\n raise ValueError(\"image2 argument must be an ANTsImage\")\n if image2.dimension != 3:\n raise ValueError(\"Input image2 must have 3 dimensions!\")\n\n # handle `overlay` argument\n if overlay is not None:\n if isinstance(overlay, str):\n overlay = iio2.image_read(overlay)\n if not isinstance(overlay, iio.ANTsImage):\n raise ValueError(\"overlay argument must be an ANTsImage\")\n if overlay.components > 1:\n raise ValueError(\"overlay cannot have more than one voxel component\")\n if overlay.dimension != 3:\n raise ValueError(\"Overlay image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(image, overlay):\n overlay = reg.resample_image_to_target(overlay, image, interp_type=\"linear\")\n\n if overlay2 is not None:\n if isinstance(overlay2, str):\n overlay2 = iio2.image_read(overlay2)\n if not isinstance(overlay2, iio.ANTsImage):\n raise ValueError(\"overlay2 argument must be an ANTsImage\")\n if overlay2.components > 1:\n raise ValueError(\"overlay2 cannot have more than one voxel component\")\n if overlay2.dimension != 3:\n raise ValueError(\"Overlay2 image must have 3 dimensions!\")\n\n if not iio.image_physical_space_consistency(image2, overlay2):\n overlay2 = reg.resample_image_to_target(\n overlay2, image2, interp_type=\"linear\"\n )\n\n if not iio.image_physical_space_consistency(image, image2):\n image2 = reg.resample_image_to_target(image2, image, interp_type=\"linear\")\n\n if image.pixeltype not in {\"float\", \"double\"}:\n scale = False # turn off scaling if image is discrete\n\n if image2.pixeltype not in {\"float\", \"double\"}:\n scale2 = False # turn off scaling if image is discrete\n\n # reorient images\n if reorient != False:\n if reorient == True:\n reorient = \"RPI\"\n image = image.reorient_image2(reorient)\n image2 = image2.reorient_image2(reorient)\n if overlay is not None:\n overlay = overlay.reorient_image2(reorient)\n if overlay2 is not None:\n overlay2 
= overlay2.reorient_image2(reorient)\n\n # handle `slices` argument\n if xyz is None:\n xyz = [int(s / 2) for s in image.shape]\n for i in range(3):\n if xyz[i] is None:\n xyz[i] = int(image.shape[i] / 2)\n\n # resample image if spacing is very unbalanced\n spacing = [s for i, s in enumerate(image.spacing)]\n if (max(spacing) / min(spacing)) > 3.0:\n new_spacing = (1, 1, 1)\n image = image.resample_image(tuple(new_spacing))\n image2 = image2.resample_image_to_target(tuple(new_spacing))\n if overlay is not None:\n overlay = overlay.resample_image(tuple(new_spacing))\n if overlay2 is not None:\n overlay2 = overlay2.resample_image(tuple(new_spacing))\n xyz = [\n int(sl * (sold / snew)) for sl, sold, snew in zip(xyz, spacing, new_spacing)\n ]\n\n # pad images\n image, lowpad, uppad = image.pad_image(return_padvals=True)\n image2, lowpad2, uppad2 = image2.pad_image(return_padvals=True)\n xyz = [v + l for v, l in zip(xyz, lowpad)]\n if overlay is not None:\n overlay = overlay.pad_image()\n if overlay2 is not None:\n overlay2 = overlay2.pad_image()\n\n # handle `domain_image_map` argument\n if domain_image_map is not None:\n if isinstance(domain_image_map, iio.ANTsImage):\n tx = tio2.new_ants_transform(\n precision=\"float\",\n transform_type=\"AffineTransform\",\n dimension=image.dimension,\n )\n image = tio.apply_ants_transform_to_image(tx, image, domain_image_map)\n image2 = tio.apply_ants_transform_to_image(tx, image2, domain_image_map)\n if overlay is not None:\n overlay = tio.apply_ants_transform_to_image(\n tx, overlay, domain_image_map, interpolation=\"linear\"\n )\n if overlay2 is not None:\n overlay2 = tio.apply_ants_transform_to_image(\n tx, overlay2, domain_image_map, interpolation=\"linear\"\n )\n elif isinstance(domain_image_map, (list, tuple)):\n # expect an image and transformation\n if len(domain_image_map) != 2:\n raise ValueError(\"domain_image_map list or tuple must have length == 2\")\n\n dimg = domain_image_map[0]\n if not isinstance(dimg, iio.ANTsImage):\n raise ValueError(\"domain_image_map first entry should be ANTsImage\")\n\n tx = domain_image_map[1]\n image = reg.apply_transforms(dimg, image, transform_list=tx)\n if overlay is not None:\n overlay = reg.apply_transforms(\n dimg, overlay, transform_list=tx, interpolator=\"linear\"\n )\n\n image2 = reg.apply_transforms(dimg, image2, transform_list=tx)\n if overlay2 is not None:\n overlay2 = reg.apply_transforms(\n dimg, overlay2, transform_list=tx, interpolator=\"linear\"\n )\n\n ## single-channel images ##\n if image.components == 1:\n\n # potentially crop image\n if crop:\n plotmask = image.get_mask(cleanup=0)\n if plotmask.max() == 0:\n plotmask += 1\n image = image.crop_image(plotmask)\n if overlay is not None:\n overlay = overlay.crop_image(plotmask)\n\n if crop2:\n plotmask2 = image2.get_mask(cleanup=0)\n if plotmask2.max() == 0:\n plotmask2 += 1\n image2 = image2.crop_image(plotmask2)\n if overlay2 is not None:\n overlay2 = overlay2.crop_image(plotmask2)\n\n # potentially find dynamic range\n if scale == True:\n vmin, vmax = image.quantile((0.05, 0.95))\n elif isinstance(scale, (list, tuple)):\n if len(scale) != 2:\n raise ValueError(\n \"scale argument must be boolean or list/tuple with two values\"\n )\n vmin, vmax = image.quantile(scale)\n else:\n vmin = None\n vmax = None\n\n if scale2 == True:\n vmin2, vmax2 = image2.quantile((0.05, 0.95))\n elif isinstance(scale2, (list, tuple)):\n if len(scale2) != 2:\n raise ValueError(\n \"scale2 argument must be boolean or list/tuple with two values\"\n )\n vmin2, vmax2 = 
image2.quantile(scale2)\n else:\n vmin2 = None\n vmax2 = None\n\n if not flat:\n nrow = 2\n ncol = 4\n else:\n if not transpose:\n nrow = 2\n ncol = 3\n else:\n nrow = 3\n ncol = 2\n\n fig = plt.figure(\n figsize=((ncol + 1) * 2.5 * figsize, (nrow + 1) * 2.5 * figsize)\n )\n if title is not None:\n basey = 0.88 if not flat else 0.66\n basex = 0.5\n fig.suptitle(\n title, fontsize=titlefontsize, color=textfontcolor, x=basex + title_dx, y=basey + title_dy\n )\n\n gs = gridspec.GridSpec(\n nrow,\n ncol,\n wspace=0.0,\n hspace=0.0,\n top=1.0 - 0.5 / (nrow + 1),\n bottom=0.5 / (nrow + 1),\n left=0.5 / (ncol + 1),\n right=1 - 0.5 / (ncol + 1),\n )\n\n # pad image to have isotropic array dimensions\n image = image.numpy()\n if overlay is not None:\n overlay = overlay.numpy()\n if overlay.dtype not in [\"uint8\", \"uint32\"]:\n overlay[np.abs(overlay) == 0] = np.nan\n\n image2 = image2.numpy()\n if overlay2 is not None:\n overlay2 = overlay2.numpy()\n if overlay2.dtype not in [\"uint8\", \"uint32\"]:\n overlay2[np.abs(overlay2) == 0] = np.nan\n\n ####################\n ####################\n yz_slice = reorient_slice(image[xyz[0], :, :], 0)\n ax = plt.subplot(gs[0, 0])\n ax.imshow(yz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n yz_overlay = reorient_slice(overlay[xyz[0], :, :], 0)\n ax.imshow(yz_overlay, alpha=overlay_alpha, cmap=overlay_cmap)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice.shape[0] - xyz[1], yz_slice.shape[0] - xyz[1]],\n [xyz_pad, yz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice.shape[1] - xyz_pad],\n [yz_slice.shape[1] - xyz[2], yz_slice.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n #######\n yz_slice2 = reorient_slice(image2[xyz[0], :, :], 0)\n if not flat:\n ax = plt.subplot(gs[0, 1])\n else:\n if not transpose:\n ax = plt.subplot(gs[1, 0])\n else:\n ax = plt.subplot(gs[0, 1])\n ax.imshow(yz_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)\n if overlay2 is not None:\n yz_overlay2 = reorient_slice(overlay2[xyz[0], :, :], 0)\n ax.imshow(yz_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [yz_slice2.shape[0] - xyz[1], yz_slice2.shape[0] - xyz[1]],\n [xyz_pad, yz_slice2.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, yz_slice2.shape[1] - xyz_pad],\n [yz_slice2.shape[1] - xyz[2], yz_slice2.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n ####################\n ####################\n\n xz_slice = reorient_slice(image[:, xyz[1], :], 1)\n if not flat:\n ax = plt.subplot(gs[0, 2])\n else:\n if not transpose:\n ax = plt.subplot(gs[0, 1])\n else:\n ax = plt.subplot(gs[1, 0])\n ax.imshow(xz_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xz_overlay = reorient_slice(overlay[:, xyz[1], :], 1)\n ax.imshow(xz_overlay, alpha=overlay_alpha, cmap=overlay_cmap)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice.shape[0] - xyz[0], xz_slice.shape[0] - xyz[0]],\n [xyz_pad, xz_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice.shape[1] - xyz_pad],\n [xz_slice.shape[1] - xyz[2], xz_slice.shape[1] - xyz[2]],\n 
color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n #######\n xz_slice2 = reorient_slice(image2[:, xyz[1], :], 1)\n if not flat:\n ax = plt.subplot(gs[0, 3])\n else:\n ax = plt.subplot(gs[1, 1])\n ax.imshow(xz_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)\n if overlay is not None:\n xz_overlay2 = reorient_slice(overlay2[:, xyz[1], :], 1)\n ax.imshow(xz_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xz_slice2.shape[0] - xyz[0], xz_slice2.shape[0] - xyz[0]],\n [xyz_pad, xz_slice2.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xz_slice2.shape[1] - xyz_pad],\n [xz_slice2.shape[1] - xyz[2], xz_slice2.shape[1] - xyz[2]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n ####################\n ####################\n xy_slice = reorient_slice(image[:, :, xyz[2]], 2)\n if not flat:\n ax = plt.subplot(gs[1, 2])\n else:\n if not transpose:\n ax = plt.subplot(gs[0, 2])\n else:\n ax = plt.subplot(gs[2, 0])\n ax.imshow(xy_slice, cmap=cmap, vmin=vmin, vmax=vmax)\n if overlay is not None:\n xy_overlay = reorient_slice(overlay[:, :, xyz[2]], 2)\n ax.imshow(xy_overlay, alpha=overlay_alpha, cmap=overlay_cmap)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice.shape[0] - xyz[0], xy_slice.shape[0] - xyz[0]],\n [xyz_pad, xy_slice.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice.shape[1] - xyz_pad],\n [xy_slice.shape[1] - xyz[1], xy_slice.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n #######\n xy_slice2 = reorient_slice(image2[:, :, xyz[2]], 2)\n if not flat:\n ax = plt.subplot(gs[1, 3])\n else:\n if not transpose:\n ax = plt.subplot(gs[1, 2])\n else:\n ax = plt.subplot(gs[2, 1])\n ax.imshow(xy_slice2, cmap=cmap2, vmin=vmin2, vmax=vmax2)\n if overlay is not None:\n xy_overlay2 = reorient_slice(overlay2[:, :, xyz[2]], 2)\n ax.imshow(xy_overlay2, alpha=overlay_alpha2, cmap=overlay_cmap2)\n if xyz_lines:\n # add lines\n l = mlines.Line2D(\n [xy_slice2.shape[0] - xyz[0], xy_slice2.shape[0] - xyz[0]],\n [xyz_pad, xy_slice2.shape[0] - xyz_pad],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n l = mlines.Line2D(\n [xyz_pad, xy_slice2.shape[1] - xyz_pad],\n [xy_slice2.shape[1] - xyz[1], xy_slice2.shape[1] - xyz[1]],\n color=xyz_color,\n alpha=xyz_alpha,\n linewidth=xyz_linewidth,\n )\n ax.add_line(l)\n ax.axis(\"off\")\n\n ####################\n ####################\n\n if not flat:\n # empty corner\n ax = plt.subplot(gs[1, :2])\n if text is not None:\n # add text\n left, width = 0.25, 0.5\n bottom, height = 0.25, 0.5\n right = left + width\n top = bottom + height\n ax.text(\n 0.5 * (left + right) + text_dx,\n 0.5 * (bottom + top) + text_dy,\n text,\n horizontalalignment=\"center\",\n verticalalignment=\"center\",\n fontsize=textfontsize,\n color=textfontcolor,\n transform=ax.transAxes,\n )\n # ax.text(0.5, 0.5)\n img_shape = list(image.shape[:-1])\n img_shape[1] *= 2\n ax.imshow(np.zeros(img_shape), cmap=\"Greys_r\")\n ax.axis(\"off\")\n\n ## multi-channel images ##\n elif image.components > 1:\n raise ValueError(\"Multi-channel images not currently supported!\")\n\n if filename is not None:\n plt.savefig(filename, dpi=dpi, 
transparent=transparent)\n plt.close(fig)\n else:\n plt.show()\n\n # turn warnings back to default\n warnings.simplefilter(\"default\")",
"def plot_image(\n light_profile,\n grid,\n mask=None,\n extract_array_from_mask=False,\n zoom_around_mask=False,\n positions=None,\n as_subplot=False,\n units=\"arcsec\",\n kpc_per_arcsec=None,\n figsize=(7, 7),\n aspect=\"square\",\n cmap=\"jet\",\n norm=\"linear\",\n norm_min=None,\n norm_max=None,\n linthresh=0.05,\n linscale=0.01,\n cb_ticksize=10,\n cb_fraction=0.047,\n cb_pad=0.01,\n cb_tick_values=None,\n cb_tick_labels=None,\n title=\"Image\",\n titlesize=16,\n xlabelsize=16,\n ylabelsize=16,\n xyticksize=16,\n mask_pointsize=10,\n position_pointsize=10.0,\n grid_pointsize=1,\n output_path=None,\n output_format=\"show\",\n output_filename=\"image\",\n):\n image = light_profile.profile_image_from_grid(\n grid=grid, bypass_decorator=False\n )\n\n array_plotters.plot_array(\n array=image,\n mask=mask,\n extract_array_from_mask=extract_array_from_mask,\n zoom_around_mask=zoom_around_mask,\n positions=positions,\n as_subplot=as_subplot,\n units=units,\n kpc_per_arcsec=kpc_per_arcsec,\n figsize=figsize,\n aspect=aspect,\n cmap=cmap,\n norm=norm,\n norm_min=norm_min,\n norm_max=norm_max,\n linthresh=linthresh,\n linscale=linscale,\n cb_ticksize=cb_ticksize,\n cb_fraction=cb_fraction,\n cb_pad=cb_pad,\n cb_tick_values=cb_tick_values,\n cb_tick_labels=cb_tick_labels,\n title=title,\n titlesize=titlesize,\n xlabelsize=xlabelsize,\n ylabelsize=ylabelsize,\n xyticksize=xyticksize,\n mask_pointsize=mask_pointsize,\n position_pointsize=position_pointsize,\n grid_pointsize=grid_pointsize,\n output_path=output_path,\n output_format=output_format,\n output_filename=output_filename,\n )",
"def display_coronal_with_overlay(temporal_slice, coronal_slice, images, masks, label, window_min, window_max):\n img = images[temporal_slice][:,coronal_slice,:]\n msk = masks[temporal_slice][:,coronal_slice,:]==label\n\n overlay_img = overlay_binary_segmentation_contours(img, msk, window_min, window_max) \n # Flip the image so that corresponds to correct radiological view.\n plt.imshow(np.flipud(sitk.GetArrayFromImage(overlay_img)))\n plt.axis('off')\n plt.show()",
"def plot_prediction_overlay(tile: np.ndarray, prediction: np.ndarray):\n plt.figure()\n plt.imshow(tile)\n plt.show()",
"def plot_mask(self):\n\n fig = plt.figure()\n ax = fig.add_subplot(111)\n im = ax.imshow(self.mask)",
"def _plot_dipole_mri_orthoview(\n dipole,\n trans,\n subject,\n subjects_dir=None,\n coord_frame=\"head\",\n idx=\"gof\",\n show_all=True,\n ax=None,\n block=False,\n show=True,\n color=None,\n highlight_color=\"r\",\n title=None,\n width=None,\n):\n import matplotlib.pyplot as plt\n from mpl_toolkits.mplot3d import Axes3D\n\n _import_nibabel(\"plotting MRI slices\")\n\n _check_option(\"coord_frame\", coord_frame, [\"head\", \"mri\"])\n\n if idx == \"gof\":\n idx = np.argmax(dipole.gof)\n elif idx == \"amplitude\":\n idx = np.argmax(np.abs(dipole.amplitude))\n else:\n idx = _ensure_int(idx, \"idx\", 'an int or one of [\"gof\", \"amplitude\"]')\n\n vox, ori, pos, data = _get_dipole_loc(\n dipole, trans, subject, subjects_dir, coord_frame\n )\n\n dims = len(data) # Symmetric size assumed.\n dd = dims // 2\n if ax is None:\n fig, ax = plt.subplots(1, subplot_kw=dict(projection=\"3d\"))\n else:\n _validate_type(ax, Axes3D, \"ax\", \"Axes3D\", extra='when mode is \"orthoview\"')\n fig = ax.get_figure()\n\n gridx, gridy = np.meshgrid(\n np.linspace(-dd, dd, dims), np.linspace(-dd, dd, dims), indexing=\"ij\"\n )\n params = {\n \"ax\": ax,\n \"data\": data,\n \"idx\": idx,\n \"dipole\": dipole,\n \"vox\": vox,\n \"gridx\": gridx,\n \"gridy\": gridy,\n \"ori\": ori,\n \"coord_frame\": coord_frame,\n \"show_all\": show_all,\n \"pos\": pos,\n \"color\": color,\n \"highlight_color\": highlight_color,\n \"title\": title,\n \"width\": width,\n }\n _plot_dipole(**params)\n ax.view_init(elev=30, azim=-140)\n\n callback_func = partial(_dipole_changed, params=params)\n fig.canvas.mpl_connect(\"scroll_event\", callback_func)\n fig.canvas.mpl_connect(\"key_press_event\", callback_func)\n\n plt_show(show, block=block)\n return fig",
"def plot_img_and_mask(img, mask):\n classes = mask.shape[2] if len(mask.shape) > 2 else 1\n fig, ax = plt.subplots(1, classes + 1)\n ax[0].set_title('Input image')\n ax[0].imshow(img)\n if classes > 1:\n for i in range(classes):\n ax[i+1].set_title(f'Output mask (class {i+1})')\n ax[i+1].imshow(mask[:, :, i])\n else:\n ax[1].set_title(f'Output mask')\n ax[1].imshow(mask)\n plt.xticks([]), plt.yticks([])\n plt.show()",
"def plot_mask(mask,mask_width,mask_height):\n plt.matshow(flatten_mask(mask).reshape(mask_height,mask_width))\n plt.show()",
"def plot_original_3d(self, path=\"images\"):\n raise NotImplementedError(\"nyi\")",
"def lutshow(img,lut):\n f,ax = plt.subplots(1,3,dpi=150)\n imshow(img,ax[0])\n ax[1].plot(lut)\n ax[1].plot(np.arange(0,256),'--')\n ax[1].set_aspect('equal', 'box')\n ax[1].tick_params(left=False,bottom=False,labelleft=False,labelbottom=False)\n imshow(lut[img],ax[2])\n return f",
"def plot_central_planes(image): \n n_x, n_y, n_z = image.shape\n fig, axs = plt.subplots(1,3, figsize = (15, 10))\n axs[0].imshow(image[n_x//2, :, :], cmap = 'gray'), axs[0].set_title('X central plane')\n axs[1].imshow(image[:, n_y//2, :], cmap = 'gray'), axs[1].set_title('Y central plane')\n axs[2].imshow(image[:, :, n_z//2], cmap = 'gray'), axs[2].set_title('Z central plane')\n plt.show()",
"def display_images_predictions3(image_array, pred_array1, pred_array2, num_images=4, image_list=False, random_images=False, overlay = True):\n ts = image_array\n pred1 = pred_array1\n pred2 = pred_array2\n samples, x, y, z = ts.shape\n print (\"samples, max, min \", samples, pred1.max(), pred1.min())\n pred1r = np.round(pred1)\n pred2r = np.round(pred2)\n\n display_list = []\n if image_list == False:\n if random_images == True:\n display_list = random.sample(range(0, samples), num_images)\n else :\n display_list = [i for i in range (num_images)]\n else:\n display_list = image_list\n\n for i in display_list:\n f, axs = plt.subplots(1,3,figsize=(15,15))\n plt.subplot(131),plt.imshow(ts[i].reshape(x, y))\n plt.title('Image '+str(i)), plt.xticks([]), plt.yticks([])\n if overlay == True:\n plt.subplot(132),plt.imshow(ts[i].reshape(x, y)), plt.imshow(pred1r[i].reshape(x, y), 'binary', interpolation='none', alpha=0.3)\n else : \n plt.subplot(132),plt.imshow(pred1r[i].reshape(x, y))\n plt.title('Pred 1'), plt.xticks([]), plt.yticks([])\n if overlay == True:\n plt.subplot(133),plt.imshow(ts[i].reshape(x, y)), plt.imshow(pred2r[i].reshape(x, y), 'binary', interpolation='none', alpha=0.3)\n else : \n plt.subplot(133),plt.imshow(pred2r[i].reshape(x, y))\n plt.title('Pred 2'), plt.xticks([]), plt.yticks([])\n plt.show()",
"def ShowSpots(image,spot_mask):\n fig, axes = plt.subplots(nrows = 1, ncols = 2, figsize = (20,10))\n axes[0].imshow(image, cmap = 'gray')\n axes[1].imshow(image, cmap = 'gray')\n axes[1].imshow(np.ma.array(spot_mask, mask = spot_mask==0), \n cmap = 'flag', alpha = 0.5)\n axes[0].title.set_text('original image')\n axes[1].title.set_text('overlay spots')\n plt.tight_layout()\n plt.show()\n return",
"def plot_from_tc_mapping():\n from target_calib import CameraConfiguration\n c = CameraConfiguration(\"1.1.0\")\n m = c.GetMapping()\n camera = CameraImage.from_tc_mapping(m)\n image = np.zeros(m.GetNPixels())\n image[::2] = 1\n camera.image = image\n plt.show()",
"def get_img_view(world, x, y, z, th, res=1, hfov_d=360, v_max=np.pi / 2, v_min=-np.pi / 12,\r\n wrap=False, blur=False, blur_kernel_size=3):\r\n\r\n X, Y, Z = world['X'], world['Y'], world['Z']\r\n dpi = 100\r\n hfov_deg = hfov_d\r\n hfov = np.deg2rad(hfov_deg)\r\n h_min = -hfov / 2\r\n h_max = hfov / 2\r\n\r\n vfov = v_max - v_min\r\n vfov_deg = np.rad2deg(vfov)\r\n\r\n resolution = res\r\n sky_colour = 'white'\r\n ground_colour = (0.1, 0.1, 0.1, 1)\r\n grass_colour = 'gray'\r\n grass_cmap = LinearSegmentedColormap.from_list('mycmap', [(0, (0, 0, 0, 1)), (1, grass_colour)])\r\n\r\n c = np.ones(Z.shape[0]) * 0.5\r\n\r\n image_ratio = vfov / hfov\r\n h_pixels = hfov_deg / resolution\r\n v_pixels = h_pixels * image_ratio\r\n\r\n im_width = h_pixels / dpi\r\n im_height = v_pixels / dpi\r\n\r\n fig = Figure(frameon=False, figsize=(im_width, im_height))\r\n ax = fig.add_axes([0., 0., 1., 1.])\r\n ax.set_xlim(h_min, h_max)\r\n ax.set_ylim(v_min, v_max)\r\n ax.get_xaxis().set_visible(False)\r\n ax.get_yaxis().set_visible(False)\r\n ax.spines['top'].set_visible(False)\r\n ax.spines['bottom'].set_visible(False)\r\n ax.spines['right'].set_visible(False)\r\n ax.spines['left'].set_visible(False)\r\n ax.set_facecolor(sky_colour)\r\n\r\n canvas = FigureCanvasAgg(fig)\r\n ground_verts = [[(h_min, v_min), (h_max, v_min), (h_max, 0), (h_min, 0)]]\r\n\r\n g = PolyCollection(ground_verts, facecolor=ground_colour, edgecolor='none')\r\n ax.add_collection(g)\r\n\r\n TH, PHI, R = cart2sph(X - x, Y - y, np.abs(Z) - z)\r\n TH_rel = pi2pi(TH - th)\r\n\r\n # fix the grass\r\n ind = (np.max(TH_rel, axis=1) - np.min(TH_rel, axis=1)) > np.pi\r\n TH_ext = np.vstack((TH_rel, np.mod(TH_rel[ind, :] - 2 * np.pi, -2 * np.pi)))\r\n n_blades = np.sum(ind)\r\n padded_ind = np.lib.pad(ind, (0, n_blades), 'constant')\r\n TH_ext[padded_ind, :] = np.mod(TH_rel[ind, :] + 2 * np.pi, 2 * np.pi)\r\n\r\n PHI_ext = np.vstack((PHI, PHI[ind, :]))\r\n R_ext = np.vstack((R, R[ind, :]))\r\n\r\n grass_verts = np.dstack((TH_ext, PHI_ext))\r\n p = PolyCollection(grass_verts, array=c, cmap=grass_cmap, edgecolors='none')\r\n ax.add_collection(p)\r\n\r\n buf = io.BytesIO()\r\n fig.savefig(buf, format='png', pad_inches=0, dpi=dpi)\r\n buf.seek(0)\r\n im = Image.open(buf)\r\n im_array = np.asarray(im)[:, :, 0:3]\r\n\r\n # grey scale and blurred image\r\n img_cv = cv2.cvtColor(np.asarray(im_array), cv2.COLOR_RGB2BGR)\r\n img_cv = cv2.cvtColor(img_cv, cv2.COLOR_BGR2GRAY)\r\n if wrap:\r\n img_cv = img_wrapper(img_cv, 1)\r\n if blur:\r\n img_cv = cv2.blur(img_cv, (blur_kernel_size, blur_kernel_size))\r\n return img_cv",
"def plot_slice(image: sitk.Image):\n img_arr = sitk.GetArrayFromImage(image)\n plt.figure()\n plt.imshow(img_arr[80, :, :], cmap='gray')\n plt.colorbar()\n plt.show()",
"def plotGlobe3D():",
"def show_anatomical_slices(img_data, title):\n\n axial_slice = img_data[:, :, int(img_data.shape[2] / 2)]\n coronal_slice = img_data[:, int(img_data.shape[1] / 2), :]\n sagittal_slice = img_data[int(img_data.shape[0] / 2), :, :]\n\n fig = plt.figure(constrained_layout=False)\n gs = fig.add_gridspec(nrows=3, ncols=2, wspace=0.01, hspace=0.01)\n ax1 = fig.add_subplot(gs[:-1, :])\n ax1.imshow(axial_slice.T, cmap=\"gray\", origin=\"lower\")\n ax1.axis('off')\n ax2 = fig.add_subplot(gs[2, 0])\n ax2.imshow(coronal_slice.T, cmap=\"gray\", origin=\"lower\")\n ax2.axis('off')\n ax3 = fig.add_subplot(gs[2, 1])\n ax3.imshow(sagittal_slice.T, cmap=\"gray\", origin=\"lower\")\n ax3.axis('off')\n\n plt.suptitle(title)\n plt.show()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Create and save an ANTsPy plot for every image matching a given regular expression in a directory, optionally recursively. This is a good function for quick visual exploration of all of the images in a directory
|
def plot_directory(
directory,
recursive=False,
regex="*",
save_prefix="",
save_suffix="",
axis=None,
**kwargs
):
def has_acceptable_suffix(fname):
suffixes = {".nii.gz"}
return sum([fname.endswith(sx) for sx in suffixes]) > 0
if directory.startswith("~"):
directory = os.path.expanduser(directory)
if not os.path.isdir(directory):
raise ValueError("directory %s does not exist!" % directory)
for root, dirnames, fnames in os.walk(directory):
for fname in fnames:
if fnmatch.fnmatch(fname, regex) and has_acceptable_suffix(fname):
load_fname = os.path.join(root, fname)
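                # swap the full image extension (e.g. ".nii.gz") for ".png" when building the output filename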
fname = fname.replace(".".join(fname.split(".")[1:]), "png")
fname = fname.replace(".png", "%s.png" % save_suffix)
fname = "%s%s" % (save_prefix, fname)
save_fname = os.path.join(root, fname)
img = iio2.image_read(load_fname)
if axis is None:
axis_range = [i for i in range(img.dimension)]
else:
axis_range = axis if isinstance(axis, (list, tuple)) else [axis]
if img.dimension > 2:
for axis_idx in axis_range:
filename = save_fname.replace(".png", "_axis%i.png" % axis_idx)
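                    # lay the slices out in a roughly square grid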
ncol = int(math.sqrt(img.shape[axis_idx]))
plot(
img,
axis=axis_idx,
nslices=img.shape[axis_idx],
ncol=ncol,
filename=filename,
**kwargs
)
else:
filename = save_fname
plot(img, filename=filename, **kwargs)
|
[
"def save_analyzed_image(self, filename, **kwargs):\n self.plot_analyzed_image(show=False)\n\n plt.savefig(filename, **kwargs)",
"def main():\n\n\n args = sys.argv\n if '-h' in args:\n print main.__doc__\n sys.exit()\n dataframe = extractor.command_line_dataframe([['f', False, 'rmag_anisotropy.txt'], ['fb', False, 'magic_measurements.txt'], ['fsa', False, 'er_samples.txt'], ['fa', False, None], ['fsum', False, None], ['fmt', False, 'svg'], ['ds', False, 'mbsf'], ['d', False, '-1 -1'], ['sav', False, False], ['WD', False, '.' ]])\n #args = sys.argv\n checked_args = extractor.extract_and_check_args(args, dataframe)\n ani_file, meas_file, samp_file, age_file, sum_file, fmt, depth_scale, depth, save_quietly, dir_path = extractor.get_vars(['f', 'fb', 'fsa', 'fa', 'fsum', 'fmt', 'ds', 'd', 'sav', 'WD'], checked_args)\n\n # format min/max depth\n try:\n dmin, dmax = depth.split()\n dmin, dmax = float(dmin), float(dmax)\n except:\n print 'you must provide depth in this format: -d dmin dmax'\n print 'could not parse \"{}\", defaulting to plotting all depths'.format('-d ' + str(depth))\n dmin, dmax = -1, -1\n\n if depth_scale:\n if age_file:\n depth_scale = 'age'\n elif 'mbsf' in depth_scale:\n depth_scale = 'sample_core_depth'\n elif 'mcd' in depth_scale:\n depth_scale = 'sample_composite_depth'\n else:\n print 'Warning: Unrecognized option \"{}\" provided for depth scale.\\nOptions for depth scale are mbsf -- meters below sea floor -- or mcd -- meters composite depth.\\nAlternatively, if you provide an age file the depth scale will be automatically set to plot by age instead.\\nUsing default \"mbsf\"'.format(depth_scale)\n depth_scale = 'sample_composite_depth'\n \n fig, figname = ipmag.aniso_depthplot(ani_file, meas_file, samp_file, age_file, sum_file, fmt, dmin, dmax, depth_scale, dir_path)\n if save_quietly:\n if dir_path == '.':\n dir_path = os.getcwd()\n print 'Saved file: {} in folder: {}'.format(figname, dir_path)\n return False\n \n app = wx.App(redirect=False)\n if not fig:\n pw.simple_warning('No plot was able to be created with the data you provided.\\nMake sure you have given all the required information and try again')\n return False\n\n dpi = fig.get_dpi()\n pixel_width = dpi * fig.get_figwidth()\n pixel_height = dpi * fig.get_figheight()\n figname = os.path.join(dir_path, figname)\n plot_frame = pmag_menu_dialogs.PlotFrame((int(pixel_width), int(pixel_height + 50)), fig, figname)\n\n app.MainLoop()",
"def data_annotation(image_path):\r\n #This for loop iterates over all images in the given data path and plots the individual images.\r\n #The coordinates of the landmarks are saved into a text file after clicking.\r\n for i in os.listdir(image_path):\r\n #Only continue with the the jpg files in the directory\r\n if i.endswith(\".jpg\"):\r\n #print the file name\r\n print(i)\r\n #Read the images individually\r\n im = cv2.imread(image_path + i + '.jpg')\r\n #Create a text file named per image\r\n if os.path.isdir('all_landmarks_together') == False:\r\n os.makedirs('all_landmarks_together')\r\n file = open('all_landmarks_together/landmarks_%s.txt' %(i),'w')\r\n\r\n #plot the image\r\n ax = plt.gca()\r\n fig = plt.gcf()\r\n implot = ax.imshow(cv2.cvtColor(im, cv2.COLOR_BGR2RGB))\r\n\r\n #print the coordinates after clicking and save these coordinates in a txt file\r\n def onclick(event):\r\n if event.xdata != None and event.ydata != None:\r\n print(event.xdata, event.ydata)\r\n file.write(str(event.xdata))\r\n file.write('\\t')\r\n file.write(str(event.ydata))\r\n file.write('\\n')\r\n\r\n #call the function\r\n cid = implot.figure.canvas.mpl_connect('button_press_event', onclick)\r\n # plt.plot(event.xdata,event.ydata,'ro',markersize=3)\r\n #show the image\r\n plt.show()\r\n #clos the file\r\n file.close()",
"def visualize_directory(self, directory, save_path=None):\n image_list = list_images_from_dir(directory)\n self.visualize_list(image_list, save_path)",
"def display_samples(folders):\n if not do_plotting:\n return\n for folder in folders:\n print(folder)\n image_files = os.listdir(folder)\n image = random.choice(image_files)\n image_file = os.path.join(folder, image)\n i = Image(filename=image_file)\n display(i)",
"def test_heatmap_plot_creates_files_in_folder(self):\n # Get dataframe from test data\n df_path = io.StringIO(good_df_matrix)\n df = pandas.read_csv(df_path,index_col=0)\n # Initialize heatmap\n heatmap = plot_heatmap.Heatmap(df)\n # Plot heatmap into temp folder path\n heatmap.plotInFolder(self._temp_dir)\n # Get plots extensions regex\n regex = '.*\\.(png|svg)$'\n # Get list of files from regex\n files_in_dir = os.listdir(self._temp_dir)\n plot_files = [x for x in files_in_dir if re.match(regex,x)]\n # Check that there is at least one plot\n if len(plot_files)<1:\n error_msg = \"The plot function should create at least one plot file in the destination folder.\"\n self.fail(error_msg)",
"def visualize_folder(self, CT_path):\n\n (self.comp_img, self.radio_img, self.filename) = self.__read_img_from_folder(\n self.comp_path, self.radio_path)\n CT_img = self.__read_CT_from_folder(CT_path, len(self.comp_img))\n for i in range(len(CT_img)):\n self.visual_results.append(self.__visualize_img(\n self.comp_img[i], self.radio_img[i], CT_img[i], self.filename[i], self.radio_path))",
"def test_grid_mapping(sample_subdirectory, regex_images):\n \n\n # Segment images in the subdirectory\n df_img,_,_ =segment_images(sample_subdirectory, regex_images)\n\n # Get stack\n stack = all_stack(df_img)\n col_peaks,row_peaks,mapping_Hz, mapping_km = get_grid_mappings(stack)\n \n fig,axes = plt.subplots(ncols=2)\n ax = axes.ravel()\n \n # Plot stack\n fig.suptitle(sample_subdirectory)\n ax[0].imshow(stack,'gray')\n h,w = stack.shape\n grid = np.ones((h,w),np.uint8)\n col_peaks2 = np.asarray(list(mapping_Hz.values()))\n \n for i in col_peaks2:\n cv2.line(grid , (i, 0), (i,h), 0, 5, 1)\n for i in row_peaks:\n cv2.line(grid , (0, i), (w,i), 0, 5, 1)\n ax[1].imshow(grid, 'gray')",
"def process_plot_mri_images(paths, params):\n\n\t# create full path of HDF5 file\n\thdf5_file = os.path.join(paths['hdf5_folder'], params['hdf5_file'])\n\n\t# read datasets from HDF5 file\n\tD = get_datasets_from_group(group_name = params['group_original_mri'], hdf5_file = hdf5_file)\n\n\t# read data from each dataset and plot mri data\n\tfor d in D:\n\n\t\t# read data from group\t\n\t\tdata = read_dataset_from_group(group_name = params['group_original_mri'], dataset = d, hdf5_file = hdf5_file)\n\n\t\t# image plot folder\n\t\timage_plot_folder = os.path.join(paths['plot_folder'], params['group_original_mri'], d.split()[-1], d)\n\n\t\t# create folder to store image to\n\t\tcreate_directory(image_plot_folder)\n\n\t\t# a single image for each image in dimensions[0]\n\t\tfor i in range(data.shape[0]):\n\n\t\t\t# create figure and axes\n\t\t\tfig, ax = plt.subplots(1, 1, figsize = (10,10))\n\t\t\t\n\t\t\t# plot mri image\n\t\t\tax.imshow(data[i], cmap = 'gray')\n\n\t\t\t# crop white space\n\t\t\tplt.gca().set_axis_off()\n\t\t\tplt.subplots_adjust(top = 1, bottom = 0, right = 1, left = 0, hspace = 0, wspace = 0)\n\t\t\tplt.margins(0,0)\n\t\t\tplt.gca().xaxis.set_major_locator(plt.NullLocator())\n\t\t\tplt.gca().yaxis.set_major_locator(plt.NullLocator())\n\t\t\t\n\t\t\t# save the figure\n\t\t\tfig.savefig(os.path.join(image_plot_folder, f'{i}.png'), dpi = 300)\n\t\t\t\n\t\t\t# close the plot environment\n\t\t\tplt.close()",
"def plot_patterned_input_i_syn_summary(rec_filename_array, svg_title=None):\n if svg_title is not None:\n remember_font_size = mpl.rcParams['font.size']\n mpl.rcParams['font.size'] = 20\n i_syn_dict = {'i_AMPA': {}, 'i_NMDA': {}, 'i_GABA': {}, 'ratio': {}}\n for condition in ['modinh0', 'modinh1', 'modinh2']:\n for rec_filename in rec_filename_array[condition]:\n rec_t, i_syn_mean_dict, i_syn_mean_low_pass_dict = process_i_syn_rec(rec_filename)\n for syn_type in i_syn_mean_low_pass_dict:\n i_syn_dict[syn_type][condition] = i_syn_mean_low_pass_dict[syn_type]\n colors = ['k', 'y', 'orange']\n for group in ['i_AMPA', 'i_NMDA', 'i_GABA']:\n fig, axes = plt.subplots(1)\n for i, (condition, title) in enumerate(zip(['modinh0', 'modinh2', 'modinh1'], ['Control',\n 'Reduced inhibition - In field', 'Reduced inhibition - Out of field'])):\n axes.plot(rec_t, i_syn_dict[group][condition], c=colors[i], label=title, linewidth=1)\n clean_axes(axes)\n axes.set_xlabel('Time (s)')\n axes.set_ylabel('Current (nA)')\n axes.set_xlim(0., 7500.)\n axes.set_xticks([0., 1500., 3000., 4500., 6000., 7500.])\n axes.set_xticklabels([0, 1.5, 3, 4.5, 6, 7.5])\n axes.tick_params(direction='out')\n axes.set_title(group, fontsize=mpl.rcParams['font.size'])\n # plt.legend(loc='best', frameon=False, framealpha=0.5)\n if group == 'i_GABA':\n axes.set_ylim(0., .7)\n else:\n axes.set_ylim(-.7, 0.)\n if svg_title is not None:\n fig.set_size_inches(4.403, 3.631)\n fig.savefig(data_dir+svg_title+' - '+group+'.svg', format='svg', transparent=True)\n plt.show()\n plt.close()\n for condition in ['modinh0', 'modinh1', 'modinh2']:\n i_syn_dict['ratio'][condition] = np.divide(np.abs(np.add(i_syn_dict['i_AMPA'][condition],\n i_syn_dict['i_NMDA'][condition])),\n i_syn_dict['i_GABA'][condition])\n fig, axes = plt.subplots(1)\n for i, (condition, title) in enumerate(zip(['modinh0', 'modinh2', 'modinh1'], ['Control',\n 'Reduced inhibition - In field', 'Reduced inhibition - Out of field'])):\n axes.plot(rec_t, i_syn_dict['ratio'][condition], c=colors[i], label=title, linewidth=2)\n clean_axes(axes)\n axes.set_xlabel('Time (s)')\n axes.set_ylabel('E:I ratio')\n axes.set_ylim(1., 2.8)\n axes.set_xlim(0., 7500.)\n axes.set_xticks([0., 1500., 3000., 4500., 6000., 7500.])\n axes.set_xticklabels([0, 1.5, 3, 4.5, 6, 7.5])\n axes.tick_params(direction='out')\n # plt.legend(loc='best', frameon=False, framealpha=0.5)\n if svg_title is not None:\n fig.set_size_inches(4.403, 3.631)\n fig.savefig(data_dir+svg_title+' - E_I ratio.svg', format='svg', transparent=True)\n plt.show()\n plt.close()\n if svg_title is not None:\n mpl.rcParams['font.size'] = remember_font_size\n for group in i_syn_dict:\n get_i_syn_mean_values(i_syn_dict[group], group)",
"def plot_energy_evolution_multi():\n dir = '../data/pic_info/'\n if not os.path.isdir('../img/'):\n os.makedirs('../img/')\n odir = '../img/ene_evolution/'\n if not os.path.isdir(odir):\n os.makedirs(odir)\n fnames = list_pic_info_dir(dir)\n for fname in fnames:\n if 'guide' in fname:\n rname = fname.replace(\".json\", \".eps\")\n oname = rname.replace(\"pic_info\", \"enes\")\n oname = odir + oname\n fname = dir + fname\n pic_info = read_data_from_json(fname)\n plot_energy_evolution(pic_info)\n plt.savefig(oname)\n plt.close()",
"def plot_images(num_rows, num_cols, num_axes, matrix, labels, num_to_class):\n\n fig = plt.figure(figsize=(20, 10))\n\n for i in range(num_axes):\n ax = fig.add_subplot(num_rows, num_cols, i + 1, xticks=[], yticks=[])\n ax.imshow(matrix[i], interpolation='nearest')\n\n # Get index of item with value == 1. The result is an array of arrays.\n idx = np.where(labels[i] == 1)[0][0]\n\n breed = num_to_class[idx]\n breed = breed.replace('_', ' ').title()\n\n ax.text(0, -5, breed, fontsize=14)\n\n return fig",
"def generatePlots(self, filename, dirname):\n print(' '.join([\"Analyzing \", filename]))\n\n processes = []\n for plotTitle, plotFields in self.plots.items():\n plotFileName = ''.join([filename, '_', plotTitle, \".png\"])\n output = ''.join([\"--output=\", dirname, \"/\", plotFileName])\n cmd = ' '.join([\"python2\", self.mavgraph, self.mavgraphOptions,\n self.legendCmd, output, plotFields, filename])\n processes.append(subprocess.Popen(cmd, shell=True))\n\n # wait for the mavgraph processes to finish before continuing\n for p in processes:\n p.wait()",
"def search_png(path):\n result = []\n for root, folders, files in os.walk(path):\n png_files = []\n for file in files:\n if file.lower().endswith('.png'):\n png_files.append(file)\n if png_files:\n result.append(root)\n result.append(png_files)\n for folder in folders:\n search_png(folder)\n return result",
"def graphs(self, path_to_images):\n import matplotlib.pyplot as plt # pylint: disable=C0415\n import matplotlib.cm as mcm # pylint: disable=C0415\n df = self.to_df()\n\n def local_graph(vx, vy, ax=None, text=True, figsize=(5, 5)):\n btrys = set(df[\"_btry\"])\n ymin = df[vy].min()\n ymax = df[vy].max()\n decy = (ymax - ymin) / 50\n colors = mcm.rainbow(numpy.linspace(0, 1, len(btrys)))\n if len(btrys) == 0:\n raise ValueError(\"The benchmark is empty.\") # pragma: no cover\n if ax is None:\n _, ax = plt.subplots(1, 1, figsize=figsize) # pragma: no cover\n ax.grid(True) # pragma: no cover\n for i, btry in enumerate(sorted(btrys)):\n subset = df[df[\"_btry\"] == btry]\n if subset.shape[0] > 0:\n tx = subset[vx].mean()\n ty = subset[vy].mean()\n if not numpy.isnan(tx) and not numpy.isnan(ty):\n subset.plot(x=vx, y=vy, kind=\"scatter\",\n label=btry, ax=ax, color=colors[i])\n if text:\n ax.text(tx, ty + decy, btry, size='small',\n color=colors[i], ha='center', va='bottom')\n ax.set_xlabel(vx)\n ax.set_ylabel(vy)\n return ax\n\n res = []\n if self._xaxis is not None and self._yaxis is not None:\n for vx in self._xaxis:\n for vy in self._yaxis:\n self.fLOG(f\"Plotting {vx} x {vy}\")\n func_graph = lambda ax=None, text=True, vx=vx, vy=vy, **kwargs: \\\n local_graph(vx, vy, ax=ax, text=text, **kwargs)\n\n if path_to_images is not None:\n img = os.path.join(\n path_to_images, f\"img-{self.Name}-{vx}x{vy}.png\")\n gr = self.LocalGraph(\n func_graph, img, root=path_to_images)\n self.fLOG(f\"Saving '{img}'\")\n fig, ax = plt.subplots(1, 1, figsize=(8, 8))\n gr.plot(ax=ax, text=True)\n fig.savefig(img)\n self.fLOG(\"Done\")\n res.append(gr)\n plt.close('all')\n else:\n gr = self.LocalGraph(func_graph)\n res.append(gr)\n return res",
"def annotate_pattern(pos_dict,ex_dict,tag):\n for dufile in os.listdir(IN):\n if dufile.endswith(\"_parsed.xml\"):\n # call function to creat a POS file (into directory ./output/POS by default)\n convert(IN + dufile)\n posfile = dufile.replace(\"_parsed.xml\", \"_pos.txt\")\n # Do extraction and annotation step for each feature\n results = get_annotation(posfile, pos_dict, ex_dict, tag)\n print(\"results for file \" +dufile + \" :\" + str(results))\n\n # Annotate results to du_file\n annotate_du_file(dufile,results)",
"def create_ana_images(self):\n log.debug(\"start\")\n os.chdir(self._p_analysis_tmp)\n exif_attributes=self._exif_attributes\n exif_attributes=\" \".join([\"-\"+a for a in exif_attributes])\n\n # quiet option suppreses regular output\n cmd_exif=ImageAnalyzer.CMD_EXIFTOOL_JSON.replace(\"_EXIF_\",self._exiftool)\n cmd_exif=cmd_exif.replace(\"ATT\",exif_attributes)\n\n cmd_out = None\n runner = Runner()\n ret_code=runner.run_cmd(cmd_exif)\n if ret_code == 0:\n cmd_out=runner.get_output()\n files_metadata={}\n\n try:\n files_metadata=json.loads(cmd_out)\n except JSONDecodeError as e:\n err_details={\"msg\":e.msg,\"col\":str(e.colno),\"line\":str(e.lineno)}\n log.error(\"JSON Decode Error: %(msg)s error occured in output at column %(col)s, line %(line)s\",err_details)\n\n for file_metadata in files_metadata:\n\n filename=Path(file_metadata[\"SourceFile\"])\n filename=filename.stem+\"_ana\"+filename.suffix\n file_metadata[\"TargetFile\"]=os.path.join(self._p_analysis,filename)\n file_metadata[\"FocusBox\"]=ImageAnalyzer.get_focus_box(file_metadata)\n file_metadata[\"Description\"]=ImageAnalyzer.create_analysis_text(file_metadata)\n # convert to a os magick command\n draw_config=self._magick_box_config.copy()\n try:\n draw_config[\"_FILE_IN_\"]=file_metadata[\"SourceFile\"]\n draw_config[\"_FILE_OUT_\"]=file_metadata[\"TargetFile\"]\n draw_config[\"_TEXT_\"]=file_metadata[\"Description\"]\n draw_config[\"_X0_\"]=str(file_metadata[\"FocusBox\"][0][0])\n draw_config[\"_Y0_\"]=str(file_metadata[\"FocusBox\"][0][1])\n draw_config[\"_X1_\"]=str(file_metadata[\"FocusBox\"][2][0])\n draw_config[\"_Y1_\"]=str(file_metadata[\"FocusBox\"][2][1])\n except TypeError as e:\n log.error(\"not all metadata found to create focus box (%s)\",e)\n continue\n # replace template\n cmd_magick=ImageAnalyzer.CMD_MAGICK_DRAW_FOCUS_BOX\n for k,v in draw_config.items():\n cmd_magick=cmd_magick.replace(k,v)\n file_metadata[\"CmdMagick\"]=cmd_magick\n\n # writing files with focus box and meta data\n runner = Runner()\n for file_metadata in files_metadata:\n cmd=file_metadata.get(\"CmdMagick\")\n\n if not cmd:\n continue\n ret_code=runner.run_cmd(cmd)\n if ret_code == 0:\n log.info(\"Writing file %s\",file_metadata['TargetFile'])\n cmd_out=runner.get_output()\n else:\n log.error(\"Error writing file %s\",file_metadata['TargetFile'])\n\n return files_metadata",
"def plotTree(self,dim,substitution_patterns=[],line_kwargs={},marker_kwargs={}, right_to_left = False):\n\t\tright_to_left_mult = 1\n\t\tif right_to_left:\n\t\t\tright_to_left_mult = -1\n\t\tl, model_points = self.getTree(substitution_patterns)\n\t\tlast_y = 0\n\t\tfor ll in l:\n\t\t\tif ll[1][0] in self.keys() and ll[1][1] in self.keys():\n\t\t\t\tplt.plot([right_to_left_mult*ll[0][0],right_to_left_mult*ll[0][1]], [np.mean(self.getNode(ll[1][0])[dim]),np.mean(self.getNode(ll[1][1])[dim])],ll[2],**line_kwargs)\n\t\tfor ll in model_points:\n\t\t\tif ll[1][0] in self.keys():\n\t\t\t\tplt.plot(right_to_left_mult*ll[0][0], np.mean(self.getNode(ll[1][0])[dim]),ll[2],**marker_kwargs)\n\t\t\t\tx = ll[0][0]\n\t\t\t\ty = np.mean(self.getNode(ll[1][0])[dim])\n\t\t\t\tif abs(last_y-y) < 0.1:\n\t\t\t\t\ty = y + 0.1\n\t\t\t\tif 'name' in self.getNode(ll[1][0]):\n\t\t\t\t\tplt.text(x,y, self.getNode(ll[1][0])['name'],rotation=30,va='bottom',size=9)\n\t\t\t\telse:\n\t\t\t\t\tplt.text(x,y, ll[1][0],rotation=30,va='bottom',size=9)\n\t\tplt.title(dim)",
"def visualize_pattern(activations, pdf_filepath, scale='layerscale', cmap_style='viridis'):\n os.makedirs(os.path.dirname('./'+pdf_filepath), exist_ok=True)\n with PdfPages(pdf_filepath) as pdf:\n globalnorm = Visualizer._get_norm(activations.layeractivations)\n globalvmin, globalvmax = Visualizer._get_global_min_max(activations.layeractivations)\n\n for index_layer, (layeractivation, layername) in enumerate(zip(activations.layeractivations, activations.layernames)):\n layervmin, layervmax = Visualizer._get_global_min_max(layeractivation)\n layernorm = Visualizer._get_norm(layeractivation)\n\n max_cols = 4\n if (len(layeractivation.shape) == 1):\n layeractivation = layeractivation.unsqueeze(0).unsqueeze(0)\n n_channels = layeractivation.shape[0]\n n_cols = min(n_channels, max_cols)\n\n fig, axes = plt.subplots(-(-n_channels // max_cols), n_cols, sharey=True, subplot_kw={'xticks': []})\n fig.suptitle(f'Layer {index_layer}: {layername}')\n #fig.suptitle(f'Layer {index_layer+1}')\n for i, ax in enumerate(np.array(axes).reshape(-1)):\n if i < n_channels:\n if scale == 'standard':\n cmap = ax.imshow(layeractivation[i], aspect='equal', cmap=plt.get_cmap(cmap_style))\n elif scale == 'layernorm':\n cmap = ax.imshow(layeractivation[i], aspect='equal', cmap=plt.get_cmap(cmap_style), norm=layernorm)\n elif scale == 'layerscale':\n cmap = ax.imshow(layeractivation[i], aspect='equal', cmap=plt.get_cmap(cmap_style), vmin=layervmin, vmax=layervmax)\n elif scale == 'globalnorm':\n cmap = ax.imshow(layeractivation[i], aspect='equal', cmap=plt.get_cmap(cmap_style), norm=globalnorm)\n elif scale == 'globalscale':\n cmap = ax.imshow(layeractivation[i], aspect='equal', cmap=plt.get_cmap(cmap_style), vmin=globalvmin, vmax=globalvmax)\n elif scale == 'globalPositive':\n cmap = ax.imshow(layeractivation[i], aspect='equal', cmap=plt.get_cmap(cmap_style), vmin=0, vmax=globalvmax)\n else:\n raise NotImplementedError\n else:\n ax.axis('off')\n cax = fig.add_axes([0.2,0.05,0.6,0.02])\n cbar = fig.colorbar(cmap, cax=cax, orientation='horizontal')\n cbar.ax.tick_params(labelsize=10)\n\n pdf.savefig(fig, bbox_inches='tight', pad_inches=0.5)\n plt.close()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Adds the XSRF-TOKEN cookie to the current response.
|
def add_cookie(self):
self.handler.response.set_cookie(
'XSRF-TOKEN', self.token.generate_token_string())
|
[
"async def add_csrf_token_cookie(request, response):\n token = await generate_token()\n\n # Set secure httponly csrf token\n response.cookies['t'] = token\n response.cookies['t']['httponly'] = True\n response.cookies['t']['secure'] = app.config.get('SECURE_COOKIE')\n\n # Set public csrf token for javascript\n response.cookies['csrf_token'] = token\n response.cookies['csrf_token']['secure'] = app.config.get('SECURE_COOKIE')\n\n # Secure all header response\n secure_headers.sanic(response)",
"def set_csrf_cookie(response, csrf_token):\n response.set_cookie(_CSRF_FIELD_NAME, value=csrf_token,\n #secure=True, # It would be nice to set this, but it messes up local testing. Since we only allow HTTPS connections, it's probably okay to leave this False...?\n httponly=True, path='/',\n expires=datetime.datetime.now()+datetime.timedelta(7))",
"async def csrf(request): # pylint: disable=unused-argument\n token = get_new_token()\n response = JSONResponse({\"csrftoken\": token})\n response.set_cookie(\n settings.CSRF_COOKIE_NAME,\n token,\n httponly=settings.CSRF_COOKIE_HTTPONLY,\n secure=settings.CSRF_COOKIE_SECURE,\n )\n return response",
"def get_xsrf_token():\n return bottle.request.get_cookie(\n _XSRF_TOKEN_COOKIE, secret=_get_session_cookie_secret())",
"def set_cookie(self):\n\n response = self.environment.response\n\n response.cookies[\"registered\"] = \"yes\"\n response.cookies[\"registered\"][\"expires\"] = 365 * 24 * 3600 # 1 year\n response.cookies[\"registered\"][\"path\"] = \"/\"",
"def _update_cookie(self, response):\r\n self.cookie = {\r\n 'Cookie': 'MoodleSession={};'.format(\r\n response.history[0].cookies.get('MoodleSession')\r\n )\r\n }\r\n self.session.headers.update(self.cookie)",
"def change_token_for_cookie(self, id_token):\n #try:\n expires_in = timedelta(days = 5)\n session_cookie = firebase_admin.auth.create_session_cookie(\n id_token, expires_in=expires_in)\n # maybe can change status success for something else.\n expires = datetime.now() + expires_in\n response = jsonify(status=\"success\", token=id_token)\n #uncomment secure = True and samesite=None when hosting\n response.set_cookie(\n 'session', session_cookie, expires=expires, #secure=True, samesite=None # httponly=True, secure=True\n )\n print(response)\n return response\n\n #except:\n #return abort(401, \"Failed to create a session cookie\")",
"def ensure_xsrf_token():\n xsrf_token = get_xsrf_token()\n if not xsrf_token:\n xsrf_token = misc_util.generate_random_id(16)\n bottle.response.set_cookie(\n _XSRF_TOKEN_COOKIE, xsrf_token, secret=_get_session_cookie_secret())\n return xsrf_token",
"def set_cart_cookie(cart, response):\n ten_years = timedelta(days=(365 * 10))\n response.set_signed_cookie(\n COOKIE_NAME, cart.token, max_age=int(ten_years.total_seconds()))",
"async def process_response(self, request, response):\n await super().process_response(request, response)\n if COOKIE_AUTH_KEY in request:\n if response.prepared:\n raise RuntimeError(\"Cannot save cookie into prepared response\")\n\n cookie = request[COOKIE_AUTH_KEY]\n if cookie == '':\n response.del_cookie(self.cookie_name)\n else:\n response.set_cookie(self.cookie_name, cookie)",
"def setcookie():\n jwt = create_access_token(identity=current_user.id)\n resp = make_response(f'<img src=\"http://{PUBLISHER_DOMAIN}/setcookie/{jwt}\" >', 200)\n return resp",
"def process_response(self, request, response):\n\n if not response.cookies.keys():\n return response\n\n # If setting cookie on a 301/2,\n # return 200 and replace the content with a javascript redirector\n if response.status_code != 200 and response.has_header('Location'):\n location = response.get('Location')\n response.content = REDIRECT_HTML.replace('REDIRECT_ME', location)\n response.status_code = 200\n\n pack = {}\n for key in response.cookies.keys():\n pack[key] = response.cookies[key].value\n del(response.cookies[key])\n\n pack_s = json.dumps(pack)\n encoded = base58.b58encode(pack_s)\n\n response.set_cookie('zappa', encoded)\n\n return response",
"def addACookie(self, rawCookie):\n cooked = _easyCookie(rawCookie)\n if cooked:\n self.cjar.set_cookie(cooked)",
"def process_response(self, request, response):\n\n if hasattr(request, 'delete_token') and request.delete_token:\n response.delete_cookie('access_token')\n requests.delete(build_url(request.META['HTTP_HOST'], ['auth', request.COOKIES['access_token']]))\n return response",
"def __update_session_headers(self, response=None):\n loop_count = 0\n if response is None:\n user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '+\\\n '(KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36'\n self.session.headers['User-Agent'] = user_agent\n response = self.session.get(url=self.SEARCH_UI_URL)\n else:\n copy_response = deepcopy(response)\n if 'Set-Cookie' in copy_response.headers.keys() and loop_count == 0:\n loop_count += 1\n header = copy_response.headers.pop('Set-Cookie')\n try:\n self.session.headers['Cookie'] += '; '+header\n except (KeyError):\n self.session.headers['Cookie'] = header",
"def add_cookie(self, cookie):\n self.cache[cookie.key] = cookie",
"def enable_cookie(self, cookies=None):\r\n if self.environ.get('HTTP_COOKIE'):\r\n cookies = [SimpleCookie(self.environ.get('HTTP_COOKIE'))]\r\n\r\n if cookies:\r\n for cookie in cookies:\r\n for morsel in cookie.values():\r\n morsel['path'] = '/'\r\n # TODO: fixme\r\n k, v = cookie.output().split(':')[0:2]\r\n self.headers += [(k,v)]\r\n else:\r\n cookie = SimpleCookie()\r\n cookie['JSESSIONID'] = 'dummy'\r\n cookie['JSESSIONID']['path'] = '/'\r\n k, v = cookie.output().split(':')\r\n self.headers += [(k,v)]",
"def set_xsrf_token(xsrf_token):\n global _xsrf_token\n _xsrf_token = xsrf_token",
"def add_cookies(self, cookies):\n\n self.cookies = cookies"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Verifies that the request has a valid X-XSRF-TOKEN token. Raises HTTPForbidden otherwise.
|
def verify(self):
token_str = self.handler.request.headers.get('X-XSRF-TOKEN')
if not token_str:
raise HTTPForbidden('no XSRF header')
try:
self.token.verify_token_string(token_str)
except xsrf.XSRFException:
raise HTTPForbidden('invalid XSRF token')
|
[
"def _csrf_token_valid(request):\r\n # TODO: rename this header to WWWHISPER_CRSFTOKEN.\r\n header_token = request.META.get('HTTP_X_CSRFTOKEN', '')\r\n cookie_token = request.COOKIES.get(settings.CSRF_COOKIE_NAME, '')\r\n if (len(header_token) != csrf.CSRF_KEY_LENGTH or\r\n not constant_time_compare(header_token, cookie_token)):\r\n return False\r\n return True",
"def check_token():\r\n\tcookie = request.cookies.get(\"token\", None);\r\n\ttoken = Utils.get_token(cookie);\r\n\tif not token:\r\n\t\treturn Utils.make_response({\r\n\t\t\t'status': 'failure',\r\n\t\t\t'reason': 'unauthorized'\r\n\t\t\t}, 403);\r\n\telse:\r\n\t\treturn Utils.make_response({\r\n\t\t\t'status': 'success'\r\n\t\t\t}, 200);",
"def _protect_xsrf_hook():\n # No need to protect API calls.\n if bottle.request.path.startswith('/api/'):\n return\n if bottle.request.method not in ('GET', 'HEAD'):\n xsrf_token = bottle.request.forms.get('xsrf_token', 'N/A')\n if xsrf_token != get_xsrf_token():\n bottle.abort(400, 'XSRF token is incorrect or not set.')",
"def should_check_csrf(request):\n return request.auth_policy_name_for_request != \"jwt\"",
"def check_token(self, req, token_id):\n self._validate_token(req, token_id)\n return utils.send_result(200, req)",
"def verify_request_token(self, token, request):\r\n log.debug('Verify request token %r', token)\r\n tok = request.request_token or self._grantgetter(token=token)\r\n if tok:\r\n request.request_token = tok\r\n return True\r\n return False",
"def check_csrf(self, tokenToCheck):\n try:\n token = self._store.session['csrf']\n if tokenToCheck != token:\n raise KeyError\n return True\n except KeyError:\n return False",
"def requires_xsrf_token(f):\n\n @functools.wraps(f)\n def wrapper(self, *args, **kwargs):\n non_xsrf_protected_verbs = ['options', 'head', 'get']\n if (self.request.method.lower() in non_xsrf_protected_verbs or\n self.has_valid_xsrf_token()):\n return f(self, *args, **kwargs)\n else:\n return self.xsrf_fail()\n\n return wrapper",
"def ensure_xsrf_token():\n xsrf_token = get_xsrf_token()\n if not xsrf_token:\n xsrf_token = misc_util.generate_random_id(16)\n bottle.response.set_cookie(\n _XSRF_TOKEN_COOKIE, xsrf_token, secret=_get_session_cookie_secret())\n return xsrf_token",
"def csrf_protect():\n if request.endpoint == 'gconnect' or request.endpoint == 'fbconnect':\n return\n\n if request.method == \"POST\":\n token = session.pop('_csrf_token', None)\n if not token or token != request.form.get('_csrf_token'):\n abort(403)",
"def request_is_valid(Klass, request):\n if request.method == 'POST':\n arr = request.POST\n elif request.method == 'GET':\n arr = request.GET\n else:\n raise AuthorizationException()\n \n if Klass._REQUEST_KEY not in arr:\n raise AuthorizationException()\n \n tok = arr[Klass._REQUEST_KEY]\n qs = Klass.objects.filter(value=tok)\n \n if not qs.exists():\n raise InvalidTokenException(tok)\n \n if qs[0].is_disabled():\n raise DisabledTokenException(qs[0])\n \n return True",
"def validate_request_token(self, client_key, token, request):\n log.debug('Validate request token %r for %r',\n token, client_key)\n tok = request.request_token or self._grantgetter(token=token)\n if tok and tok.client_key == client_key:\n request.request_token = tok\n return True\n return False",
"def check_csrf(request):\n\n # It's important to make sure the cookie isn't empty, otherwise the attacker\n # could send the attack-POST before the user hits the page for the first\n # time and gets a cookie.\n cookie_csrf_token = request.cookies.get(_CSRF_FIELD_NAME)\n request_csrf_token = request.get(_CSRF_FIELD_NAME)\n if not cookie_csrf_token or request_csrf_token != cookie_csrf_token:\n logging.error('CSRF mismatch: req csrf=>>%s<<; cookie csrf=>>%s<<',\n request_csrf_token, cookie_csrf_token)\n webapp2.abort(403, detail='CSRF check fail. Make sure you have cookies enabled. Reload this page and try again.')",
"def csrf_protect():\n if request.method == 'POST':\n token = session.pop('_csrf_token', None)\n if not token:\n logger.debug('No CSRF token in session')\n abort(400)\n elif request.json:\n _csrf_token = request.json.get('_csrf_token')\n if token != _csrf_token:\n logger.debug('Invalid CSRF token received')\n logger.debug('{token} expected and received {_csrf_token}'.format(**locals()))\n abort(400)\n elif token != request.form.get('_csrf_token'):\n logger.debug('Invalid CSRF token received in the form')\n logger.debug('Expected {} and received {}'.format(token, request.form.get('_csrf_token')))\n abort(400)\n else:\n logger.debug('CSRF valid.')",
"def validate_request_token(self, client_key, token, request):\r\n log.debug('Validate request token %r for %r',\r\n token, client_key)\r\n tok = request.request_token or self._grantgetter(token=token)\r\n if tok and tok.client_key == client_key:\r\n request.request_token = tok\r\n return True\r\n return False",
"def checktoken(self, kind, token):\n params = {\n 'action': 'checktoken',\n 'type': kind,\n 'token': token,\n }\n if self.request(**params)['checktoken']['result'] == 'invalid':\n return False\n return True",
"def _check_csrf_token(self, request):\n if self.csrf_cookie_name and self.csrf_field:\n csrf_token = request.cookies.get(self.csrf_cookie_name, None)\n if not csrf_token:\n return False\n if csrf_token != request.params.get(self.csrf_field, None):\n return False\n return True",
"def check_token(self, token=None):\r\n if token is None:\r\n token = self.token\r\n resp, resp_body = self.method_head(\"tokens/%s\" % token, admin=True)\r\n if resp.status_code in (401, 403):\r\n raise exc.AuthorizationFailure(\"You must be an admin to make this \"\r\n \"call.\")\r\n return 200 <= resp.status_code < 300",
"def check_authorization(self):\n self.token"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns True in the Dev and testing environments.
|
def is_dev():
return os.environ.get('SERVER_SOFTWARE', '').startswith('Development/')
|
[
"def is_dev_env() -> bool:\n if os.getenv(\"APP_ENV\") == \"dev\":\n return True\n return False",
"def _is_local():\n return (bool(os.getenv('LOCAL_DEVELOPMENT')) or\n os.getenv('SERVER_SOFTWARE', '').startswith('Development/'))",
"def is_dev():\n\treturn os.environ['SERVER_SOFTWARE'].startswith('Dev')",
"def is_local_env():\n # This is set on template-samp.yaml\n logging.info(f\"ENVIRONMENT: {os.getenv('TEST_ENV')}\")\n return os.getenv(\"TEST_ENV\") == \"LOCAL\" or is_local_dynamo()",
"def is_development():\n name = os.environ.get('SERVER_NAME', '')\n return (\n os.environ.get('SERVER_SOFTWARE', '').startswith('Development')\n or name.startswith('dev-')\n or name.startswith('test')\n or name.startswith('master')\n )",
"def is_local_dev_server():\n return os.environ.get('SERVER_SOFTWARE', '').startswith('Development')",
"def local_run():\n server_software = os.environ.get('SERVER_SOFTWARE')\n if server_software is None:\n return True\n if 'remote_api' in server_software:\n return False\n if server_software.startswith(('Development', 'testutil')):\n return True\n return False",
"def test_environment(self):\n return os.path.exists(self.get_ejbca_home()) and self.jboss.test_environment()",
"def test_environment(self):\n pass",
"def is_unit_test():\n if not is_local_dev_server():\n return False\n # devappserver2 sets up some sort of a sandbox that is not activated for\n # unit tests. So differentiate based on that.\n return all(\n 'google.appengine.tools.devappserver2' not in str(p)\n for p in sys.meta_path)",
"def is_production() -> bool:\n\n return conf(\"app.mode\") == \"prod\"",
"def test_dev(self):\r\n dev = Config.dev()\r\n self.assertIsInstance(dev, bool)\r\n \r\n Config.data['dev'] = 'True'\r\n dev = Config.dev()\r\n self.assertFalse(dev)\r\n \r\n Config.data['dev'] = True\r\n dev = Config.dev()\r\n self.assertTrue(dev)\r\n \r\n Config.data['dev'] = 'Yes'\r\n dev = Config.dev()\r\n self.assertFalse(dev)",
"def testing():\n return getattr(settings, 'TESTING', False)",
"def is_local_dynamo():\n # This is set on template-samp.yaml\n return os.getenv(\"TEST_ENV\") == \"LOCAL_DYNAMO_SERVER\"",
"def isDebug():\n return isLocal() or getMelangeVersion() == 'devvin'",
"def isProdHost():\n\n return _Control.TIER.name == \"PROD\"",
"def inside_test():\n return 'inside_test' in testing.environment and testing.environment.inside_test",
"def _is_running_on_app_engine():\n return os.getenv('GAE_ENV') or (\n os.getenv('SERVER_SOFTWARE') and\n (os.getenv('SERVER_SOFTWARE').startswith('Development/') or\n os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')))",
"def in_ci():\n for key in ('CI', 'TRAVIS'):\n if os.environ.get(key, '') not in [False, '', '0', 'false']:\n return True\n return False"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Allows a method to run only if a given GAE header is present.
|
def _require_header(func, header):
@wraps(func)
def decorated(self, *args, **kwargs):
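        # allow the call when the required header is present, or always on the dev server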
if self.request.headers.get(header) or is_dev():
return func(self, *args, **kwargs)
else:
raise HTTPForbidden()
return decorated
|
[
"def verify_cron_header(f):\n @wraps(f)\n def wrapper(*args, **kwargs):\n if request.headers.get('X-Appengine-Cron') is None:\n abort(403)\n return f(*args, **kwargs)\n return wrapper",
"def require_email_http_header() -> str:\n return True",
"def build_header(app):\n @app.route(\"/header/<htype>/<filename>\", methods=[\"GET\",])\n def open_header(htype, filename):\n file_path = app.static_dir/htype/filename\n if file_path.exists():\n return send_file(file_path)\n else:\n logging.error(f\"not found: {file_path}\")\n return \"\"",
"def check_cached_headers(self, headers):\n self.assertIn(\"x-powered-by\", headers.keys(), \"Unexpected headers (not from WordPress?)\")\n return \"age\" in headers",
"def authenticate_header(self, request):\n return getattr(request, \"auth_header\", \"Unknown OAuth Error\")",
"def _headers_exist(repository_ctx, path):\n for h in _INFERENCE_ENGINE_HEADERS:\n if not repository_ctx.path(\"%s/%s\" % (path, h)).exists:\n return False\n return True",
"def test_process_response_does_not_add_header_when_turned_off(self):\n response, _, _ = self.get_process_response()\n header = 'X-REQUEST-ID'\n\n self.assertNotIn(header, response)",
"async def force_headers(request: Request):\n if request.method.upper() == 'OPTIONS':\n return text('success.')\n if not request.raw_url.startswith(b'/static') and \\\n not request.raw_url.startswith(b'/web') and \\\n not request.raw_url.startswith(b'/status') and \\\n not request.raw_url.startswith(b'/moca-twitter/static/icons/') and \\\n request.method.upper() != 'OPTIONS':\n headers = request.app.system_config.get_config('force_headers', dict, {})\n if len(headers) != 0:\n for key, value in headers.items():\n if value is None and request.headers.get(key, None) is not None:\n pass # do nothing\n elif request.headers.get(key) != value:\n raise Forbidden('Missing required header, your request was blocked.')\n else:\n pass # do nothing.",
"def test_no_header_specific(self):\n\n requirement = self.tool_basic_requirement()\n\n requirement.header = \"Test: Test\"\n\n requirement.save()\n\n helper = self.tool_get_helper()\n\n # Don't send a header and expect that to work\n\n helper.connect(\"\", \"\", \"1.1.1.1\", \"\", {})\n helper.eoh({})\n\n self.assertFalse(\n helper.enabled,\n \"Helper was enabled after sending no header\"\n )",
"def ifReplaceable():\n if headerFile.HEADER.get(\"content-type\") == None:\n headerFile.HEADER[\"content-type\"] = \"application/json\"\n else:\n notif.warning(\"priority json\")",
"def _is_header_only(self, conanfile):\n if conanfile and os.path.isfile(conanfile) and \\\n self.file_contains(conanfile, \"self.info.header_only()\"):\n return True\n return False",
"def hasHeader(self, name):\r\n return name.lower() in self._rawHeaders",
"def ifReplaceable():\n if \"content-type\" in headerFile.HEADER:\n headerFile.HEADER[\"content-type\"] = \"application/x-www-form-urlencoded\"\n else:\n notif.warning(\"priority data\")",
"def _add_header(self, pull_request):\n pull_request.body = \"\" if pull_request.body is None else pull_request.body\n if self.UNAGGREGATED_PR_HEADER not in pull_request.body:\n pull_request.body += self.UNAGGREGATED_PR_HEADER",
"def test_no_header_wildcard(self):\n\n requirement = self.tool_basic_requirement()\n\n requirement.header = \".*\"\n\n requirement.save()\n\n helper = self.tool_get_helper()\n\n # Don't send a header and expect that to work\n\n helper.connect(\"\", \"\", \"1.1.1.1\", \"\", {})\n helper.eoh({})\n\n self.assertTrue(\n helper.enabled,\n \"Helper wasn't enabled after sending no header\"\n )",
"def header(self, url, msg):\n script = '{0} (ver. {1})'.format(self.__script_name, __version__)\n self.block('Script', [script, url, '', msg])",
"def handle_global_header_none(self, header, _data):\n self._logger.debug(\"%s | Handling global header none message\", self._name)\n\n if not self.file_open:\n self._logger.warning(\n \"%s | File not open for eiger start message. Creating now.\", self._name\n )\n self._create_file(self._generate_full_file_path(), 0)\n\n # Register the series we are expecting in proceding messages\n self._series = header[SERIES]\n self._write_dataset(SERIES, header[SERIES])",
"def check_headers(self: ProjectUpdater) -> None:\n for header_file_raw in self.header_files:\n assert header_file_raw[0] == '/'\n header_file = f'src/ballistica{header_file_raw}'\n if header_file.endswith('.h'):\n _check_header(self, header_file)",
"def __contains__(self, key):\n\n return key.lower() in self.headers"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
1. Base case: all digits have been visited; store the path via self.res.append("".join(self.path)). 2. Search the letters associated with the current digit: digit = digits[path_len]; arr = dic[digit]. 3. Loop over its children.
|
def DFS(self, digits, path_len):
if path_len >= len(digits): # search completed
self.res.append("".join(self.path)) # arr to str - s = "".join(arr)
return
digit = digits[path_len]
if digit not in self.dic: # invalid input protection
raise Exception("Invalid Input {0}".format(digit))
for char in self.dic[digit]: # traverse children
self.path.append(char)
self.DFS(digits, path_len + 1)
self.path.pop()
|
[
"def recursive_method(self, digits):\n if not digits:\n return []\n # Build hashmap\n phone_dict = {\n '2': 'abc', '3': 'def', '4': 'ghi', '5': 'jkl',\n '6': 'mno', '7': 'pqrs', '8': 'tuv', '9': 'wxyz'\n }\n res = []\n\n def search(s, digits, level):\n # Recursion terminator\n if level == len(digits):\n res.append(s)\n return\n # Process logic\n letters = phone_dict[digits[level]]\n for j in range(len(letters)):\n search(s + letters[j], digits, level + 1)\n search('', digits, 0)\n return res",
"def backtrack(self, digits: str, cur_ind: int, cur_str: str) -> None:\n if digits == '':\n return\n\n # base case\n if len(cur_str) == len(digits):\n self.result.append(cur_str)\n return\n # process candidates\n for letter in self.mapping[digits[cur_ind]]:\n # go to next digit\n self.backtrack(digits, cur_ind + 1, cur_str + letter)",
"def digits(path):\n paths = getText(path)\n d = {}\n\n for p in paths:\n f = open(p,'r')\n contents = f.read()\n number = clean_digits(contents)\n if len(number) in d:\n dTemp = {len(number):d[len(number)]+1}\n d.update(dTemp)\n else:\n dTemp = {len(number):1}\n d.update(dTemp)\n return d",
"def get_path(words, cache_dict):\n n = len(words)\n L = [[] for i in range(n)]\n M = [[] for i in range(n)] \n # keep a stack where the possible paths are incrementally expanded\n stack = [(0, j) for j in range(0, n)]\n # record the path for backtracking \n path = []\n found = False\n while len(stack)>0:\n (parent, end) = stack.pop() \n sub_sentence = \" \".join(words[parent:end+1]).strip()\n spotify_id = webapi.search_track(sub_sentence, cache_dict)\n while spotify_id == None:\n if len(stack) == 0:\n return None\n (parent, end) = stack.pop() \n sub_sentence = \" \".join(words[parent:end+1]).strip()\n spotify_id = webapi.search_track(sub_sentence, cache_dict)\n M[parent].append(spotify_id)\n L[parent].append(end) \n path.append((parent, end))\n # a path is found when position n-1 is reached\n if end == (n-1):\n found = True \n # return at the first path \n return path, L, M\n else: \n # a solution is not reached, further expand the path \n stack += [(end+1, i) for i in range(end+1, n)] \n if not found:\n return None",
"def print_trie(self,root):\n\t\tif root is None:\n\t\t\treturn\n\t\tfor idx,child in enumerate(root.children):\n\t\t\tif child is not None:\n\t\t\t\tprint(child.char, child.val)\n\t\t\t\tself.print_trie(child)",
"def traverse(self,bits,ptr):\n current=self.root\n for i in range(ptr,len(bits)):\n if current.char is not None:\n return current.char,i+1\n else:\n if bits[i]==1:\n current=current.right\n if current.char is not None:\n return current.char,i+1\n else:\n current=current.left\n if current.char is not None:\n return current.char,i+1",
"def findAllTogiticalPaths(dictionary):\n alphabet_graph = Graph()\n\n if len(dictionary) == 0:\n return []\n \n first_word = dictionary[0] \n for v in first_word:\n alphabet_graph.add_vertex(v)\n \n for word_index in range(1,len(dictionary)): \n word = dictionary[word_index]\n if word is None:\n print(\"Invalid Input: one of the inputs is None\")\n return\n prev_word = dictionary[word_index-1]\n find_Adj_succ = False\n # try to get an edge between to characters in alphabet_graph from two adj words in the dict\n for v in range(len(word)):\n alphabet_graph.add_vertex(word[v])\n if not find_Adj_succ and len(word) <= len(prev_word):\n if word[v] is not prev_word[v]:\n alphabet_graph.add_Edge(word[v], prev_word[v])\n find_Adj_succ = True\n return alphabet_graph.topologicalSort()",
"def test_directory_structure():\n fh = open('tmp-patnums.txt')\n fh.readline()\n dirs = {}\n c = 0\n for line in fh:\n c += 1\n if c % 100000 == 0: print c\n num = line.strip()\n (dir1, dir2, dir3) = patentid2path(num)\n dirs.setdefault(dir1,{})\n dirs[dir1][dir2] = dirs[dir1].get(dir2,0) + 1\n if not dir1 and dir2 and dir3:\n print num, dir1, dir2, dir3\n for dir1 in sorted(dirs):\n print '>', dir1, len(dirs[dir1])\n for dir2 in sorted(dirs[dir1]):\n print ' ', dir2, dirs[dir1][dir2]",
"def _build_paths(self):\n # Clear any previous path info\n for _, tile in numpy.ndenumerate(self.tiles):\n if tile:\n tile.path_next = None\n\n # TODO: Start a 0,0 for now, but eventually will have to work out where\n # the base is and start there.\n start = self.lookup_tile(vector.Vector(0, 0))\n if not start:\n return\n\n # TODO: consider height\n\n frontier = deque([start])\n visited = set([start])\n while len(frontier) > 0:\n tile = frontier.popleft()\n\n for nxt in [t for t in self.tile_neighbours(tile)\n if t.empty and t not in visited]:\n frontier.append(nxt)\n visited.add(nxt)\n nxt.path_next = tile",
"def path_strings(self, name):\n target = Person(name)\n\n # if found\n if self.val == target:\n return [name]\n else:\n returned = [self.val.name] # list of visted nodes\n\n # standard binary search operations\n\n if target < self.val:\n # target is less than, recurse left\n if self.left is None:\n # should be there but doesn't exist\n return []\n else:\n # add node to returned\n returned.extend(self.left.path_strings(name))\n\n elif target > self.val:\n # target greater than, recurse right\n if self.right is None:\n # should be there but doesn't exist\n return []\n else:\n returned.extend(self.right.path_strings(name))\n\n\n return returned",
"def num_27(): \n def get_dirlist(path):\n \"\"\"\n Return a sorted list of all entries in path.\n This returns just the names, not the full path to the names.\n \"\"\"\n dirlist = os.listdir(path)\n dirlist.sort()\n return dirlist\n\n def print_files(path, prefix = \"\"):\n \"\"\" Print recursive listing of contents of path \"\"\"\n if prefix == \"\": # Detect outermost call, print a heading\n print(\"Folder listing for\", path)\n prefix = \"| \"\n dirlist = get_dirlist(path)\n for f in dirlist:\n print(prefix + \"- \" + f) # Print the line\n fullname = os.path.join(path, f) # Turn name into full pathname\n if os.path.isdir(fullname): # If a directory, recurse.\n print_files(fullname, prefix + \"| \")\n return None\n \"\"\"dir check\"\"\"\n #path = os.getcwd()\n path = '/private/var/mobile/Containers/Shared/AppGroup/A9DDA80F-9432-45DA-B931-2E9386579AE6/Pythonista3/Documents'\n #path = '/private/var/mobile/Containers/Shared/AppGroup/A9DDA80F-9432-45DA-B931-2E9386579AE6'\n\n print_files(path)\n return None #dirlist",
"def reconstruct_path(came_from, start, goal):\n path = []\n ### START CODE HERE ### (≈ 6 line of code)\n if(came_from == {}):\n return {}\n else:\n while(1):\n if (goal == start):\n break\n else:\n path.append(goal)\n goal = came_from[goal]\n path.append(start)\n\n path.reverse()\n\n\n\n\n ### END CODE HERE ###\n return path",
"def doPathWalk(self):\r\n self.path_dic = {0: 1} ### first step is the initial state before we've done anything\r\n self.end_point_dic = {} # initializing the dict that keeps track of all endpoints and their probabilities\r\n while len(self.path_dic): # ## the dict is used to keep track of paths in a breadth first search\r\n # as long as there is a path, keep iterating\r\n self.take_next_step() #### state of self is updated \r\n\r\n return self",
"def build_suffix_tree(text):\r\n result = []\r\n # Implement this function yourself\r\n #text = text[:-1]\r\n #print(\"text is\", text)\r\n tree = []\r\n tree.append([])\r\n counter = 0\r\n for i in range(len(text)):\r\n pat = text[i:]\r\n #print()\r\n #print(i, \"pat is\", pat)\r\n cn, flag = 0, 0\r\n head = 0\r\n while not flag:\r\n cnprev = cn\r\n #print(\"cn is now\", cn)\r\n for j in range(len(tree[cn])):\r\n (pos, l, d) = tree[cn][j]\r\n if text[pos] == pat[0 + head]:\r\n #print(\"Match!\")\r\n lab = text[pos:pos + l]\r\n n = strcmp(pat[head:], lab)\r\n #print(\"n is\", n)\r\n if n == len(pat) == len(lab):\r\n flag = 1\r\n elif n == len(lab) and n != len(pat):\r\n if d > 0:\r\n cn = d\r\n head += len(lab)\r\n #print(\"Moved over\", lab)\r\n else:\r\n #print(\"ACHTUNG!\")\r\n pass # Undefined behavior\r\n else:\r\n #print(\"Common part is\", pat[head:head + n])\r\n counter += 1\r\n tree[cn][j] = (pos, n, counter)\r\n cn = counter\r\n tree.append([])\r\n tree[cn].append((head + i + n, len(pat[head + n:]), 0))\r\n tree[cn].append((pos + n, len(lab[n:]), d))\r\n #print(\"First part is\", pat[head + n:])\r\n #print(\"Second part is\", lab[n:])\r\n flag = 1\r\n break\r\n if cn == cnprev and not flag:\r\n tree[cn].append((i + head, len(pat[head:]), 0))\r\n flag = 1\r\n #print(i, \"tree is\", tree)\r\n #tree[0].append((len(text), 0, 0))\r\n for i in range(len(tree)):\r\n for j in range(len(tree[i])):\r\n (pos, l, d) = tree[i][j]\r\n result.append(text[pos:pos + l])\r\n return result",
"def search(grid,dictionary):\n neighbours = all_grid_neighbours(grid)\n paths = []\n \n \n def do_search(path):\n # print(path)\n word = path_to_word(grid,path)\n counter = 1\n # print(\"this is {0} on counter{1}\".format(counter,word))\n counter +=1\n if word in dictionary:\n paths.append(path)\n # print(\"yess\")\n # else:\n # print(\"add afaile\")\n for next_pos in neighbours[path[-1]]:\n # print(next_pos,\" check\")#print(\"path is \", [path[-1]])\n if next_pos not in path:\n # print(next_pos)\n do_search(path+[next_pos])\n \n for position in grid:\n do_search([position])\n # print(\"next iter\")\n \n words = []\n #print(paths)\n for path in paths:\n words.append(path_to_word(grid, path))\n print(words)\n return set(words)",
"def get_sort_letters(path, focused_letter):\n listitems = []\n letter_list = []\n HOME.clearProperty(\"LetterList\")\n if SETTING(\"FolderPath\") == path:\n letter_list = SETTING(\"LetterList\").split()\n elif path:\n json_response = get_kodi_json(method=\"Files.GetDirectory\",\n params='{\"directory\": \"%s\", \"media\": \"files\"}' % path)\n if \"result\" in json_response and \"files\" in json_response[\"result\"]:\n for movie in json_response[\"result\"][\"files\"]:\n cleaned_label = movie[\"label\"].replace(\"The \", \"\")\n if cleaned_label:\n sortletter = cleaned_label[0]\n if sortletter not in letter_list:\n letter_list.append(sortletter)\n ADDON.setSetting(\"LetterList\", \" \".join(letter_list))\n ADDON.setSetting(\"FolderPath\", path)\n HOME.setProperty(\"LetterList\", \"\".join(letter_list))\n if not letter_list or not focused_letter:\n return None\n start_ord = ord(\"A\")\n for i in range(0, 26):\n letter = chr(start_ord + i)\n if letter == focused_letter:\n label = \"[B][COLOR FFFF3333]%s[/COLOR][/B]\" % letter\n elif letter in letter_list:\n label = letter\n else:\n label = \"[COLOR 55FFFFFF]%s[/COLOR]\" % letter\n listitems.append({\"label\": label})\n return listitems",
"def search_in(self, data):\n\n self.parentpointer = data\n self.pointer = data\n for index, segment in enumerate(self.keypathlist):\n tmppointer = self.parentpointer\n self.parentpointer = self.pointer\n\n if type(self.pointer) is dict:\n try:\n self.pointer = self.pointer[segment]\n except KeyError:\n self.parentpointer = tmppointer\n return\n elif type(self.pointer) is list:\n try:\n self.pointer = self.pointer[int(segment)]\n except (TypeError, IndexError):\n self.parentpointer = tmppointer\n return\n\n self.match_depth = index\n self.match_depth_segment = segment\n\n self.path_match = True\n if self.value:\n self.value_match = (self.value == self.pointer)",
"def _find_paths(self, node):\n legal_moves = self._game.find_legal_moves()\n if not legal_moves:\n score = self._game.peg_count\n if not self.possible_paths[score]:\n self.possible_paths[score] = self._game.moves.copy()\n else:\n children = []\n for peg in legal_moves:\n for move in legal_moves[peg]:\n children.append(Node((peg, move)))\n for child in children:\n self._game.move(*child.data)\n self._find_paths(child)\n try:\n self._game.undo()\n except IndexError:\n pass",
"def match_character_folders(input_dir: Path) -> list:\n # segmentation saves characters under fragment/characters\n return input_dir.glob(\"**/characters\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Converts the given time to a datetime object, then looks up the weather forecast for that time rounded to the nearest 3 hours. Builds the full tuple of weather and time values, then uses it to predict with the model loaded for the given station. An HTML string is returned.
|
def predict_func(time, station):
given_time = datetime.datetime.strptime(time, "%d %B %Y %I:%M %p")
weather_tuple = [8, 0, 1, 0, 0, 0, 0]#default values
icon = "02d"
try:
observation = owm.three_hours_forecast('Dublin,IE')
w = observation.get_forecast()
rounded_time = roundTime(given_time,roundTo=180*60)#round to 3 hour
#default values
for weather in w:
time = datetime.datetime.strptime(weather.get_reference_time('iso'), "%Y-%m-%d %H:%M:%S+00")
if rounded_time == time:
icon = weather.get_weather_icon_name()
weather_tuple = tuple_builder(weather.get_status(),round(weather.get_temperature()['temp']-273.15, 2))
except: pass
pred_tuple = weather_tuple + [given_time.day, given_time.hour, given_time.minute, given_time.weekday()]
station = prepro(station.upper())
filename = 'dublinbikes/static/models/' + station + '.sav'
model = pickle.load(open(filename, 'rb'))
prediction = math.floor(model.predict([pred_tuple])[0])
heading = '<div style="text-align: center;font-size:90%;border-top: solid rgb(8,76,85) 1px;border-bottom: solid rgb(8,76,85) 1px;color: rgb(8,76,85);">' + station + " "+ str(given_time.day) + "/" + str(given_time.month)+ "/" + str(given_time.year) + " " +str(given_time.hour) + ':' + str(add_zero(given_time)) +'</span></div>'
iconhtml = '<div id="standinfo"><div><img src="http://openweathermap.org/img/wn/' + icon + '@2x.png" alt="Weather Icon" style="height:80px;width:80px;display:flex;align-items:center;"></div>'
text = '<div id="overall" style="display:flex;justify-content:center;flex-direction: column;"> <div>' + '<span style="font-weight:bold;font-size:130%;"> Bikes: </span> <span style="color:red;font-weight:bold;font-size:130%;font-family: "Times New Roman", Times, serif;">' + str(prediction) + '</span> <img src = "/static/Imgs/bike_icon2.png" alt = "Bike" style="margin-bottom:8px;" >' +'</div>'
text2 = '<div> <span style="font-weight:bold;font-size:130%;"> Stands: </span> <span style="color:blue;font-weight:bold;font-size:130%;font-family: "Times New Roman", Times, serif;">' + str(abs(capacity['capacity'][station] - prediction)) + '</span> <img src = "static/Imgs/parking_icon.png" alt = "Bike" style="margin-bottom:8px;"></div></div></div>'
return heading + iconhtml + text + text2
|
[
"def weather_update(place,hour=0,minute=0,shuffle_urls=False,return_extreme=False,ignore_print=False):\n # Step 0) If program isnt run as __main__, must check that [hour] and [minute] are acceptable\n if not isinstance(hour, (int, long)) or not isinstance(minute, (int, long)):\n print \"[Hour] and/or [minute] not INTEGER(S). Please specify hour [0-23] and minute [0-59]\\nExiting...\"; sys.exit(1)\n if hour < 0 or hour > 23 or minute < 0 or minute > 59:\n print \"Hour or minute not in valid range, [0-23] and [0-59]\\nExiting...\"; sys.exit(1)\n\n # Step 1) Find the urls with regex matching\n list_of_urls = get_list_of_results(place)\n #print '\\nNumber of found urls: ', len(list_of_urls)\n\n # Step 2) Follow these urls and retrieve weather info for next 24 hrs\n list_place, html_weather = retrieve_weather_raw_data(list_of_urls,shuffle_urls)\n\n # Step 3) Retrieve the specific weather data and strip everything else\n date_stamp = '' # This string might be returned from this function\n formatted_weather_data_to_return = '' # This string might be returned from this function\n max_T_string, min_T_string = '', '' # These strings might be returned from this function\n\n for i,raw_weather in enumerate(html_weather):\n # Find all the specific weather data and save them as a list of tuples,\n # where each tuple (i.e. list element) correspond to one time interval at yr.no/../../..\n # Explore exactly this regex here: https://regex101.com/r/jH9mB9/2\n regular_expr = '<time\\sfrom=\"(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):00:00\"\\sto=\"(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):00:00.*?\\<symbol\\snumber=\".*?name=\"(.*?)\".*?\\<precipitation\\svalue=\"(.*?)\".*?\\<windSpeed\\smps=\"(.*?)\".*?\\<temperature\\sunit.*?value=\"(.*?)\"'\n key_weather_data = re.findall(regular_expr, raw_weather, re.DOTALL)\n\n # Find the weather data for the correct time interval specified by the user in [hour:minute]\n for j,kwd in enumerate(key_weather_data):\n start_hour = int(kwd[3])\n end_hour = int(kwd[7])\n if start_hour > end_hour:\n end_hour = 24 # Fix interval [18:00 -> 00:00] so that: end_hour > start_hour is always true\n if start_hour <= hour and end_hour > hour:\n k = j\n break\n\n if i == 0: # Print out the time stamp (with date) just one time\n year = int(key_weather_data[k][0])\n month = int(key_weather_data[k][1])\n day = int(key_weather_data[k][2])\n date_stamp = \"%i-%.2i-%.2i %.2i:%.2i\" %(year, month, day, hour, minute)\n if return_extreme and not ignore_print:\n print date_stamp\n\n summary = key_weather_data[k][8]\n rain = float(key_weather_data[k][9])\n wind = float(key_weather_data[k][10])\n temp = float(key_weather_data[k][11])\n\n # Print out nicely formatted weather update\n formatted_weather_data = \"%s: %s, rain:%.0f mm, wind:%.1f mps, temp:%.0f deg C\" \\\n %(list_place[i], summary, rain, wind, temp)\n formatted_weather_data_to_return += formatted_weather_data + '\\n'\n\n # Find extreme temperatures\n if return_extreme:\n if i == 0:\n max_T_value = temp; min_T_value = temp\n max_T_string = formatted_weather_data\n min_T_string = formatted_weather_data\n else:\n if max_T_value < temp:\n max_T_value = temp\n max_T_string = formatted_weather_data\n if min_T_value > temp:\n min_T_value = temp\n min_T_string = formatted_weather_data\n\n final_weather_update = date_stamp + '\\n' + formatted_weather_data_to_return[:-2] # [:-2] --> Remove that last newline i.e. '\\n'\n if return_extreme:\n return max_T_value, max_T_string, min_T_value, min_T_string\n else:\n return final_weather_update",
"def get_weather_for_time_stamp(ts):\n\tts_string = ts.strftime('%Y-%m-%d %H:%M:%S')\n\tweather_data = WeatherData(ts, 1)\n\tquery =\t\"\"\"SELECT w.time, w.temperature, w.wind, w.humidity, w.precipitation, w.pressure, w.station,\n\t\t\tabs(strftime(\\'%%s\\', \\'%s\\') - strftime(\\'%%s\\', w.time)) as 'closest_time'\n\t\t\tFROM weather_history w ORDER BY abs(strftime(\\'%%s\\', \\'%s\\') - strftime(\\'%%s\\', time)) \n\t\t\tlimit 30;\"\"\" % (ts_string, ts_string)\n\tdata = get_data_from_database(query)\n\tweather_data.set_data_from_database(data)\n\treturn weather_data",
"def weather_at_time(self, t: torch.Tensor) -> torch.Tensor:\n\n inbetween = (\n ((t % self._normalized_timestep_delta) / self._normalized_timestep_delta)\n .unsqueeze(1)\n .to(self._device)\n )\n left_index = (t // self._normalized_timestep_delta).long().to(self._device)\n\n if any(torch.isnan(t)):\n raise ValueError()\n if any(left_index < 0):\n left_index = torch.zeros_like(left_index)\n if any(left_index >= len(self._normalized_weather_tensor) - 2):\n left_index = torch.zeros_like(left_index) - 2\n\n right_index = left_index + 1\n\n left_weather = self._normalized_weather_tensor[left_index]\n right_weather = self._normalized_weather_tensor[right_index]\n\n return left_weather * (1 - inbetween) + right_weather * inbetween",
"def get_weather_at(self, timeobject):\n return weather. \\\n find_closest_weather(self.forecast.weathers,\n formatting.to_UNIXtime(timeobject))",
"def make_station_observation_request(station, day):\n now = dt.datetime.now().isoformat()\n\n r = requests.get(f'{DARK_SKY_URL}/{settings.DARK_SKY_KEY}/{station.latitude},{station.longitude},{day}')\n\n # make sure the web result is valid\n if r.status_code == 200:\n content = r.json()\n measurements = []\n\n # generate hourly measurements\n for obs in content['hourly']['data']:\n timestamp = obs['time']\n timestamp = dt.datetime.fromtimestamp(timestamp)\n\n # add precip\n measurements.append(\n Measurement(\n station_id=station.station_id,\n metric_id='00003',\n value=obs['precipIntensity'],\n date_time=timestamp\n )\n )\n\n # add temp\n measurements.append(\n Measurement(\n station_id=station.station_id,\n metric_id='00001',\n value=obs['temperature'],\n date_time=timestamp\n )\n )\n\n # add humidity\n measurements.append(\n Measurement(\n station_id=station.station_id,\n metric_id='00002',\n value=obs['humidity'],\n date_time=timestamp\n )\n )\n print(f'{now}: {station.station_id} complete')\n return measurements\n else:\n print(f'{now}: {station.station_id} failed')\n return None",
"async def get_visualisation_weather(session: ClientSession, url: str) -> str:\n full_html: str = await fetch(session, url, params=get_wttr_params())\n return get_weather_block_from_html(full_html)",
"def get_time_station():\n # To transform latitude and longitude into kilometers\n a = 6378.136\n e = 0.006694470\n lat0 = 41.0\n lon0 = -123.0\n dx = (pi / 180.0) * a * cos(lat0 * pi / 180.0) / sqrt(1.0 - e * e * \\\n sin(lat0 * pi / 180.0) * sin(lat0 * pi / 180.0))\n dy = (3.6 * pi / 648.0) * a * (1.0 - e * e) / ((1.0 - e * e * sin(lat0 * \\\n pi / 180.0) * sin(lat0 * pi / 180.0)) ** 1.5)\n\n # Get the locations of the sources of the LFEs\n LFEloc = np.loadtxt('../data/Plourde_2015/templates_list.txt', \\\n dtype={'names': ('name', 'family', 'lat', 'lon', 'depth', 'eH', \\\n 'eZ', 'nb'), \\\n 'formats': ('S13', 'S3', np.float, np.float, np.float, \\\n np.float, np.float, np.int)}, \\\n skiprows=1)\n lats = np.zeros(len(LFEloc))\n lons = np.zeros(len(LFEloc))\n for ie in range(0, len(LFEloc)):\n lats[ie] = LFEloc[ie][2]\n lons[ie] = LFEloc[ie][3]\n xs = dx * (lons - lon0)\n ys = dy * (lats - lat0)\n\n # Get the locations of the stations\n staloc = pd.read_csv('../data/Plourde_2015/station_locations.txt', \\\n sep=r'\\s{1,}', header=None)\n staloc.columns = ['station', 'network', 'channels', 'location', \\\n 'server', 'latitude', 'longitude']\n\n # Get the origin time for each of the templates\n origintime = pickle.load(open('timearrival/origintime.pkl', 'rb'))\n\n slowness = {}\n # Loop on the stations\n for ir in range(0, len(staloc)):\n # Compute source-receiver distances\n distance = []\n maxEWlist = []\n maxNSlist = []\n maxUDlist = []\n timeEWlist = []\n timeNSlist = []\n timeUDlist = []\n # Loop on the templates\n for ie in range(0, len(LFEloc)):\n filename = LFEloc[ie][0].decode('utf-8')\n # Open time arrival files\n data = pickle.load(open('timearrival/' + filename +'.pkl', 'rb'))\n stations = data[0]\n maxEW = data[1]\n maxNS = data[2]\n maxUD = data[3]\n timeEW = data[4]\n timeNS = data[5]\n timeUD = data[6]\n # If the station was used for this template\n for i in range(0, len(stations)):\n if (stations[i] == staloc['station'][ir]):\n latr = staloc['latitude'][ir]\n lonr = staloc['longitude'][ir]\n xr = dx * (lonr - lon0)\n yr = dy * (latr - lat0)\n distance.append(sqrt((xr - xs[ie]) ** 2.0 + \\\n (yr - ys[ie]) ** 2.0))\n maxEWlist.append(maxEW[i])\n maxNSlist.append(maxNS[i])\n maxUDlist.append(maxUD[i])\n timeEWlist.append(timeEW[i] - origintime[filename])\n timeNSlist.append(timeNS[i] - origintime[filename])\n timeUDlist.append(timeUD[i] - origintime[filename])\n # Linear regression\n if (len(distance) > 0):\n x = np.reshape(np.array(distance + distance + distance), \\\n (3 * len(distance), 1))\n y = np.reshape(np.array(timeEWlist + timeNSlist + timeUDlist), \\\n (3 * len(distance), 1))\n w = list(map(lambda x : pow(x, 3.0), maxEWlist)) + \\\n list(map(lambda x : pow(x, 3.0), maxNSlist)) + \\\n list(map(lambda x : pow(x, 3.0), maxUDlist))\n w = np.array(w)\n regr = linear_model.LinearRegression(fit_intercept=False)\n regr.fit(x, y, w)\n y_pred = regr.predict(x)\n R2 = r2_score(y, y_pred)\n s = regr.coef_[0][0]\n # Plot\n plt.figure(1, figsize=(10, 10))\n plt.plot(x, y, 'ko')\n plt.plot(x, y_pred, 'r-')\n plt.xlabel('Distance (km)', fontsize=24)\n plt.ylabel('Travel time (s)', fontsize=24)\n plt.title('{} - R2 = {:4.2f} - slowness = {:4.3f} s/km'.format( \\\n staloc['station'][ir], R2, s), fontsize=24)\n plt.savefig('timearrival/' + staloc['station'][ir] + \\\n '.eps', format='eps')\n plt.close(1)\n slowness[staloc['station'][ir]] = s\n return slowness",
"def get_forecast_for_today(data):\n description, content, temperature = None, None, None\n\n today_forecast_index = None\n chunks = data.split(\"\\n\\n\")\n for i, chunk in enumerate(chunks):\n if chunk.startswith(\"Forecast for \"):\n today_forecast_index = i\n break\n\n if today_forecast_index:\n today_forecast = chunks[today_forecast_index]\n\n description = today_forecast.split(\"\\n\", 1)[0]\n description = description.replace(\"Forecast for \", \"\")\n description = description.replace(\"the rest of \", \"\")\n description = description.strip()\n\n items = today_forecast.split(\"\\n\")[1:]\n\n if len(items) > 1:\n content = \" \".join(items)\n else:\n content = items[0]\n\n content = expand_contractions(content)\n\n today_details = chunks[today_forecast_index + 1]\n\n if today_details.startswith('Precis'):\n lines = today_details.split(\"\\n\")\n precis_line = lines[0]\n\n if precis_line.startswith(\"Precis\"):\n precis = precis_line.replace(\"Precis\", \"\")\n precis = precis.replace(\":\", \"\")\n precis = precis.strip()\n if precis.endswith(\".\"):\n precis = precis[:-1]\n\n # temp typically follows the precis line, but not always\n if len(lines) > 1:\n temp_line = lines[1]\n # temp appears to alway be last item on line\n temp_line = temp_line.strip()\n temperature = temp_line.split()[-1]\n\n else:\n # details should be on one line\n today_details = today_details.split(\"\\n\")[0]\n items = today_details.split(\" \")\n items = filter(None, items) # remove empty items\n\n if len(items) == 3:\n location, precis, temperature = items\n\n precis = precis.strip()\n if precis.endswith(\".\"):\n precis = precis[:-1]\n\n temperature = temperature.replace(\"Max\", \"\")\n temperature = temperature.strip()\n\n elif len(items) == 2:\n location, precis = items\n\n precis = precis.strip()\n if precis.endswith(\".\"):\n precis = precis[:-1]\n\n return (description, content, precis, temperature)",
"def time_of_trip(datum, city):\n \n # YOUR CODE HERE\n if city == 'NYC':\n #Matching time format with the data\n time= datetime.strptime(datum['starttime'],'%m/%d/%Y %H:%M:%S')\n month= int(time.strftime(\"%-m\")) #storing month value\n day_of_week= str(datetime.strptime(datum['starttime'],'%m/%d/%Y %H:%M:%S').strftime(\"%A\")) #storing day name\n hour= int(time.strftime(\"%-H\")) #storing hour value\n elif city=='Chicago':\n time= datetime.strptime(datum['starttime'],'%m/%d/%Y %H:%M')\n month= int(time.strftime(\"%-m\"))\n day_of_week= str(datetime.strptime(datum['starttime'],'%m/%d/%Y %H:%M').strftime(\"%A\"))\n hour= int(time.strftime(\"%-H\"))\n elif city== 'Washington':\n time= datetime.strptime(datum['Start date'],'%m/%d/%Y %H:%M')\n month= int(time.strftime(\"%-m\"))\n day_of_week= str(datetime.strptime(datum['Start date'],'%m/%d/%Y %H:%M').strftime(\"%A\"))\n hour= int(time.strftime(\"%-H\"))\n \n return (month, hour, day_of_week)",
"def get_weather_for_city(cityName, countryName):\n \n #Get the data from wunderground.com\n wundergroundURL = \"https://www.wunderground.com/weather/\"\n wundergroundURL += countryName.lower()+\"/\"+cityName.lower()\n weatherRequest = requests.get(wundergroundURL)\n weatherData = weatherRequest.text\n \n #Create a beautifulsoup object from the data\n weatherSoup = BeautifulSoup(weatherData, 'html.parser')\n \n #Process the data at hand using beautifulsoup\n try:\n invalidPage = weatherSoup.find('div', attrs={'class': 'small-12 medium-8 large-6 medium-centered columns'}).text.strip()\n #Handle invalid pages\n if \"glitch\" in invalidPage:\n return \"Invalid location\"\n except:\n pass\n \n currentForecast = \"\"\n currentTemperature = weatherSoup.find('div', attrs={'class': 'condition-data'}).find('span', attrs={'class': 'wu-value wu-value-to'}).text\n currentCondition = weatherSoup.find('div', attrs={'class': 'conditions-extra small-9 medium-5 columns small-centered medium-uncentered'}).find('div', attrs={'class': 'condition-icon small-6 medium-12 columns'}).text\n \n currentTemperature = currentTemperature.strip()\n currentCondition = currentCondition.strip()\n \n currentForecast = currentTemperature + \"F \" + currentCondition\n \n return currentForecast",
"def get_model(model,fc_date,init_date=None,leadtime=None):\n from misc import haversine\n from model_specs import model_dict\n print (\"Get model data according to selected date ....\")\n if init_date is None:\n print (\"leadtime:\",leadtime,\"h\")\n else:\n print (\"init_date:\",init_date)\n print (\"fc_date:\",fc_date)\n if model == 'ARCMFC':\n filestr = (model_dict[model]['path']\n + fc_date.strftime('%Y%m%d')\n + init_date.strftime(model_dict[model]['file_template']))\n elif (model == 'mwam4' or model=='mwam8'):\n if fc_date == init_date:\n filestr = (init_date.strftime(model_dict[model]['path_template'])\n + init_date.strftime(model_dict[model]['file_template']))\n else:\n if leadtime%6!=0:\n print (\"leadtime needs to be multiple of 6h\")\n print (\"exit loop ...\")\n #sys.exit()\n else:\n tmpdate = fc_date - timedelta(hours=leadtime)\n filedate = tmpdate\n filestr = (filedate.strftime(model_dict[model]['path_template'])\n + filedate.strftime(model_dict[model]['file_template']))\n del tmpdate\n print (filestr)\n f = netCDF4.Dataset(filestr,'r')\n model_lons = f.variables[model_dict[model]['lons']][:]\n model_lats = f.variables[model_dict[model]['lats']][:]\n model_time = f.variables[model_dict[model]['time']][:]\n # Hs [time,lat,lon]\n model_Hs = f.variables[model_dict[model]['Hs']][:].squeeze()\n f.close()\n model_basetime = model_dict[model]['basetime']\n model_time_dt=[]\n for element in model_time:\n model_time_dt.append(model_basetime\n + timedelta(seconds=element))\n model_time_dt_valid = [model_time_dt[model_time_dt.index(fc_date)]]\n model_hs_valid = model_Hs[model_time_dt.index(fc_date),:,:]\n return model_time_dt, model_hs_valid, model_lons, model_lats",
"def parse_wu_table(yr, mo, dy):\n\n # -- set the file\n html = os.path.join(\"output\", \"wunderhtml\",\n \"DailyHistory_{0:04}_{1:02}_{2:02}.html\" \\\n .format(yr, mo, dy))\n fopen = open(html, \"r\")\n soup = bs4.BeautifulSoup(fopen, \"html.parser\")\n\n # -- get header\n hdr = [i.text for i in soup.find(\"table\",\n attrs={\"class\" : \"obs-table responsive\"}) \\\n .find(\"thead\").find_all(\"tr\")[0].find_all(\"th\")]\n\n # -- get the hourly weather table from html\n rows = soup.find(\"table\", attrs={\"class\" : \"obs-table responsive\"}) \\\n .find(\"tbody\").find_all(\"tr\")\n tbl = [[ele.text.strip() for ele in row.find_all(\"td\")] for row in rows]\n fopen.close()\n\n # -- convert to dataframe\n if any([\"EDT\" in i for i in hdr]):\n cols = [\"Time (EDT)\", \"Temp.\", \"Humidity\", \"Precip\"]\n else:\n cols = [\"Time (EST)\", \"Temp.\", \"Humidity\", \"Precip\"]\n data = pd.DataFrame(tbl, columns=hdr)[cols]\n data.columns = [\"time\", \"temp\", \"humidity\", \"precip\"]\n \n # -- parse columns\n def time_to_datetime(tstr):\n \"\"\" Convert Weather Underground EST to datetime. \"\"\"\n\n return datetime.datetime.strptime(\"{0:04}/{1:02}/{2:02} \" \\\n .format(yr, mo, dy) + tstr,\n \"%Y/%m/%d %I:%M %p\")\n\n data[\"time\"] = data[\"time\"].apply(time_to_datetime)\n data[\"temp\"] = pd.to_numeric(data[\"temp\"] \\\n .apply(lambda x: x.encode(\"ascii\", \"ignore\") \\\n .replace(\"F\", \"\")), errors=\"coerce\")\n data[\"humidity\"] = pd.to_numeric([i[:-1] for i in\n data[\"humidity\"]], errors=\"coerce\")\n data[\"precip\"] = [0.0 if i == \"N/A\" else float(i[:-3]) for i in\n data[\"precip\"]]\n\n # -- add daily precipitation\n data[\"daily_precip\"] = [parse_daily_precipitation(soup)] * len(data)\n\n return data",
"def get_forecast_for_tomorrow(data):\n description = None\n precis = None\n temperature_min = None\n temperature_max = None\n\n forecasts = []\n chunks = data.split(\"\\n\\n\")\n for i, chunk in enumerate(chunks):\n if chunk.startswith(\"Forecast for \"):\n forecasts.append(i)\n\n TwoForecastsPresent = len(forecasts) > 1\n\n if TwoForecastsPresent:\n\n # typically the forecast for tomorrow spans two chunks. The first\n # contains the description and the second contains the precis and\n # temperature.\n tomorrow_forecast_index = forecasts[1]\n tomorrowsForecast = chunks[tomorrow_forecast_index]\n\n description = tomorrowsForecast.split(\"\\n\", 1)[0]\n description = description.replace(\"Forecast for \", \"\")\n description = description.strip()\n\n content = tomorrowsForecast.split(\"\\n\")[1]\n content = content.strip()\n # prefer the longer description over the shorter precis\n precis = content\n\n # the temperatures for tomorrow's forecast appears to always be in\n # the following block.\n tomorrow_details = chunks[tomorrow_forecast_index + 1]\n\n if tomorrow_details.startswith('Precis'):\n lines = tomorrow_details.split(\"\\n\")\n precis_line = lines[0]\n\n if precis_line.startswith(\"Precis\"):\n precis = precis_line.replace(\"Precis\", \"\")\n precis = precis.replace(\":\", \"\")\n precis = precis.strip()\n if precis.endswith(\".\"):\n precis = precis[:-1]\n\n # temp typically follows the precis line, but not always\n if len(lines) > 1:\n temp_line = lines[1]\n items = temp_line.split(\" \")\n items = filter(None, items) # remove empty items\n\n if len(items) == 3:\n _, temperature_min, temperature_max = items\n elif len(items) == 2:\n _, temperature_max = items\n\n if temperature_min:\n temperature_min = temperature_min.replace(\"Min\", \"\")\n temperature_min = temperature_min.strip()\n\n if temperature_max:\n temperature_max = temperature_max.replace(\"Max\", \"\")\n temperature_max = temperature_max.strip()\n # temp appears to alway be last item on line\n temp_line = temp_line.strip()\n _temperature = temp_line.split()[-1]\n\n else:\n\n forecast_line = tomorrow_details.split(\"\\n\")[0]\n items = forecast_line.split(\" \")\n items = filter(None, items) # remove empty items\n try:\n location, _, temperature_min, temperature_max = items\n\n temperature_min = temperature_min.replace(\"Min\", \"\")\n temperature_min = temperature_min.strip()\n\n temperature_max = temperature_max.replace(\"Max\", \"\")\n temperature_max = temperature_max.strip()\n\n except ValueError, ex:\n logging.error(\"Error extracting 4 items from line: \\'%s\\'. items=%s\" % (forecast_line, str(items)))\n logging.exception(ex)\n\n else:\n # try one of the other formats which looks like this:\n # Sunday Fine, partly cloudy. Min 12 Max 24\n # Monday A few showers. Min 13 Max 23\n # Tuesday A few showers. Min 14 Max 23\n # Wednesday A few showers. Min 13 Max 24\n # Thursday A few showers. 
Min 15 Max 25\n # Friday Showers.\n #\n # This block format seems to always follow the UV Alert block\n tomorrow_forecast_index = None\n for i, chunk in enumerate(chunks):\n # typically the chunk starts with UV Alert but sometimes it\n # can be bunched up with the chunk before.\n if \"UV Alert\" in chunk:\n tomorrow_forecast_index = i + 1\n break\n\n if tomorrow_forecast_index is not None:\n tomorrowsForecast = chunks[tomorrow_forecast_index]\n forecast_line = tomorrowsForecast.split(\"\\n\")[0]\n\n items = forecast_line.split(\" \")\n items = filter(None, items) # remove empty items\n description, precis, temperature_min, temperature_max = items\n\n description = description.strip()\n\n precis = precis.strip()\n if precis.endswith(\".\"):\n precis = precis[:-1]\n\n temperature_min = temperature_min.replace(\"Min\", \"\")\n temperature_min = temperature_min.strip()\n\n temperature_max = temperature_max.replace(\"Max\", \"\")\n temperature_max = temperature_max.strip()\n\n return (description, precis, temperature_min, temperature_max)",
"def weather_display() -> str:\r\n\r\n #Formation of URL\r\n base_url = \"http://api.openweathermap.org/data/2.5/weather?\"\r\n api_key = weather_config()\r\n city_name = str(request.args.get(\"user_city\"))\r\n complete_url = base_url + \"appid=\" + api_key + \"&q=\" + city_name\r\n\r\n #Request of JSON Data\r\n response = requests.get(complete_url)\r\n response_data = response.json()\r\n\r\n #Parsing of JSON Data\r\n if response_data[\"cod\"] != \"404\":\r\n weather_data = response_data[\"main\"]\r\n current_temperature = weather_data[\"temp\"]\r\n current_pressure = weather_data[\"pressure\"]\r\n current_humidiy = weather_data[\"humidity\"]\r\n specific_data = response_data[\"weather\"]\r\n weather_description = specific_data[0][\"description\"]\r\n\r\n #Making Data Easier to View\r\n info_list = []\r\n info_list.append(\" Temperature (in kelvin unit) = \" + str(current_temperature))\r\n info_list.append(\" Atmospheric Pressure (in hPa unit) = \" + str(current_pressure))\r\n info_list.append(\" Humidity (in percentage) = \" + str(current_humidiy))\r\n info_list.append(\" Description = \" + str(weather_description))\r\n\r\n #Accessing Template from JSON File\r\n config_file = config_handle()\r\n weather_template = config_file[\"file_paths\"][\"weather_output\"]\r\n return render_template(weather_template, display_info=info_list)",
"def get_weather_info(req):\n\n CITYID = \"2964574\"\n WEATHER = \"http://api.openweathermap.org/data/2.5/forecast\"\n APIKEY = \"89b3e577901486c8ad601fab00edd389\"\n\n r = requests.get(WEATHER, params={\"APPID\": APIKEY, \"id\": CITYID})\n js = json.loads(r.text)\n\n for i in range(len(js['list']) - 1, 0, -1):\n date, time = js['list'][i]['dt_txt'].split(' ')\n time = datetime.datetime.strptime(time, \"%H:%M:%S\")\n req_time = datetime.datetime.strptime(req['time'], \"%H:%M\")\n\n wind_speed = 0.0\n rain = 0.0\n\n if date == req['date'] and time <= req_time:\n wind_speed = js['list'][i]['wind']['speed']\n if js['list'][i]['rain'] != {}:\n rain = js['list'][i]['rain']['3h']/3\n break\n\n return rain, wind_speed",
"def temp_in_3_hours():\n\n # let's select the second result, i.e. weather in 3 hours\n temp_in_3 = temp_feels_like()[1].text\n\n return None",
"def forecast():\n logging.info(\"Received /forecast request\")\n forecast_df = webapp_utils.get_forecast_df()\n current_conditions_df = webapp_utils.get_current_conditions_df()\n\n current_time = datetime.datetime.now(pytz.timezone(config.Config.TARGET_TIMEZONE))\n current_time = current_time.strftime('%Y-%m-%d %H:%M:%S')\n\n if forecast_df is not None and len(forecast_df) > 0 and \\\n current_conditions_df is not None and len(current_conditions_df) > 0:\n\n current_conditions_df = webapp_utils.format_forecast(current_conditions_df)\n forecast_df = webapp_utils.format_forecast(forecast_df)\n table_info = [\n {'title': 'Last Known Conditions',\n 'column_names': current_conditions_df.columns.values,\n 'row_data': list(current_conditions_df.values.tolist())},\n {'title': 'Current Forecast',\n 'column_names': forecast_df.columns.values,\n 'row_data': list(forecast_df.values.tolist())},\n ]\n return render_template('forecast.html', current_time=current_time, table_info=table_info)\n else:\n return render_template('forecast_nodata.html', current_time=current_time)",
"def generate_url(self,time):\n def _gen_url(yymmdd,yyyymm,hours):\n #return self.baseurl%(self.resolution,\\\n # yyyymm,yymmdd,self.resolution,\\\n # yymmdd,hours)\n return self.baseurl%(yymmdd)\n\n\n yymmdd = datetime.strftime(time,'%Y%m%d')\n basetime = datetime.strptime(yymmdd,'%Y%m%d')\n\n # Generate the string\n yyyymm = datetime.strftime(time,'%Y%m')\n hours = (time-basetime).total_seconds()/3600\n\n url = _gen_url(yymmdd,yyyymm,hours)\n\n # Check if the url exists\n if not basetime == self.basetime:\n print('Checking if url exists...\\n\\t%s'%url)\n try:\n # Update to a new data\n #f = urllib2.urlopen('%s.html'%url)\n nc = Dataset(url)\n self.basetime = basetime\n print('yes')\n nc.close()\n return url\n except:\n print('File does not exist - we are in the forecast\\\n stage...(%s)'%(yymmdd))\n # Generate a string from the old basetime\n yymmdd = datetime.strftime(self.basetime,'%Y%m%d')\n yyyymm = datetime.strftime(self.basetime,'%Y%m')\n hours = (time-self.basetime).total_seconds()/3600\n url = _gen_url(yymmdd,yyyymm,hours)\n return url",
"def weather_scraper(year, month, day):\n date = year + month + day\n\n urlstart = 'http://api.wunderground.com/api/37d281e3f1931e1e/history_'\n urlend = '/q/Ireland/Dublin.json'\n url = urlstart + str(date) + urlend\n data = requests.get(url).json()\n\n for i in data['history']['observations']:\n if 'METAR' in i['metar']:\n datetime = year + \"-\" + month + \"-\" + day + \" \" + \\\n i['date']['hour'] + ':' + i['date']['min'] + ':00'\n summary = i['conds']\n temp = str(math.floor(float(i[\"tempm\"])))\n rain = i['rain']\n wind_speed = str(math.floor(float(i['wspdm'])))\n # Store relevant informaion in a specific format in an array\n weather_array = [datetime, summary, temp, rain, wind_speed]\n #Write each line to a csv file\n write_to.writerow(weather_array)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Getter for list of lesson logs for the student
|
def lesson_logs(self):
    all_lessons = models.storage.all(LessonLog)
    lesson_logs = []
    for lesson in all_lessons.values():
        if lesson.student_id == self.id:
            lesson_logs.append(lesson)
    return lesson_logs
|
[
"def list_logs_by_student(student_id):\n\n student = crud.get_student_by_id(student_id)\n student_logs = student.logs\n\n if \"teacher_id\" in session:\n teacher = crud.get_teacher_by_id(session[\"teacher_id\"]) \n else:\n teacher = None\n\n return render_template('charts.html', student= student, teacher=teacher, student_logs=student_logs)",
"def get_lessons(self):\n lessons = []\n for item in self.student.student_class.lessons.all():\n lesson = (str(item.period),\n str(item.unit),\n str(item.venue),\n item.get_type_display(),\n str(item.lecturer) or \"\")\n\n self.lessons.update(\n {\"\\n\".join([lesson[0], lesson[1]]): item.pk}\n )\n\n lessons.append(lesson)\n\n return lessons",
"def get_logging_list(self):\n return self.__logging_list",
"def view_student_logs():\n\n student = crud.get_student_by_id(session['student_id'])\n teacher = crud.get_teacher_by_id(session[\"teacher_id\"])\n\n return render_template('charts.html', student=student, teacher=teacher)",
"def getLog(self, remove=True):\n res = self.logs\n if remove:\n self.logs = []\n return res",
"def logs(request, wid=None):\n\tfilteredLogs = []\n\tqueryRes = WatcherLogs.objects.filter(wid=wid)\n\tfor queryLog in queryRes:\n\t\tfilteredLogs.append({\"log\": queryLog.log})\n\treturn render(request, \"logs.html\", {\"filteredLogs\": filteredLogs, \"lw\": \"Logwatch \" + str(wid)})",
"def get_logs_by_provider(self, resource_id):\n return # osid.logging.LogList",
"def get_enrolled_students(self) -> List[str]:\n return self.enrolled_students",
"def fetch_student_records(self) -> List[str]:\n return [self.cwid, self.name, self.major, sorted(self.courses.keys())]",
"def _get_lessons(self, context):\n return Lesson.objects.select_related(\n 'room', 'room__building',\n 'group', 'group__course__discipline').filter(\n date__gte=context['academic_term'].start_date,\n date__lt=context['academic_term'].exams_start_date,\n room__pk=context['room_id'],\n ).order_by('date')",
"def do_list_clan_war_log(self, _):\n clan_war_log: List[WarLog] = self.cr_client.list_clan_war_log()\n for war_log in clan_war_log:\n print(f'Season={war_log.season_id} Date={war_log.created_date}')\n print(f'Collection day: ')\n for war_participant in war_log.participants:\n print(f' {war_participant}')\n print(f'War day: ')\n for war_standing in war_log.standings:\n print(f' {war_standing}')\n print('')",
"async def get_member_mod_logs(bot, member, guild) -> list:\n logs = []\n cursor = bot.mod.find({\"member\": member.id, \"guild_id\": guild.id})\n for document in await cursor.to_list(length=10000):\n logs.append(document)\n\n return logs",
"def get_logs(self, sn, return_type=\"json\", **kwargs):\n assert(return_type in (\"json\", \"dataframe\")), \"Bad return_type\"\n\n data = self.fetch_data(\"log/{}/\".format(sn), **kwargs)\n if return_type == \"dataframe\":\n data = list_to_dataframe(data)\n return data",
"def get_full_log_list(self):\n logs = self.get_exposed_log_list()\n try:\n logs.extend(self.get_unexposed_user_log_names())\n except SkipTest:\n pass\n try:\n logs.extend(self.get_unexposed_sys_log_names())\n except SkipTest:\n pass\n\n return logs",
"def student_list() -> List[str]:\n path = os.path.join(BASE_PATH, 'hta/groups/students.txt')\n return line_read(path)",
"def lith_logs(wells, session=None, **kwargs):\n if session is None:\n session = get_global_session()\n return session.bulk_lith_logs(wells, **kwargs)",
"def allLogs(self):\n\t\tres = \"\"\n\t\tcases = self.logs.keys()\n\t\tif \"by_date\" in cases: cases.remove(\"by_date\")\n\t\tfor case in cases:\n\t\t\tres += self.prettify.case(case, self.logs.get(case)) + \"\\n\\n\"\n\t\treturn res",
"def log(self):\n return self.res_list[-1]['log']",
"def get_student_group_history(self, classname):\n if (\"classname\" == \"Section\"):\n if not self.section_advisor_history:\n self.section_advisor_history = History.create(\n ownerref = self, attributename=\"section_advisor\",\n isreference=True, multiactive=True)\n self.put()\n return(self.section_advisor_history)\n else:\n if not self.class_session_teacher_history:\n self.class_session_teacher_history = History.create(\n ownerref = self, attributename=\"class_session_teacher\",\n isreference=True, multiactive=True)\n self.put()\n return(self.class_session_teacher_history)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Subtracts the bet from the player's bank.
|
def bet(self, bet):
self.bank -= bet
|
[
"def lose_bet(self, bet):\n self.total -= bet",
"def pay_bet(self):\n self.wallet -= self.bet\n self.bet = 0",
"def win_no_blackjack(self, bet):\n self.bank += bet * 2",
"def debit(self, amount):\n if self._is_asset_or_expenses:\n self.balance += amount\n else:\n self.balance -= amount",
"def bet_money(self, amount):\n self.money -= amount # money 100->80,\n self.bet += amount # bet 0-> 20",
"def reset_bet(self):\n self.bet = 0",
"def withdraw_money(self, amount):\n self.balance -= amount",
"def win_blackjack(self, bet):\n self.bank += bet * 2.5",
"def decrement_balance(self, address, amount):\n self._wallet_pool.sub_balance(address, amount)\n self._save_wallet_pool_data()",
"def devalue_asset(self, asset, valuationLost: float) -> None:\n account = self.asset_accounts.get(asset.ctype)\n if not account:\n raise Exception(\"Asset account not found for ${asset.ctype}.\")\n account.credit(valuationLost)\n\n # TODO: perform a check here that the Asset account balances match the valuation of the assets. (?)",
"def withdraw(self, account, amount):\n # Take the amount of money our of the account\n self.accounts[account].balance -= amount\n # Return the amount of money we withdrew\n return amount",
"def debits(self, debits):\n\n self._debits = debits",
"def subtract(self, new_val):\n self.balance -= int(new_val)\n\n return self.balance",
"def decrement_stock(self, q):\n self.__stock -= q",
"def decrement(name, value=1, sample_rate=1, tags=None):",
"def abandon_bet(self, abandoner):\n self.bets = [bet for bet in self.bets if bet.user != abandoner]",
"def setBet(self, bet):\n if self.balance == 0:\n #if balance is 0 can't place a bet\n print(\"You have no money\")\n return False\n if bet > self.balance:\n #Cannot bet more then you have\n print(\"Your bet is more then your balance\")\n return False\n if bet < 0:\n #Cannot bet negatively\n #Betting negatively comes with a 5 dollar penalty\n self.balance += bet\n print(\"Because you entered a negative amount you have been penalized 5 dollars\")\n return False\n self.bet = bet\n self.__deduct() #deduct bet from balance\n print(\"You have bet {}, balance is now {}\".format(self.bet, self.balance))\n return True",
"def dec (self, by = 1):\n assert by > 0\n self.counter -= by\n if self.counter <= 0:\n # Don't leave self.counter < 0, that will screw things up in\n # future calls.\n self.counter = 0\n # Transitioning from nonzero to 0 means wait() need no longer wait.\n self.event.send()",
"def remove_player(self):\n if self.num_player > 0:\n self.num_player -= 1\n self.available_place += 1\n self.update_full_status()\n self.save()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Adds non-blackjack winnings to the player's bank.
|
def win_no_blackjack(self, bet):
self.bank += bet * 2
|
[
"def win_blackjack(self, bet):\n self.bank += bet * 2.5",
"def __payoutSideBet(self):\n dealer_card = self.dealer.getVisibleCard()\n for player in self.players:\n if player.balance > 0:\n player_first_card = player.hands[0][0]\n player_second_card = player.hands[0][1]\n suited = False\n if player_first_card.suit == player_second_card.suit == dealer_card.suit:\n suited = True\n\n if player_first_card.value == player_second_card.value == dealer_card.value == 7:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 777\")\n player.balance += player.sideBet * 200\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 777\")\n player.balance += player.sideBet * 50\n\n elif player_first_card in [6, 7, 8] and player_second_card in [6, 7, 8] and dealer_card in [6, 7, 8] \\\n and (player_first_card.value + player_second_card.value + dealer_card.value) == 21:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 678\")\n player.balance += player.sideBet * 100\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 678\")\n player.balance += player.sideBet * 30\n elif (player_first_card.value + player_second_card.value + dealer_card.value) == 21:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 21\")\n player.balance += player.sideBet * 15\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 21\")\n player.balance += player.sideBet * 3\n elif (player_first_card.value + player_second_card.value + dealer_card.value) in [19, 20]:\n player.sideBetWinCount += 1\n print(\"Player got crap\")\n player.balance += player.sideBet * 2",
"def update_credits(self, winner, is_blackjack):\n if winner == 1 and is_blackjack:\n self.player.credits = self.player.credits + (2.5 * self.player.bet)\n if winner == 1 and (not is_blackjack):\n self.player.credits = self.player.credits + (2 * self.player.bet)\n if winner == 0:\n self.player.credits = self.player.credits + self.player.bet",
"def __IntermediateCheck(self):\n #TODO: payout the side bet of lucky lucky\n #self.__payoutSideBet()\n\n # payout black jacks\n for player in self.players:\n if player.balance > 0:\n hand = player.hands[0]\n if helpers.isBlackJack(hand):\n player.balance += player.bet * 1.5\n print(\"Player \" + str(player.id) + \" got a black jack.\")\n print(\"Balance: \" + str(player.balance))",
"def __addWinnings(self, multiplier):\n self.balance += self.bet * multiplier",
"def reset_wins(self):\n self.wins = 0",
"def runBingoGame(numPlayers,gameNumber):\n\tallPlayers = []\n\tfor playerNum in range(numPlayers):\n\t\tnewPlayer = bingoCard(playerNum)\n\t\tallPlayers.append(newPlayer)\n\t\tnewPlayer.seeCard()\n\t\n\tcallList = numCalls()\n\tprint 'call order: ' + str(callList) + '\\n'\n\n\t# turn = 1\n\t# winCallHistory = []\n\t# for number in callList:\n\t# \tfor player in allPlayers:\n\t# \t\tif player.stopPlaying():\n\t# \t\t\tif player.hasNumber(number):\n\t# \t\t\t\tplayer.markNumber(number)\n\t\t\t\t\n\t# \t\t\tif number not in winCallHistory:\n\t# \t\t\t\twinCallHistory.append(number)\n\t# \t\t\t# turn += 1\n\t# \t\telse:\n\t# \t\t\twinner = player\n\t# \t\t\tbreak\n\n\t\t\t# turn += 1\n\t\t\t# callHistory.append(number)\n\n\twinCallHistory = []\n\twinner = allPlayers[0]\n\tfor number in callList:\n\t\tif winner.stopPlaying():\n\t\t\tbreak\n\n\t\tfor player in allPlayers:\n\t\t\tif player.stopPlaying():\n\t\t\t\twinner = player\n\t\t\t\tbreak\n\n\t\t\telse:\n\t\t\t\tif player.hasNumber(number):\n\t\t\t\t\tplayer.markNumber(number)\n\t\t\t\t\t\n\t\tif number not in winCallHistory:\n\t\t\t\twinCallHistory.append(number)\n\n\tif winner == allPlayers[0]:\n\t\tnumTurnsToWin = len(winCallHistory)\n\telse:\n\t\tnumTurnsToWin = len(winCallHistory)-1\n\n\tif gameNumber/50 == 0:\n\t\tprint 'game number: ' + str(gameNumber) + '/1000\\n'\n\t\tprint 'turn completed: ' + str(numTurnsToWin)\n\t\tprint 'calls until win: ' + str(winCallHistory) + '\\n'\n\t\tprint 'player ' + str(winner.getCardName()) + ' won with ' + str(winner.wonWithCondition()) + '\\n'\n\t\t# print 'bingo! \\n'\n\t\tprint 'bingo! \\ncard has marked numbers: ' + str(winner.getAllMarkedNums())\n\t\tprint 'card has marked positions: ' + str(winner.getAllMarkedPos()) + '\\n'\n\n\treturn numTurnsToWin",
"def dealer_natural(players):\n\n for p in players:\n if p.dealer and p.points >= 10:\n print('Dealer checks for blackjack ...')\n ranks = list()\n for card in p.hand:\n ranks.append(card['card'].rank)\n if count_points(ranks) == 21:\n print('Dealer has blackjack! ')\n for card in p.hand:\n card['folding'] = False\n return True\n else:\n print('Dealer does not have blackjack')\n return False",
"def dealer_draw(self, blackjack_deck):\n while self.dealer.get_score() < 17:\n self.dealer.hit(blackjack_deck.draw_card())",
"def get_winner(self):\n if self.check_for_bust(self.dealer):\n print('Dealer bust')\n return 1\n if self.dealer.hand.total >= 17 and self.dealer.hand.total > self.player.hand.total:\n print('Dealer wins')\n return -1\n if self.dealer.hand.total < self.player.hand.total:\n print(self.player.name + (' wins!'))\n return 1\n if self.dealer.hand.total == self.player.hand.total:\n print('Push!')\n return 0",
"def check_for_blackjack(self):\n winner = None\n blackjack = False\n if self.player.hand.total == 21 and len(self.player.hand.cards) == 2:\n if self.dealer.hand.total == 21 and len(self.dealer.hand.cards) == 2:\n print('Push!')\n winner = 0\n else:\n print(self.player.name + ' blackjack!')\n winner = 1\n blackjack = True\n if self.dealer.hand.total == 21 and len(self.dealer.hand.cards) == 2:\n self.dealer.show_hand(True)\n print(\"Dealer blackjack!\")\n winner = -1\n return (winner, blackjack)",
"def continue_round(self, player_cards, dealer_cards, bet):\n self.deck = self.cards[:]\n random.shuffle(self.deck)\n self.bet = bet\n self.player_cards = player_cards[:] \n self.dealer_cards = dealer_cards[:]\n while len(self.dealer_cards) < 2:\n self.deal(self.dealer_cards, self.dealer.name)\n return self.play_round()",
"def income(player: Player) -> None:\n player.coin += 1",
"def draw(self,deck):\r\n self.cards.append(deck.cards.pop())\r\n if (self.cards[-1].value == 'A'):\r\n self.aces += 1\r\n self.score += BlackJackHand.bj_scores[self.cards[-1].value]\r\n #Apparently the below isn't real, despite playing this way my whole life\r\n # #if you draw a blackjack, score is 21 automatically\r\n # if (self.cards[-1].suit in ('S','C') and self.cards[-1].value == 'J'):\r\n # self.score = 21\r\n #if you bust but have an ace, convert ace to 1\r\n if (self.score > 21 and (self.aces > self.aces_converted)):\r\n self.score += -10\r\n self.aces_converted += 1\r\n #if you draw to 7 cards without busting you win\r\n if (len(self.cards) >= 7 and self.score < 21):\r\n self.score = 21\r\n if (self.score == 21):\r\n self.blackjack = True\r\n if (self.score > 21):\r\n self.bust = True\r\n self.card_list = self.list_cards()",
"async def show_balances(self, ctx, pkr_players):\r\n await ctx.send(\"CURRENT BALANCES:\")\r\n for player in pkr_players:\r\n await ctx.send(player.get_username() + \": \" +\r\n str(player.get_game_balance()) + \" <:chips:865450470671646760>\")",
"def determine_outcome(player_total, dealer_total):\r\n\r\n BLACKJACK = 21\r\n\r\n #if players total is more than the dealers or the dealer busts you win, else you lose including if you bust\r\n if player_total > dealer_total or dealer_total > BLACKJACK:\r\n print('YOU WIN!')\r\n print()\r\n else:\r\n print('YOU LOSE!')\r\n print()",
"def game_round():\r\n win_condition = model.player_scores[player_tracker[0]] >= 13\r\n view.print_bars()\r\n view.player_start(player_tracker)\r\n current_shotguns = 0\r\n round_score = 0\r\n round_loss_condition = False\r\n view.player_score(player_tracker, model.player_scores)\r\n\r\n # loops until current player banks or loses\r\n while win_condition is False and round_loss_condition is False:\r\n # turn options\r\n view.show_current_dice(model.dice_in_hand)\r\n view.show_turn_options()\r\n turn_input = view.turn_choice_input()\r\n\r\n # evaluate turn input\r\n if turn_input not in ('1', '2'):\r\n view.bad_input()\r\n # roll dice option\r\n elif turn_input == '1':\r\n # draw and roll dice\r\n view.print_bars()\r\n model.pull_dice()\r\n view.show_current_dice(model.dice_in_hand)\r\n roll_result = model.choice_roll_dice()\r\n view.show_roll_result(roll_result)\r\n view.print_bars()\r\n\r\n # store results in more usable format and print\r\n current_shotguns += model.count_shotguns(roll_result)\r\n round_score += model.count_brains(roll_result)\r\n model.remove_brain_shotgun_post_roll(roll_result)\r\n view.round_so_far(current_shotguns, round_score, model.dice_in_hand)\r\n view.print_bars()\r\n\r\n # evaluate if player has met round loss condition\r\n round_loss_condition = model.check_loss(current_shotguns)\r\n if round_loss_condition:\r\n view.three_shot_message()\r\n # bank score option\r\n else:\r\n # total player's score, evaluate win condition, break loop\r\n model.choice_bank_score(player_tracker[0], round_score)\r\n view.bank_message()\r\n view.player_score(player_tracker, model.player_scores)\r\n win_condition = model.player_scores[player_tracker[0]] >= 13\r\n break\r\n # return whether current player has met win condition after most recent round\r\n return win_condition",
"def show_blackjack_msg(player, winnings):\n print(\"BLACKJACK! {} wins {} dollars\".format(\n player.name, winnings))",
"def test_player_give_cards_after_win(players):\n player1 = players[0]\n winning_cards = [Card('H', '7'), Card('S', '6')]\n player1.give_cards(winning_cards)\n assert len(player1.hand.cards) == 7"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Adds blackjack winnings to the player's bank.
|
def win_blackjack(self, bet):
self.bank += bet * 2.5
|
[
"def win_no_blackjack(self, bet):\n self.bank += bet * 2",
"def update_credits(self, winner, is_blackjack):\n if winner == 1 and is_blackjack:\n self.player.credits = self.player.credits + (2.5 * self.player.bet)\n if winner == 1 and (not is_blackjack):\n self.player.credits = self.player.credits + (2 * self.player.bet)\n if winner == 0:\n self.player.credits = self.player.credits + self.player.bet",
"def __IntermediateCheck(self):\n #TODO: payout the side bet of lucky lucky\n #self.__payoutSideBet()\n\n # payout black jacks\n for player in self.players:\n if player.balance > 0:\n hand = player.hands[0]\n if helpers.isBlackJack(hand):\n player.balance += player.bet * 1.5\n print(\"Player \" + str(player.id) + \" got a black jack.\")\n print(\"Balance: \" + str(player.balance))",
"def turn(self):\n self.score += self.play() ## increment the player's score\n self.add_player_status() ## mark the player's status\n self.add_player_score() ## mark the player's score\n self.add_turn() ## increment the player's turn count",
"def __addWinnings(self, multiplier):\n self.balance += self.bet * multiplier",
"def runBingoGame(numPlayers,gameNumber):\n\tallPlayers = []\n\tfor playerNum in range(numPlayers):\n\t\tnewPlayer = bingoCard(playerNum)\n\t\tallPlayers.append(newPlayer)\n\t\tnewPlayer.seeCard()\n\t\n\tcallList = numCalls()\n\tprint 'call order: ' + str(callList) + '\\n'\n\n\t# turn = 1\n\t# winCallHistory = []\n\t# for number in callList:\n\t# \tfor player in allPlayers:\n\t# \t\tif player.stopPlaying():\n\t# \t\t\tif player.hasNumber(number):\n\t# \t\t\t\tplayer.markNumber(number)\n\t\t\t\t\n\t# \t\t\tif number not in winCallHistory:\n\t# \t\t\t\twinCallHistory.append(number)\n\t# \t\t\t# turn += 1\n\t# \t\telse:\n\t# \t\t\twinner = player\n\t# \t\t\tbreak\n\n\t\t\t# turn += 1\n\t\t\t# callHistory.append(number)\n\n\twinCallHistory = []\n\twinner = allPlayers[0]\n\tfor number in callList:\n\t\tif winner.stopPlaying():\n\t\t\tbreak\n\n\t\tfor player in allPlayers:\n\t\t\tif player.stopPlaying():\n\t\t\t\twinner = player\n\t\t\t\tbreak\n\n\t\t\telse:\n\t\t\t\tif player.hasNumber(number):\n\t\t\t\t\tplayer.markNumber(number)\n\t\t\t\t\t\n\t\tif number not in winCallHistory:\n\t\t\t\twinCallHistory.append(number)\n\n\tif winner == allPlayers[0]:\n\t\tnumTurnsToWin = len(winCallHistory)\n\telse:\n\t\tnumTurnsToWin = len(winCallHistory)-1\n\n\tif gameNumber/50 == 0:\n\t\tprint 'game number: ' + str(gameNumber) + '/1000\\n'\n\t\tprint 'turn completed: ' + str(numTurnsToWin)\n\t\tprint 'calls until win: ' + str(winCallHistory) + '\\n'\n\t\tprint 'player ' + str(winner.getCardName()) + ' won with ' + str(winner.wonWithCondition()) + '\\n'\n\t\t# print 'bingo! \\n'\n\t\tprint 'bingo! \\ncard has marked numbers: ' + str(winner.getAllMarkedNums())\n\t\tprint 'card has marked positions: ' + str(winner.getAllMarkedPos()) + '\\n'\n\n\treturn numTurnsToWin",
"def dealer_draw(self, blackjack_deck):\n while self.dealer.get_score() < 17:\n self.dealer.hit(blackjack_deck.draw_card())",
"def __payoutSideBet(self):\n dealer_card = self.dealer.getVisibleCard()\n for player in self.players:\n if player.balance > 0:\n player_first_card = player.hands[0][0]\n player_second_card = player.hands[0][1]\n suited = False\n if player_first_card.suit == player_second_card.suit == dealer_card.suit:\n suited = True\n\n if player_first_card.value == player_second_card.value == dealer_card.value == 7:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 777\")\n player.balance += player.sideBet * 200\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 777\")\n player.balance += player.sideBet * 50\n\n elif player_first_card in [6, 7, 8] and player_second_card in [6, 7, 8] and dealer_card in [6, 7, 8] \\\n and (player_first_card.value + player_second_card.value + dealer_card.value) == 21:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 678\")\n player.balance += player.sideBet * 100\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 678\")\n player.balance += player.sideBet * 30\n elif (player_first_card.value + player_second_card.value + dealer_card.value) == 21:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 21\")\n player.balance += player.sideBet * 15\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 21\")\n player.balance += player.sideBet * 3\n elif (player_first_card.value + player_second_card.value + dealer_card.value) in [19, 20]:\n player.sideBetWinCount += 1\n print(\"Player got crap\")\n player.balance += player.sideBet * 2",
"def draw(self,deck):\r\n self.cards.append(deck.cards.pop())\r\n if (self.cards[-1].value == 'A'):\r\n self.aces += 1\r\n self.score += BlackJackHand.bj_scores[self.cards[-1].value]\r\n #Apparently the below isn't real, despite playing this way my whole life\r\n # #if you draw a blackjack, score is 21 automatically\r\n # if (self.cards[-1].suit in ('S','C') and self.cards[-1].value == 'J'):\r\n # self.score = 21\r\n #if you bust but have an ace, convert ace to 1\r\n if (self.score > 21 and (self.aces > self.aces_converted)):\r\n self.score += -10\r\n self.aces_converted += 1\r\n #if you draw to 7 cards without busting you win\r\n if (len(self.cards) >= 7 and self.score < 21):\r\n self.score = 21\r\n if (self.score == 21):\r\n self.blackjack = True\r\n if (self.score > 21):\r\n self.bust = True\r\n self.card_list = self.list_cards()",
"def update_winner(cache):\n SESSION.query(Character).filter(Character.id == cache.id).update(\n {Character.wins:Character.wins + 1}, synchronize_session=False)\n SESSION.commit()",
"def check_for_blackjack(self):\n winner = None\n blackjack = False\n if self.player.hand.total == 21 and len(self.player.hand.cards) == 2:\n if self.dealer.hand.total == 21 and len(self.dealer.hand.cards) == 2:\n print('Push!')\n winner = 0\n else:\n print(self.player.name + ' blackjack!')\n winner = 1\n blackjack = True\n if self.dealer.hand.total == 21 and len(self.dealer.hand.cards) == 2:\n self.dealer.show_hand(True)\n print(\"Dealer blackjack!\")\n winner = -1\n return (winner, blackjack)",
"def show_blackjack_msg(player, winnings):\n print(\"BLACKJACK! {} wins {} dollars\".format(\n player.name, winnings))",
"def test_player_give_cards_after_win(players):\n player1 = players[0]\n winning_cards = [Card('H', '7'), Card('S', '6')]\n player1.give_cards(winning_cards)\n assert len(player1.hand.cards) == 7",
"def continue_round(self, player_cards, dealer_cards, bet):\n self.deck = self.cards[:]\n random.shuffle(self.deck)\n self.bet = bet\n self.player_cards = player_cards[:] \n self.dealer_cards = dealer_cards[:]\n while len(self.dealer_cards) < 2:\n self.deal(self.dealer_cards, self.dealer.name)\n return self.play_round()",
"async def show_balances(self, ctx, pkr_players):\r\n await ctx.send(\"CURRENT BALANCES:\")\r\n for player in pkr_players:\r\n await ctx.send(player.get_username() + \": \" +\r\n str(player.get_game_balance()) + \" <:chips:865450470671646760>\")",
"def deal_cards_to_player():\r\n\r\n BLACKJACK = 20\r\n card1 = deal_card()\r\n card2 = deal_card()\r\n total = get_card_value(card1) + get_card_value(card2)\r\n print('Player drew ' + card1 + ' and ' + card2 + '.')\r\n print(\"Player's total is\", str(total) + '.')\r\n print()\r\n\r\n #while the total is less than 21 give the player the choice to hit or stay and add the card value if its a hit\r\n while total <= BLACKJACK:\r\n player_choice = input('Hit (h) or Stay (s)? ')\r\n print()\r\n if player_choice == 'h':\r\n hit_card = deal_card()\r\n total += get_card_value(hit_card)\r\n print('Player drew', str(hit_card) + '.')\r\n print(\"Player's total is\", str(total) + '.')\r\n print()\r\n elif player_choice == 's':\r\n break\r\n\r\n return int(total)",
"def draw(self):\n self.money += self.bet\n self.bet = 0",
"def income(player: Player) -> None:\n player.coin += 1",
"def gameRoll(self):\n print(\"---SHAKE SHAKE AND ROLL---\\n\")\n self.cup.roll()\n print(\"Goal is: {}\".format(self.balance)) #added after video\n self.__payout(self.cup.getSum())\n self.printBalance()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the player's bet to the bank when the result is a push.
|
def push(self, bet):
self.bank += bet
|
[
"def show_push_msg(player):\n print(\"Push! {} and the dealer have the same hand.\".format(player.name))",
"async def bet(ctx, bet_str, bet_value):\n\n uid = ctx.author.id\n\n if not is_user(uid):\n await error(ctx, \"You are not registered and cannot bet.\")\n return\n\n if not is_game_on():\n await error(ctx, \"No game is currently under progress.\")\n return\n\n if not is_betting_on():\n await error(ctx, \"Betting phase is over.\")\n return\n\n \n current_cash = db.db_read(uid, \"cf_score\") + db.db_read(uid, \"kirari_score\")\n\n try:\n if bet_value == \"all\":\n bet_value = current_cash\n else:\n bet_value = int(bet_value)\n except Exception:\n await error(ctx, \"The value of your bet must be integer / `all`.\")\n return\n\n\n if bet_value > current_cash:\n await error(ctx, \"You cannot bet more than what you have!\")\n return\n \n if bet_value < 0:\n await error(ctx, \"No, please no negative bets. :(\")\n return\n\n bet, flag = await parse_bet(ctx, bet_str)\n\n if flag < 0:\n return\n\n db.db_write(uid, \"bet\", bet)\n db.db_write(uid, \"bet_value\", bet_value)\n\n member_name = db.db_read(uid, \"name\")\n\n iabs = lambda x: x if x >= 0 else -x\n\n positive_potential = iabs(get_delta(True, len(bet), bet_value))\n negative_potential = iabs(get_delta(False, len(bet), bet_value))\n\n response = \"\"\"\n Bet by **%s**:\\nYou have bet on the following %d position(s): ```%s```The value of your current bet is **%s** (+%d, -%d).\n \"\"\" % (member_name, len(bet), \"<you have bet on nothing>\"\n if len(bet) == 0 else \" \".join(map(str, sorted(list(bet)))),\n coinfmt(bet_value), positive_potential, negative_potential)\n\n await ctx.send(response)",
"def betting():\n game.player1.wager = get_input(game.player1, game.player2)\n print(\"Player 1:\", game.player1.wager)\n print(\"Player 2:\", game.player2.wager)\n\n game.update_tablepot()\n\n if game.player1.folded:\n print(\"player1 folded\")\n return False\n\n game.player2.wager = get_input(game.player2, game.player1)\n print(\"Player 1:\", game.player1.wager)\n print(\"Player 2:\", game.player2.wager)\n\n game.update_tablepot()\n\n if game.player2.folded:\n print(\"player2 folded\")\n return False\n\n if game.player1.wager == game.player2.wager:\n print(\"moving on\")\n return True\n else:\n print(\"you're stuck in betting\")\n return betting()",
"def place_bet(self):\n while True:\n bet = int(input(\"Please enter how much you wish to bet: \"))\n if bet <= self.money:\n self.bet += bet\n self.money -= bet\n return bet \n else:\n print(\"The bet you entered is more than the amount you have\")\n sleep(0.5)",
"def win_blackjack(self, bet):\n self.bank += bet * 2.5",
"def bet(r, *a, **kw):\n player_id = r.GET.get(\"player_id\")\n player = player_from_player_id(player_id)\n game_id = r.GET.get(\"game_id\")\n game = game_from_game_id(game_id)\n data = {}\n if is_players_turn(game, player):\n if player:\n if game:\n bet_size = r.GET.get(\"bet_size\")\n if bet_size:\n game.update_pot(bet_size)\n data[\"bet\"] = True\n data[\"new_pot_size\"] = game.current_pot\n data[\"bet_size\"] = bet_size\n player.update_stack(amount=bet_size)\n data[\"new_stack_size\"] = player.stack\n game.update_players_turn()\n else:\n data[\"no_bet_size\"] = True\n else:\n data[\"no_game_id\"] = True\n else:\n data[\"no_player_id\"] = True\n else:\n data[\"not_your_turn\"] = True\n return JsonResponse(data)",
"def place_bet(self, player, bet):\n # bet only called on first hand of player\n player.bet = int(bet)\n\n if player.bet == 0:\n player.is_active = False\n return True\n elif player.bet == -1:\n return False\n else:\n return True",
"def get_result(self, player):\n\t\traise NotImplementedError",
"def get_result(self, player):\n\t\t# return 0 if (self.knockedOut[player]) else 1\n\t\treturn self.current_scores[player % 2] / 162.",
"def get_winner(self):\n if self.check_for_bust(self.dealer):\n print('Dealer bust')\n return 1\n if self.dealer.hand.total >= 17 and self.dealer.hand.total > self.player.hand.total:\n print('Dealer wins')\n return -1\n if self.dealer.hand.total < self.player.hand.total:\n print(self.player.name + (' wins!'))\n return 1\n if self.dealer.hand.total == self.player.hand.total:\n print('Push!')\n return 0",
"def win_no_blackjack(self, bet):\n self.bank += bet * 2",
"def btn_pushed(self,channel):\n #time.sleep(0.4)\n #if GPIO.input(channel):\n if channel == pin_buttonA:\n print \"pushed 'A' button\",channel\n pygame.event.post(pygame.event.Event(self.USEREVENT_BTNA))\n elif channel == pin_buttonB:\n print \"pushed 'B' button\",channel\n pygame.event.post(pygame.event.Event(self.USEREVENT_BTNB))\n else:\n print \"unknown button pushed\"",
"def check_for_blackjack(self):\n winner = None\n blackjack = False\n if self.player.hand.total == 21 and len(self.player.hand.cards) == 2:\n if self.dealer.hand.total == 21 and len(self.dealer.hand.cards) == 2:\n print('Push!')\n winner = 0\n else:\n print(self.player.name + ' blackjack!')\n winner = 1\n blackjack = True\n if self.dealer.hand.total == 21 and len(self.dealer.hand.cards) == 2:\n self.dealer.show_hand(True)\n print(\"Dealer blackjack!\")\n winner = -1\n return (winner, blackjack)",
"def check_response(self, player):\n other_players = [x for x in self.existing_players.values() if x is not player]\n response = player.move(self.outstanding_bid)\n player.action_required = 0\n if response[\"action\"] == \"raise\":\n self.outstanding_bid = response[\"amount\"]\n self.pot += response[\"pot_increment\"]\n # Set all other players to have a turn.\n for i in range(len(other_players)):\n other_players[i].action_required = 1\n elif response[\"action\"] == \"call\":\n # Update current bid to match outstanding bid\n player.current_bid = self.outstanding_bid\n self.pot += response[\"pot_increment\"]\n elif response[\"action\"] == \"fold\":\n self.existing_players.pop(player.player_number)\n\n # After deleting player, check if only one player left behind\n if len(self.existing_players) == 1:\n for player_number, _ in self.existing_players.items():\n print(\"Player\", player_number, \"is the winner!\")\n input(\"Press enter to quit the game.\")\n quit()",
"def get_result(self, playerjm):\n raise NotImplementedError",
"async def ask_bet(self, ctx):\r\n await ctx.send(\"What is the big blind (minimum bet) amount?\")",
"def gain_money():\n if not current_player:\n abort(400)\n\n args = request.get_json()\n amount = args.get('amount', None)\n if amount is None:\n abort(400)\n\n current_player.money += amount\n db.session.commit()\n return player_state()",
"def __payoutSideBet(self):\n dealer_card = self.dealer.getVisibleCard()\n for player in self.players:\n if player.balance > 0:\n player_first_card = player.hands[0][0]\n player_second_card = player.hands[0][1]\n suited = False\n if player_first_card.suit == player_second_card.suit == dealer_card.suit:\n suited = True\n\n if player_first_card.value == player_second_card.value == dealer_card.value == 7:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 777\")\n player.balance += player.sideBet * 200\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 777\")\n player.balance += player.sideBet * 50\n\n elif player_first_card in [6, 7, 8] and player_second_card in [6, 7, 8] and dealer_card in [6, 7, 8] \\\n and (player_first_card.value + player_second_card.value + dealer_card.value) == 21:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 678\")\n player.balance += player.sideBet * 100\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 678\")\n player.balance += player.sideBet * 30\n elif (player_first_card.value + player_second_card.value + dealer_card.value) == 21:\n if suited:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got a suited 21\")\n player.balance += player.sideBet * 15\n else:\n player.sideBetWinCount += 1\n print(\"Player \" + str(player.id) + \" got an unsuited 21\")\n player.balance += player.sideBet * 3\n elif (player_first_card.value + player_second_card.value + dealer_card.value) in [19, 20]:\n player.sideBetWinCount += 1\n print(\"Player got crap\")\n player.balance += player.sideBet * 2",
"async def higher(self, ctx):\n if self.isgambling == True:\n await ctx.send('You picked Higher. The 2nd card spins around, and reveals a...')\n time.sleep(random.randint(1,5))\n await ctx.send('{0}!'.format(self.gambleValue))\n time.sleep(2)\n if self.gambleValue > self.gamblePrev:\n self.gamblecount += 1\n if self.gamblecount <= 4:\n await ctx.send('Correct! The chest now has x{0} of the original loot! However, another card has shown up!'.format(self.gamblecount))\n self.gamblePrev = self.gambleValue\n self.gambleValue = random.randint(1,10)\n while self.gambleValue == self.gamblePrev:\n self.gambleValue = random.randint(1,10)\n await ctx.send('The shown card is now a {0}. Do you wish to pick Higher, or Lower?'.format(self.gamblePrev))\n elif self.gamblecount == 5:\n await ctx.send('Correct! The chest now has x{0} of the original loot, and has opened up!'.format(self.gamblecount))\n self.isgambling = False\n self.gamblecount = 1\n if self.floor == 6:\n await ctx.send('You are in the final chamber too! Brilliant way to end it!')\n self.ismap = False\n self.floor = 0\n self.isgambling = False\n self.pdoorchance = 10\n self.gamblechance = 10\n self.lpdoor = False\n self.rpdoor = False\n self.doorroute = [0,0,0,0,0,0]\n await self.bot.change_presence(status=discord.Status.online)\n elif self.floor <= 5:\n await ctx.send('You and your party are now in Chamber {0}. Do you want to pick the door on the left or the right?'.format(self.floor+1))\n pdoorrandom = random.randint(1, 100)\n if pdoorrandom <= self.pdoorchance and self.floor <= 4:\n self.pdoorchance = 10\n if self.doorroute[self.floor] == 0:\n self.lpdoor = True\n await ctx.send('The door on the left begins to glow brightly after you stopped gambling.')\n elif self.doorroute[self.floor] == 1:\n self.rpdoor = True\n await ctx.send('The door on the right begins to glow brightly after you stopped gambling.')\n elif self.gambleValue < self.gamblePrev:\n await ctx.send('Incorrect! The chest now remains forever locked. You might as well move onto the next chamber...')\n self.isgambling = False\n self.gamblecount = 1\n if self.floor == 6:\n await ctx.send('But wait, this is the last chamber. Sad way to end it...')\n self.ismap = False\n self.floor = 0\n self.isgambling = False\n self.pdoorchance = 10\n self.gamblechance = 10\n self.lpdoor = False\n self.rpdoor = False\n self.doorroute = [0,0,0,0,0,0]\n await self.bot.change_presence(status=discord.Status.online)\n elif self.floor <= 5: \n await ctx.send('You and your party are now in Chamber {0}. Do you want to pick the door on the left or the right?'.format(self.floor+1))\n pdoorrandom = random.randint(1, 100)\n if pdoorrandom <= self.pdoorchance and self.floor <= 4:\n self.pdoorchance = 10\n if self.doorroute[self.floor] == 0:\n self.lpdoor = True\n await ctx.send('The door on the left begins to glow brightly after you stopped gambling.')\n elif self.doorroute[self.floor] == 1:\n self.rpdoor = True\n await ctx.send('The door on the right begins to glow brightly after you stopped gambling.')\n elif self.isgambling == False:\n await ctx.send('There is no chest that requires gambling...')"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
1. Start at parent. 2a. If node.cargo == value, return cargo. 2b. If value is less than root value, grab left child. 2c. If value is greater than root value, grab right child. 3a. If node is None, raise ValueError. 3b. If node exists, repeat recursive step.
|
def _traverse(self, value, node=None):
    if self.root is None:
        raise ValueError("This binary tree is empty!")
    currentNode = node if node is not None else self.root
    if currentNode.cargo == value:
        return currentNode
    if value < currentNode.cargo:
        nextNode = currentNode.left
    else:
        nextNode = currentNode.right
    if nextNode is None:
        raise ValueError("Value not found in the tree.")
    return self._traverse(value, nextNode)
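
For illustration only, a self-contained sketch of the same lookup written as a standalone function; the Node class here is a hypothetical minimal stand-in for whatever node type the tree above actually uses.

class Node:
    """Hypothetical minimal node type; the real tree's node class is not shown."""
    def __init__(self, cargo, left=None, right=None):
        self.cargo, self.left, self.right = cargo, left, right


def traverse(node, value):
    """Standalone version of the recursive lookup sketched in the docstring."""
    if node is None:
        raise ValueError("Value not found in the tree.")
    if node.cargo == value:
        return node
    child = node.left if value < node.cargo else node.right
    return traverse(child, value)


#        5
#       / \
#      3   8
root = Node(5, Node(3), Node(8))
print(traverse(root, 8).cargo)   # -> 8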
|
[
"def search_pre(self, value, node):\n if (node is not None):\n\n # Check the parent node\n if (node.get_value() == value):\n return node\n\n # Check the left branch\n left_branch = self.search_pre(value, node.get_left())\n if (left_branch is not None):\n return left_branch\n\n # Check the right branch\n right_branch = self.search_pre(value, node.get_right())\n if (right_branch is not None):\n return right_branch\n\n return None",
"def search_post(self, value, node):\n if (node is not None):\n\n # Check the left branch\n left_branch = self.search_post(value, node.get_left())\n if (left_branch is not None):\n return left_branch\n\n # Check the right branch\n right_branch = self.search_post(value, node.get_right())\n if (right_branch is not None):\n return right_branch\n\n # Check the parent node\n if (node.get_value() == value):\n return node\n\n return None",
"def search_stack(self, value, node):\n # Initialize the stack\n stack = Stack()\n\n # Add the root node\n stack.push(node)\n\n # Loop until the stack is empty\n while (not stack.is_empty()):\n\n # Get the last node from the stack\n node = stack.pop()\n\n # Check the node\n if (node.get_value() == value):\n return node\n\n # If it has a left child put it in the stack\n left_child = node.get_left()\n if (left_child is not None):\n stack.push(left_child)\n\n # If it has a right child put it in the stack\n right_child = node.get_right()\n if (right_child is not None):\n stack.push(right_child)\n\n return None",
"def get_val(self, row):\n node = self.root\n while node:\n prev_node = node\n if not len(node.children):\n return node.data\n if node.data not in row.keys():\n return None\n children = prev_node.children\n for child in children:\n if row[prev_node.data] == child.choices[-1][1]:\n node = child\n break\n else:\n node = None\n return None",
"def search_queue(self, value, node):\n # Initialize the queue\n queue = Queue()\n\n # Add the root node\n queue.enqueue(node)\n\n # Loop until the queue is empty\n while (not queue.is_empty()):\n\n # Get the last node from the queue\n node = queue.dequeue()\n\n # Check the node\n if (node.get_value() == value):\n return node\n\n # If it has a left child put it in the queue\n left_child = node.get_left()\n if (left_child is not None):\n queue.enqueue(left_child)\n\n # If it has a right child put it in the queue\n right_child = node.get_right()\n if (right_child is not None):\n queue.enqueue(right_child)\n\n return None",
"def find_recursive(self, root, item):\r\n \r\n #base cases\r\n # if root is None, return None.\r\n if root is None:\r\n return None\r\n #if item is equal to root item, return root item.\r\n elif root.item == item:\r\n return root.item\r\n #recursive cases\r\n #if item is less than root, search the left subtree recursively.\r\n elif item < root.item:\r\n return self.find_recursive(root.left, item)\r\n #otherwise search the right subtree recursively. \r\n else:\r\n return self.find_recursive(root.right, item)",
"def find_successor(self):\n succ = None\n # right child is exist, succ is the most left child of right sub tree\n if self.has_right_child():\n succ = self.right_child.find_min()\n # right child not exist\n else:\n # if it is the left child, succ is the left child's parent\n if self.is_left_child():\n succ = self.parent\n # is the right child, look up until find one node whose n-1th\n # ancestor is the left child of nth ancestor,\n # the the successor is the nth ancestor, if not founded, the node\n # is already the last one node, so there is no successor\n else:\n self.parent.right_child = None\n succ = self.parent.find_successor()\n self.parent.right_child = self\n\n return succ",
"def searchNode(self, value, child=None):\n if not value: assert(Exception(\"Cannot search with a empty `value`\"))\n child = child if child is not None else self\n if child.name == value:\n return child\n else:\n children = child.children\n if len(children):\n for child in children:\n y = child.searchNode(value, child)\n if y: return y\n return None",
"def ceiling(self, value):\n def _ceiling(node):\n if node is None:\n return None\n if node.value == value:\n return value\n if node.value < value:\n return _ceiling(node.right)\n if node.value > value:\n ancestor = _ceiling(node.left)\n return node if ancestor is None else ancestor\n return _ceiling(self._root)",
"def find(self, t):\n node = self.root\n while node is not None:\n if t == node.data:\n return node\n elif t < node.data:\n node = node.left\n else:\n node = node.right\n return None",
"def descend_tree(self, structure, atoms, root=None, strict=False):\n\n if root is None:\n for root in self.top:\n if self.match_node_to_structure(root, structure, atoms, strict):\n break # We've found a matching root\n else: # didn't break - matched no top nodes\n return None\n elif not self.match_node_to_structure(root, structure, atoms, strict):\n return None\n\n next_node = []\n for child in root.children:\n if self.match_node_to_structure(child, structure, atoms, strict):\n next_node.append(child)\n\n if len(next_node) == 1:\n return self.descend_tree(structure, atoms, next_node[0], strict)\n elif len(next_node) == 0:\n if len(root.children) > 0 and root.children[-1].label.startswith('Others-'):\n return root.children[-1]\n else:\n return root\n else:\n # logging.warning('For {0}, a node {1} with overlapping children {2} was encountered '\n # 'in tree with top level nodes {3}. Assuming the first match is the '\n # 'better one.'.format(structure, root, next, self.top))\n return self.descend_tree(structure, atoms, next_node[0], strict)",
"def test_find_right_subtree_leftmost_child(full_bst):\n assert full_bst._find_right_subtree_leftmost_child(full_bst.root).val == 10",
"def test_find_left_subtree_rightmost_child(full_bst):\n assert full_bst._find_left_subtree_rightmost_child(full_bst.root).val == 7",
"def search(self, val, retrieve=False):\n curr = self\n while curr and curr.val:\n if curr.val == val:\n if retrieve:\n return curr\n else:\n return True\n if curr.val < val:\n curr = curr.right\n else:\n curr = curr.left\n\n if retrieve:\n return None\n else:\n return False",
"def tree_search_iterative(node, key):\n while node is not None and key != node.key:\n if key < node.key:\n node = node.left\n else:\n node = node.right\n return node",
"def right_left_most(self, value=True):\n assert not self.is_leaf\n if value:\n return self.right_child.scope.start\n curr_node = self.right_child\n while not curr_node.is_leaf:\n curr_node = curr_node.left_child\n return curr_node",
"def depth_first_search(self, val, retrieve=False):\n node_stack = []\n node_stack.append(self)\n\n while node_stack:\n curr = node_stack.pop()\n if curr.val == val:\n if retrieve:\n return curr\n return True\n if curr.right:\n node_stack.append(curr.right)\n if curr.left:\n node_stack.append(curr.left)\n\n if retrieve:\n return None\n return False",
"def _findNode(self, currentNodeidx, val):\n\t\tcurrentNode = self.tree[currentNodeidx]\n\t\tif currentNode['val'] == val:\n\t\t\tcurrentidx = currentNodeidx\n\t\telif val <= currentNode['val']:\n\t\t\tleftChild = currentNode['L']\n\t\t\tif leftChild == None:\n\t\t\t\traise ValueError('Value not found in the tree.')\n\t\t\telse:\n\t\t\t\tcurrentidx = self._findNode(leftChild, val)\n\t\telif val > currentNode['val']:\n\t\t\trightChild = currentNode['R']\n\t\t\tif rightChild == None:\n\t\t\t\traise ValueError('Value not found in the tree.')\n\t\t\telse:\n\t\t\t\tcurrentidx = self._findNode(rightChild, val)\n\t\treturn currentidx",
"def tree_search(node, key):\n if node is None or key == node.key:\n return node\n if key < node.key:\n return tree_search(node.left, key)\n return tree_search(node.right, key)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Override the typical runner with an instance of `PyunitConsumer`.
|
def run_suite(self, suite, **kwargs):
return PyunitConsumer(
verbosity=self.verbosity,
failfast=self.failfast,
).run(suite)
|
[
"def __init__(self, command_line_args=None):\n command_line_args = command_line_args or sys.argv[1:]\n\n runner_action, test_path, test_runner_args, other_opts = parse_test_runner_command_line_args(command_line_args)\n \n self.setup_logging(other_opts)\n \n runner = TestRunner(**test_runner_args)\n\n bucket_overrides = {}\n if other_opts.bucket_overrides_file:\n bucket_overrides = get_bucket_overrides(other_opts.bucket_overrides_file)\n\n try:\n runner.discover(test_path, bucket=other_opts.bucket, bucket_count=other_opts.bucket_count, bucket_overrides=bucket_overrides)\n except test_discovery.DiscoveryError, e:\n self.log.error(\"Failure loading tests: %s\", e)\n sys.exit(1)\n\n if runner_action == ACTION_LIST_SUITES:\n runner.list_suites()\n sys.exit(0)\n elif runner_action == ACTION_LIST_TESTS:\n runner.list_tests()\n sys.exit(0)\n elif runner_action == ACTION_RUN_TESTS:\n result = runner.run()\n sys.exit(not result)",
"def prepareTestRunner(self, runner):\n # replace with our runner class\n return MultiProcessTestRunner(stream=runner.stream,\n verbosity=runner.config.verbosity,\n config=runner.config,\n loaderClass=self.loaderClass)",
"def test():\n from .tests import runner\n runner.run()",
"def prepareTestRunner(self, runner):\n\n lrunner = lettuce.Runner(self._base_path,\n verbosity=self.conf.lettuce_verbosity,\n scenarios=self.conf.lettuce_scenarios)\n result = lrunner.run()\n if not result or result.steps != result.steps_passed:\n # I really don't have any idea how to handle this properly.\n raise RuntimeError(\"Lettuce test suite failed\")\n\n return runner",
"def run_with(self, runner):\n runner([self.path] + self.arguments)",
"def test_another_queue(self):\n tasks.print_task2('hello another')\n with run_kuyruk(queue='another_queue') as worker:\n worker.expect('another_queue')\n worker.expect('hello another')\n worker.expect('Task is processed')",
"def __startUnittest(self):\n self.__startProc(\"eric6_unittest.py\")",
"def test_module_test_runner(self):\n from ..sffrw import _module_test_runner\n from . import test_base\n args = parse_args(u\"tests all\", use_shlex=True)\n status = _module_test_runner(test_base, args)\n self.assertEqual(status, 0)",
"def run_test(self, ctxt, test, kwargs, reply_addr):",
"def test_discover_test_runner(self):\n from ..sffrw import _discover_test_runner\n args = parse_args(u\"tests schema\", use_shlex=True)\n path = \"..\" # no tests to be found here\n status = _discover_test_runner(path, args)\n self.assertEqual(status, 0)",
"def setUp_param(self, overrides={}, logging={}):\n def setup(test=None):\n self.doctestmgr = self.doctestmgr_factory()\n self.doctestmgr.overrides = overrides\n self.doctestmgr.logging = logging\n self.doctestmgr.setUp()\n return setup # we were called to get a setUp function",
"def tests(self):\n pass",
"def _test_classes(self):",
"def run(self):\n\n # Detect UART source type, and decide which command to test.\n self.prepare()\n\n # Run the test on each UART port in thread.\n self.logger.info('Test starts')\n for _, ser in self.serials.items():\n ser.start_test()\n\n # Wait all tests to finish.\n for _, ser in self.serials.items():\n ser.wait_test_done()\n\n # Print the result.\n char_lost = self.print_result()\n if char_lost:\n raise ChargenTestError('Test failed: lost %d character(s)' %\n char_lost)\n\n self.logger.info('Test is done')",
"def test_check_runner_manually(self):\n cleared = self.clear_queue_and_runners()\n assert (cleared)\n check = run_result.CheckResult(self.connection, 'test_random_nums')\n prior_res = check.get_latest_result()\n # first, bad input\n bad_res = self.app_utils_obj.run_check_runner({'sqs_url': None})\n assert (bad_res is None)\n # queue a check without invoking runner. Get resulting run uuid\n to_send = ['test_checks/test_random_nums', {}, []]\n tries = 0\n test_success = False\n while tries < 10 and not test_success:\n tries += 1\n run_uuid = self.app_utils_obj.send_single_to_queue(self.environ, to_send, None, invoke_runner=False)\n time.sleep(1)\n with captured_output() as (out, err):\n # invoke runner manually (without a lamba)\n res = self.app_utils_obj.run_check_runner({'sqs_url': self.queue.url}, propogate=False)\n read_out = out.getvalue().strip()\n if res and res.get('uuid') == run_uuid:\n # check the result from run_check_runner\n assert (res['name'] == 'test_random_nums')\n assert (res['uuid'] == run_uuid)\n assert ('_run_info' in res['kwargs'])\n assert (res['kwargs']['_run_info']['run_id'] == run_uuid)\n # check a couple things about printed runner output\n assert ('%s (uuid)' % run_uuid in read_out)\n assert ('Finished: test_checks/test_random_nums' in read_out)\n test_success = True\n assert (test_success)\n # check the stored result as well\n post_res = check.get_result_by_uuid(run_uuid)\n assert (post_res is not None)\n assert ('_run_info' in post_res['kwargs'])\n assert ({'run_id', 'receipt', 'sqs_url'} <= set(post_res['kwargs']['_run_info'].keys()))\n assert (post_res['kwargs']['_run_info']['run_id'] == run_uuid)",
"def run(self):\n if self.test:\n self.set_test_env()\n try:\n consumer = AsyncConsumer(\n cfg=self.cfg, cbk=self._callback, worker=self)\n rabbitmq_conn = consumer.connect()\n rabbitmq_conn.ioloop.start()\n except (KeyboardInterrupt, SystemExit):\n consumer.stop()",
"def runner() -> CliRunner:\n click_runner = CliRunner()\n\n with click_runner.isolated_filesystem():\n yield click_runner",
"def test_worker(self):\n colors = ['blue', 'green', 'red', 'pink', 'black']\n # Test blocking with timeout:\n self.queue.put(*colors)\n msgs = []\n\n @self.queue.worker(timeout=1)\n def appender(msg):\n msgs.append(msg)\n appender()\n self.assertEqual(msgs, colors)\n # Test non-blocking:\n self.queue.put(*colors)\n msgs = []\n\n @self.queue.worker(block=False)\n def appender(msg):\n msgs.append(msg)\n appender()\n self.assertEqual(msgs, colors)\n # Test decorating a class method:\n self.queue.put(*colors)\n msgs = []\n\n class MyClass(object):\n @self.queue.worker(block=False)\n def appender(self, msg):\n msgs.append(msg)\n my_instance = MyClass()\n my_instance.appender()\n self.assertEqual(msgs, colors)",
"def test_patch_run(self):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Check if the module exists, and satisfies the minimum version requirement. Raises ImportError and AssertionError.
|
def check_module(name, min_version=None):
name = '{}'.format(name)
try:
the_module = importlib.import_module(name)
except ImportError:
tf.logging.info(
'Optional Python module %s not found, '
'please install %s and retry if the application fails.',
name, name)
raise
try:
if min_version is not None:
assert the_module.__version__ >= '{}'.format(min_version)
except AttributeError:
pass
except AssertionError:
tf.logging.info(
'Optional Python module %s version %s not found, '
'please install %s-%s and retry if the application fails.',
name, min_version, name, min_version)
raise
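
A brief usage sketch of the helper above; the module names are only illustrative. Note that the check compares version strings lexicographically, so for example '10.0' sorts below '9.0'; a packaging-based comparison would be more robust.

# Hard requirement: re-raises ImportError if the module is missing.
check_module('numpy')

# Soft requirement with a version floor: AssertionError is raised when the
# installed __version__ string compares lower than the requested minimum.
try:
    check_module('scipy', min_version='1.0')
except (ImportError, AssertionError):
    pass  # fall back to a code path that does not need scipy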
|
[
"def check_install(module, at_least_version=None, debug=False):\n try:\n module_version = __import__(module).__version__\n is_module = True\n except ImportError as e:\n is_module = False\n if is_module:\n if at_least_version is not None:\n if parse_version(at_least_version) <= parse_version(module_version):\n return True\n else:\n return False\n else:\n return True\n else:\n False",
"def test_import_and_version(package, expected_version):\n from importlib import metadata\n\n try:\n return metadata.version(package) >= expected_version\n except metadata.PackageNotFoundError:\n return False",
"def _has_required_package():\n\n packages_ok = True\n\n # Check tensorflow with a recent version is installed.\n try:\n # pylint: disable=g-import-not-at-top\n import tensorflow as tf\n # pylint: enable=g-import-not-at-top\n except ImportError:\n eprint('Cannot import Tensorflow. Please verify '\n '\"python -c \\'import tensorflow\\'\" works.')\n packages_ok = False\n try:\n if tf.__version__ < '0.10.0':\n eprint('Tensorflow version must be at least 0.10.0. ',\n VERIFY_TENSORFLOW_VERSION)\n packages_ok = False\n except (NameError, AttributeError) as e:\n eprint('Error while getting the installed TensorFlow version: ', e,\n '\\n', VERIFY_TENSORFLOW_VERSION)\n packages_ok = False\n\n # Check cloud ml sdk with a recent version is installed.\n try:\n # pylint: disable=g-import-not-at-top\n import google.cloud.ml as cloudml\n # pylint: enable=g-import-not-at-top\n except ImportError:\n eprint('Cannot import google.cloud.ml. Please verify '\n '\"python -c \\'import google.cloud.ml\\'\" works.')\n packages_ok = False\n try:\n if cloudml.__version__ < '0.1.7':\n eprint('Cloudml SDK version must be at least 0.1.7 '\n 'to run local prediction. ', VERIFY_CLOUDML_VERSION)\n packages_ok = False\n except (NameError, AttributeError) as e:\n eprint('Error while getting the installed Cloudml SDK version: ', e,\n '\\n', VERIFY_CLOUDML_VERSION)\n packages_ok = False\n\n return packages_ok",
"def attempt_import(name, error_message=None, only_catch_importerror=True,\n minimum_version=None):\n try:\n module = importlib.import_module(name)\n if minimum_version is None:\n return module, True\n elif _check_version(module, minimum_version):\n return module, True\n elif error_message:\n error_message += \" (version %s does not satisfy the minimum \" \\\n \"version %s)\" % (\n module.__version__, minimum_version)\n else:\n error_message = \"The %s module version %s does not satisfy \" \\\n \"the minimum version %s\" % (\n name, module.__version__.minimum_version)\n except ImportError:\n pass\n except:\n if only_catch_importerror:\n raise\n\n if not error_message:\n error_message = \"The %s module (an optional Pyomo dependency) \" \\\n \"failed to import\" % (name,)\n return ModuleUnavailable(error_message), False",
"def min_version(the_module, min_version_str: str = \"\") -> bool:\n if min_version_str:\n mod_version = tuple(int(x) for x in the_module.__version__.split(\".\")[:2])\n required = tuple(int(x) for x in min_version_str.split(\".\")[:2])\n return mod_version >= required\n return True # always valid version",
"def module_check():\n\tstatus = True\n\ttry:\n\t\timport fpdf\n\t\tprint '[+] Fpdf module installed.'\n\texcept ImportError as e:\n\t\tstatus = False\n\t\tif \"fpdf\" in repr(e):\n\t\t\tprint \"[-] FPDF module not installed. Run the following commands:\"\n\t\t\tprint \"[-] python -m pip install fpdf\"\n\ttry:\n\t\timport enum\n\t\tprint '[+] Enum module installed.'\n\texcept ImportError as e:\n\t\tstatus = False\n\t\tif \"enum\" in repr(e):\n\t\t\tprint \"[-] Enum module not installed. Run the following commands:\"\n\t\t\tprint \"[-] python -m pip install enum34\"\n\ttry:\n\t\timport psutil\n\t\tprint '[+] Psutil module installed'\n\texcept ImportError as e:\n\t\tstatus = False\n\t\tif \"psutil\" in repr(e):\n\t\t\tprint \"Enum module not installed. Run the following commands:\"\n\t\t\tprint \"python -m pip install psutil\"\n\treturn status",
"def exact_version(the_module, version_str: str = \"\") -> bool:\n return bool(the_module.__version__ == version_str)",
"def dependencies_check():\n # enforce Python minimum version\n vsys_py = sys.version_info[:3] # 4th element is a string\n if (vsys_py < PYTHON_MIN):\n vmin_py_str = \".\".join((\"%d\" % i) for i in PYTHON_MIN)\n vsys_py_str = \".\".join((\"%d\" % i) for i in vsys_py)\n depfails.append((\"bad\", (\"need Python %s but running under %s: %s\"\n % (vmin_py_str, vsys_py_str, sys.executable))))\n # report problems & exit\n for (p, v) in depfails:\n ERROR(\"%s dependency: %s\" % (p, v))\n if (len(depfails) > 0):\n sys.exit(1)",
"def check_module(\n self, module_tree, module_name, version, parse_error_handler=log_error\n ):\n\n if not module_tree.module_exists(module_name, version):\n module_display = f\"{module_name}\"\n if version is not None:\n module_display += f\"-{version}\"\n raise SystemExit(f\"Module {module_display} does not exist.\")\n try:\n loader = module_tree.load_module(module_name, version, parse_error_handler)\n except ValueError as e:\n raise SystemExit(f\"Error loading module: {e}\")\n return loader",
"def check_python_version():",
"def check_version():\n err = \"PaddlePaddle version 1.6 or higher is required, \" \\\n \"or a suitable develop version is satisfied as well. \\n\" \\\n \"Please make sure the version is good with your code.\" \\\n\n try:\n fluid.require_version('1.7.0')\n except Exception as e:\n logger.error(err)\n sys.exit(1)",
"def check_python_version():\n if sys.version < MINIMUM_PYTHON_VERSION:\n sys.exit(\"Python {0}+ is required.\".format(MINIMUM_PYTHON_VERSION))",
"def test_requirement_in_ha_core():\n request = requests.get(\n \"https://raw.githubusercontent.com/home-assistant/home-assistant/dev/setup.py\"\n )\n res = request.text.split(\"REQUIRES = [\")[-1].split(\"]\")[0]\n requirements = {}\n for line in res.split(\"\\n\"):\n if \"=\" in line and not \"#\" in line:\n line = line.split('\"')[1]\n package = line.split(\">\")[0].split(\"=\")[0]\n version = line.split(\"=\")[-1]\n requirements[package] = version\n\n with open(MANIFEST_FILE, \"r\") as manifest_file:\n for line in json.loads(manifest_file.read())[\"requirements\"]:\n package = line.split(\">\")[0].split(\"=\")[0]\n assert package not in requirements",
"def test_import_not_found(self):\n try:\n import_version('bogus', Requirement.parse('bogus==1.0'),\n Environment(['plugins']))\n except DistributionNotFound, err:\n self.assertEqual(str(err),\n 'could not find distribution satisfying bogus==1.0')",
"def _check_python_component(self):\n\n if self._target_py_version_nr is None:\n self._missing_component('python')",
"def test_requirement_versions():\n request = requests.get(\n \"https://raw.githubusercontent.com/home-assistant/home-assistant/dev/requirements_all.txt\"\n )\n requirements = {}\n for line in request.text.split(\"\\n\"):\n if \"=\" in line and not \"#\" in line:\n package = line.split(\">\")[0].split(\"=\")[0]\n version = line.split(\"=\")[-1]\n requirements[package] = version\n\n with open(MANIFEST_FILE, \"r\") as manifest_file:\n for line in json.loads(manifest_file.read())[\"requirements\"]:\n package = line.split(\">\")[0].split(\"=\")[0]\n version = line.split(\"=\")[-1]\n if package in requirements:\n if version != requirements[package]:\n warnings.warn(\n \"Package has different version from HA, this might casuse problems\"\n )",
"def test_no_dist_version(self):\n self.assertNoReport(self.mk_check(), self.mk_pkg('1.7.0'))",
"def test_version_matches_expected():\n assert __version__ == \"0.1.0\"",
"def check_installed() -> None:\n try:\n spec = importlib.util.find_spec(\"pyscf\") # type: ignore\n if spec is not None:\n return\n except Exception as ex: # pylint: disable=broad-except\n logger.debug(\"PySCF check error %s\", str(ex))\n raise MissingOptionalLibraryError(\n libname=\"PySCF\",\n name=\"PySCFDriver\",\n pip_install=\"pip install 'qiskit-nature[pyscf]'\",\n msg=\"See https://pyscf.org/install.html\",\n ) from ex\n\n raise MissingOptionalLibraryError(\n libname=\"PySCF\",\n name=\"PySCFDriver\",\n pip_install=\"pip install 'qiskit-nature[pyscf]'\",\n msg=\"See https://pyscf.org/install.html\",\n )"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Update an amenity, or abort with a 404 error if it does not exist.
|
def updateAmenity(amenity_id):
    amenity = request.get_json(silent=True)
    if not amenity:
        abort(400, 'Not a JSON')
    amen = storage.get('Amenity', amenity_id)
    if not amen:
        abort(404)
    ignore = ['id', 'created_at', 'updated_at']
    for key, value in amenity.items():
        if key not in ignore:
            setattr(amen, key, value)
    storage.save()
    return jsonify(amen.to_dict()), 200
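
A minimal client-side sketch of exercising this route, assuming (purely for illustration) that the blueprint is mounted under an /api/v1 prefix on localhost; the id below is a placeholder, not a real record.

import requests  # hypothetical client code, not part of the API itself

amenity_id = "..."  # placeholder id
resp = requests.put(
    "http://localhost:5000/api/v1/amenities/{}".format(amenity_id),
    json={"name": "Wi-Fi"},
)
if resp.status_code == 200:
    print(resp.json())        # updated amenity; id/created_at/updated_at unchanged
elif resp.status_code == 404:
    print("no amenity with that id")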
|
[
"def update_amenity(amenity_id):\n amenity_data = request.get_json()\n if amenity_data is None:\n return jsonify({'error': \"Not a JSON\"}), 400\n amenity_update = storage.get(\"Amenity\", amenity_id)\n if amenity_update is None:\n abort(404)\n no_updates = ['id', 'created_at', 'updated_at']\n for attr, value in amenity_data.items():\n if attr in no_updates:\n pass\n else:\n setattr(amenity_update, attr, value)\n amenity_update.save()\n return jsonify(amenity_update.to_dict()), 200",
"def amenity_update(amenity_id):\n data = request.get_json()\n if data is None:\n error_dict = {\"error\": \"Not a JSON\"}\n return jsonify(error_dict), 400\n single_amenity = storage.get(\"Amenity\", amenity_id)\n if single_amenity is None:\n abort(404)\n\n setattr(single_amenity, 'name', data['name'])\n single_amenity.save()\n storage.save()\n\n return jsonify(single_amenity.to_dict())",
"def update_amenity(amenity_id):\n amenities = storage.get(Amenity, amenity_id)\n if not amenities:\n abort(404)\n re = request.get_json()\n if re is None:\n abort(400, 'Not a JSON')\n for k, v in re.items():\n if k not in ['id', 'created_at', 'updated_at']:\n setattr(amenities, k, v)\n storage.save()\n return make_response(jsonify(amenities.to_dict()), 200)",
"def update_non_existing_vehicle_fails(self):\n response = self.client.patch(\n reverse('vehicle:update-fare', args=['-LsaKac4wduiew']),\n self.fare ,\n HTTP_AUTHORIZATION='token {}'.format(self.token))\n return response",
"def test_update_amenity_id(self):\n amenity_args = {\"name\": \"quokka\", \"id\": \"QO1\"}\n amenity = Amenity(**amenity_args)\n amenity.save()\n rv = self.app.put('{}/amenities/{}/'.format(self.path, amenity.id),\n content_type=\"application/json\",\n data=json.dumps({\"id\": \"Z\"}),\n follow_redirects=True)\n self.assertEqual(rv.status_code, 200)\n self.assertEqual(rv.headers.get(\"Content-Type\"), \"application/json\")\n json_format = getJson(rv)\n self.assertEqual(json_format.get(\"name\"), amenity_args[\"name\"])\n self.assertEqual(json_format.get(\"id\"), amenity_args[\"id\"])\n storage.delete(amenity)",
"def update_request():",
"def update_anime():\n info = MALB.get_anime_info(request.form.get('malId'), ['episodes'])[0]\n\n utoa = UserToAnime(g.user.get_id(), request.form['malId'])\n utoa.episodes = info.episodes\n\n for field in request.form:\n if request.form[field]:\n setattr(utoa, field, request.form[field])\n\n form = UpdateAnimeForm(utoa)(csrf_enabled=False)\n if form.validate_on_submit():\n MALB.update_anime([utoa], session['malKey'])\n return Response(status=200, mimetype=\"text/html\")\n\n return Response(render_template('displayformerrors.html', form=form), status=400, mimetype=\"text/html\")",
"def test_ad_invalid_update(self):\n another_user = User.objects.create_user(\n \"another_test@test.org\",\n password=\"123\",\n first_name=\"Ola\",\n last_name=\"Nordmann\",\n phone_number=\"+4798989898\",\n )\n another_token = AccessToken.for_user(another_user)\n self.client.credentials(HTTP_AUTHORIZATION=\"Bearer \" + str(another_token))\n url_update = reverse(\"ad-detail\", args=[\"1\"])\n response = self.client.put(url_update, self.ad_update_data, format=\"json\")\n # Checks that its not possible to update the ad\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_anon_update_ml_model(self):\n\n response = self.client.put(self.url, data=self.valid_payload)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def put(self):\n abort(403)",
"def test_update_DNE(self):\n r = self.client.get(reverse('makeReports:update-dp',kwargs={'dept':self.dept.pk,'pk':901}))\n self.assertEquals(r.status_code,404)",
"def test_update_DNE(self):\n r = self.client.get(reverse('makeReports:update-dp',kwargs={'dept':self.dept.pk,'pk':910}))\n self.assertEquals(r.status_code,404)",
"def test_404_update_nonexistent_book(self):\n\n res = self.client().patch(\n '/books/300', json=self.new_book)\n data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(data['success'], False)\n self.assertEqual(data['message'], 'Not Found')",
"def test_update_invalid_data_user(self):\n self.assertEqual(400, self.resp.status_code)",
"def test_update_non_exist_todo_item(self):\n\n payload = {\n 'name': 'new_name',\n }\n res = self.client.put(get_todo_item_detail_url(999), payload)\n\n self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)",
"def test_update_nonexistent_flight(self):\n\n res1 = self.client().post('/api/v1/auth/signin', data=json.dumps(user_data[16]), content_type=\"application/json\")\n res1 = json.loads(res1.data)\n token = res1['data'][0]['token']\n\n res = self.client().put(\n '/api/v1/flight/100',\n data=json.dumps(flight[11]),\n headers={\n 'content-type': 'application/json',\n 'auth_token': token\n }\n )\n self.assertEqual(res.status_code, 404)\n response = json.loads(res.data)\n self.assertTrue(response['error'])",
"def test_update_login_required(self):\n res = self.client.put(reverse('message:update', args=[1]))\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_user_edit_approved_entry(self):\r\n url, entry, data = self.edit_entry_helper()\r\n\r\n response = self.client.get(url)\r\n self.assertEqual(response.status_code, 404)\r\n\r\n response = self.client.post(url, data=data)\r\n self.assertEqual(response.status_code, 404)",
"def test_update_curriculum_that_DNE(self):\n response = self.client.patch(self.url + \"100/\", {'name': 'Does not exist'})\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n self.assertEqual(response.data, {'detail': 'Not found'})"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Given the orbital parameters, compute the RV at times t.
|
def RV_model(t, p):
(period, ttran, ecosomega, esinomega, K, gamma, gamma_offset, sigma_jitter1_sqrd, sigma_jitter2_sqrd) = p
e = np.sqrt(ecosomega**2. + esinomega**2.)
omega = np.arctan2(esinomega, ecosomega)
#mean motion: n = 2pi/period
n = 2. * np.pi / period
# Sudarsky 2005 Eq. 9 to convert between center of transit
# and pericenter passage (tau)
edif = 1. - e**2.
fcen = np.pi/2. - omega
tau = (ttran + np.sqrt(edif) * period / (2 * np.pi) *
(e * np.sin(fcen) / (1. + e * np.cos(fcen)) - 2. / np.sqrt(edif) *
np.arctan(np.sqrt(edif) * np.tan(fcen / 2.) / (1. + e))))
#Define mean anomaly: M
M = (n * (t - tau)) % (2. * np.pi)
#Determine the Energy: E
E = kepler(M, e)
#Solve for fanom (measure of location on orbit)
tanf2 = np.sqrt((1. + e) / (1. - e)) * np.tan(E / 2.)
fanom = (np.arctan(tanf2) * 2.) % (2. * np.pi)
#Calculate RV at given location on orbit
RV = K * (e * np.cos(omega) + np.cos(fanom + omega)) + gamma
return RV
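
The model above relies on a kepler(M, e) helper that is not shown. A minimal Newton-Raphson sketch of such a solver follows; the starting guess, tolerance, and iteration cap are assumptions, not taken from the original code.

import numpy as np

def kepler(M, e, tol=1e-10, max_iter=100):
    """Solve Kepler's equation M = E - e*sin(E) for the eccentric anomaly E."""
    M = np.asarray(M, dtype=float)
    E = M.copy()                                   # starting guess: E0 = M
    for _ in range(max_iter):
        delta = (E - e * np.sin(E) - M) / (1.0 - e * np.cos(E))
        E -= delta
        if np.all(np.abs(delta) < tol):
            break
    return E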
|
[
"def rv(t, orbits, acc=1.e-12):\n\n # handle just one orbit\n if isinstance(orbits, Norbit):\n orbits = [orbits]\n \n # Three minus signs combine to give the final sign: z points towards \n # Earth which gives one minus sign. Another comes because we are \n # considering reflex motion. A third comes because of the way omega\n # is defined relative to the ascending node.\n rvs = t - t\n for orb in orbits:\n tanom = orb.true(t, acc)\n rvs -= orb.n*subs.AU/subs.DAY/1000.*orb.k*orb.a/np.sqrt(1-orb.e**2)* \\\n m.sin(orb.iangle)*(np.cos(tanom+orb.omega)+orb.e*np.cos(orb.omega))\n\n return rvs",
"def ptolrvm(pd):\n\n norbit, nephem = norbeph(pd) \n\n # tref is the time at which the orbital elements are assumed to apply\n mass0 = pd['mass0']\n tref = pd['tstart']\n coord = pd['coord']\n \n r0, v0 = Vec3(), Vec3()\n msum = mass0\n if coord == 'Astro':\n for i in range(1,norbit+1):\n stri = str(i)\n msum += pd['mass'+stri]\n lrvm = []\n pdc = pd.copy()\n\n # 'n' in what follows is the conventional symbol for the angular frequency\n # of an orbit.\n ks = []\n for i in range(1,norbit+1):\n stri = str(i)\n\n # compute angular frequency\n a = pd['a'+stri]\n mass = pd['mass' + stri]\n if coord == 'Jacobi':\n msum += mass\n k = mass/msum\n mu = mass0/(1-k)\n elif coord == 'Marsh':\n msum += mass\n k = mass/msum\n mu = msum\n elif coord == 'Astro':\n mu = mass0+mass\n k = mass/msum\n else:\n raise Exception('Unrecognised coordinates in ptolrvm')\n\n n = comp_n(mu,a)\n pdc['mu'+stri] = mu\n\n orb = Orbit(pdc,i,pdc['eomega'+stri])\n r,v = orb.torv(tref)\n\n # accumulate reflex sums (automatically barycentric)\n r0 -= k*r\n v0 -= k*v\n\n # store in Rvm list, store k values\n lrvm.append(Rvm(r, v, mass, a, n*a, pdc['rint'+stri]))\n ks.append(k)\n\n if coord == 'Jacobi' or coord == 'Marsh':\n # Need to convert the Jacobi coordinates to barycentric ones\n # for N-body work. Work through rvm list in reverse order:\n rsum, vsum = Vec3(), Vec3()\n for i in range(len(ks)-1,-1,-1):\n rsum += ks[i]*lrvm[i].r\n vsum += ks[i]*lrvm[i].v\n lrvm[i].r -= rsum\n lrvm[i].v -= vsum\n\n elif coord == 'Astro':\n # to get from astro to barycentric simply add r0, v0\n for i in range(len(ks)):\n lrvm[i].r += r0\n lrvm[i].v += v0\n\n # Create and insert the zeroth object Rvm and return\n rvm0 = Rvm(r0, v0, mass0, r0.norm(), v0.norm(), pdc['rint0'])\n lrvm.insert(0,rvm0)\n return lrvm",
"def integrate_orbit(self, t_end = 1.0E17, dt=1.0E11,\n verbose=True, **kwargs):\n nsteps = int(np.ceil(t_end / dt))\n\n print \"integrating orbit for \" + self.name\n print \"for %5.4e Myr\"%(t_end/cgs.Myr)\n print \"Using %2.2e timesteps at dt = %5.4e\"%(nsteps,dt)\n\n \n\n t,x,v = leapfrog_integrate(self.acceleration_function, self.x0,\n self.v0, dt, nsteps, verbose, kwargs)\n\n\n self.t = t\n self.x = x\n self.v = v\n\n self.r = np.sqrt(np.sum(x**2, axis=-1)).flatten()\n self.vr = np.sqrt(np.sum(v**2, axis=-1)).flatten()",
"def ExplicitRK45(self): \r\n \r\n self.DifferentialFunction(self.Yn,self.dy1,time=self.Clock)\r\n self.dYtmp[:]=self.inv4*self.dy1[:]\r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep)\r\n \r\n self.DifferentialFunction(self.Ytmp,self.dy2,time=self.Clock+self.inv4*self.TimeStep)\r\n self.dYtmp[:]=3.0*self.inv32*self.dy1[:]+9.0*self.inv32*self.dy2[:]\r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep)\r\n \r\n self.DifferentialFunction(self.Ytmp,self.dy3,time=self.Clock+3.0*self.inv8*self.TimeStep)\r\n self.dYtmp[:]=1932.0*self.inv2197*self.dy1[:]-7200.0*self.inv2197*self.dy2[:]+7296.0*self.inv2197*self.dy3[:]\r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep)\r\n \r\n self.DifferentialFunction(self.Ytmp,self.dy4,time=self.Clock+12.0*self.inv13*self.TimeStep)\r\n self.dYtmp[:]=439.0*self.inv216*self.dy1[:]-8.0*self.dy2[:]+3680.0*self.inv513*self.dy3[:]-845.0*self.inv4104*self.dy4[:]\r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep) \r\n \r\n self.DifferentialFunction(self.Ytmp,self.dy5,time=self.Clock+self.TimeStep)\r\n\r\n self.dYtmp[:]=-8.0*self.inv27*self.dy1[:]+2.0*self.dy2[:]-3544.0*self.inv2565*self.dy3[:]+1859*self.inv4104*self.dy4[:]-11.0*self.inv40*self.dy5[:]\r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep) \r\n self.DifferentialFunction(self.Ytmp,self.dy6,time=self.Clock+self.TimeStep*self.inv2)\r\n \r\n self.dYtmp[:]=(25.0*self.inv216*self.dy1[:]+1408.0*self.inv2565*self.dy3[:]+2197.0*self.inv4104*self.dy4[:]-0.20*self.dy5[:])\r\n self.dy_tmp_45[:]=self.dYtmp[:]\r\n self.Ynp1[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep)",
"def rotationalVelocity(self, t):\n pass",
"def __call__(self,X,t):\n xvals = X[:3]-self.locs\n rvals = numpy.sqrt( (xvals**2).sum(1) )\n \n dVdt = sum([ self.halos[i].accel(rvals[i])*xvals[i]/rvals[i] \\\n for i in range(self.N) ])\n return numpy.concatenate([X[3:] * 1E3 * yr/kpc,\n dVdt])",
"def R(self,t,z,p):\n return t,z,p",
"def lorzrk(s,t,param):\n \n #* For clarity, unravel input vectors\n x, y, z = s[0], s[1], s[2]\n r = param[0]\n sigma = param[1]\n b = param[2]\n\n #* Return the derivatives [dx/dt dy/dt dz/dt]\n deriv = np.empty(3)\n deriv[0] = sigma*(y-x)\n deriv[1] = r*x - y - x*z\n deriv[2] = x*y - b*z\n return deriv",
"def _vol_fn(t, x):\n # Get parameter values at time `t`\n volatility = _get_parameters(tf.expand_dims(t, -1), self._volatility)[0]\n volatility = tf.transpose(volatility)\n if self._corr_matrix is not None:\n corr_matrix = _get_parameters(tf.expand_dims(t, -1), self._corr_matrix)\n corr_matrix = corr_matrix[0]\n corr_matrix = tf.linalg.cholesky(corr_matrix)\n else:\n corr_matrix = tf.eye(self._dim, dtype=volatility.dtype)\n\n return volatility * corr_matrix + tf.zeros(\n x.shape.as_list()[:-1] + [self._dim, self._dim],\n dtype=volatility.dtype)",
"def ExplicitRK3(self): \r\n self.DifferentialFunction(self.Yn,self.dy1,time=self.Clock)\r\n self.dYtmp[:]=self.dy1[:]\r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep*self.inv2)\r\n \r\n self.DifferentialFunction(self.Ytmp,self.dy2,time=self.Clock+self.TimeStep*self.inv2)\r\n self.dYtmp[:]=-self.dy1[:]+2.0*self.dy2[:]\r\n \r\n self.Ytmp[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep)\r\n self.DifferentialFunction(self.Ytmp,self.dy3,time=self.Clock+self.TimeStep)\r\n self.dYtmp[:]=(self.dy1[:]+4.0*self.dy2[:]+self.dy3)*self.inv6\r\n\r\n \r\n self.Ynp1[:]=self.NextStepComputation(self.Yn,self.dYtmp,self.TimeStep)",
"def calcResiduals(self, params):\n self.rr.reset() # Put back to time zero\n # Update the simulation parameters\n for name, value in params.valuesdict().items():\n self.rr[name] = value\n fittedArr = self.rr.simulate(0, self.observedTS.end,\n len(self.observedTS))\n self._fittedArr = fittedArr.copy()\n fittedArr = fittedArr[:, 1:] # Delete time column\n observedArr = self.observedTS[self.colnames]\n self._residualsArr = observedArr - fittedArr\n residualsArr = self._residualsArr.flatten()\n return residualsArr",
"def velocity(obs0, obs1, r0, r1):\n\tsigma = G/(np.linalg.norm(r0)**3)\n\tv0 = (r1 - vel_f(obs1.JD, obs0.JD, sigma, 0)*r0)/vel_g(obs1.JD, obs0.JD, sigma)\n\tfor _ in range(4): # Iterate to get tau\n\t\ttau = r0.dot(v0)/r0.dot(r0)\n\t\tv0 = (r1 - vel_f(obs1.JD, obs0.JD, sigma, tau)*r0)/vel_g(obs1.JD, obs0.JD, sigma)\n\treturn v0",
"def lsr_nonsense():\n\n RO = 8.\n VO = 220.\n BOVY_TIME_CONVERSION = bovy_conversion.time_in_Gyr(VO, RO) * 1000 # Myr/bovy_time\n\n perimeter = 2 * np.pi * 8 * u.kpc\n velocity = 220 * u.km / u.s\n # for reference, LSR (at 8 kpc, with V = 220 km/s) should take this long\n # to complete one orbit\n orbit_time = (perimeter / velocity).to(\"Myr\")\n\n max_age = orbit_time.value / BOVY_TIME_CONVERSION\n ntimes = 100\n ts = np.linspace(0, max_age, ntimes)\n\n # demo a star (with vT=220, vR=0, vZ=0, z=0, phi=0.1 pi) staying\n # fixed in our coordinate frame\n R, vR, vT, z, vz, phi = 1., 0., 1., 0., 0., 0.\n LSR_coords = [R, vR, vT, z, vz, phi]\n lsr = Orbit(vxvv=LSR_coords, solarmotion='schoenrich', vo=220, ro=8)\n lsr.integrate(ts, mp, method='odeint')\n\n lsr_data = lsr.getOrbit()\n lsrR = RO * lsr_data[:,0]\n lsrphi = lsr_data[:,5]\n\n lsrX = lsrR * np.cos(lsrphi)\n lsrY = lsrR * np.sin(lsrphi)\n lsrZ = RO * lsr_data[:,3]\n\n R, vR, vT, z, vz, phi = 1., 0., 1., 0., 0., 0.25*np.pi\n rot_lsr_coords = [R, vR, vT, z, vz, phi]\n rot_lsr = Orbit(vxvv=rot_lsr_coords, solarmotion='schoenrich', vo=220, ro=8)\n rot_lsr.integrate(ts, mp, method='odeint')\n\n rot_lsr_data = rot_lsr.getOrbit()\n\n # putting into corotating cartesian system centred on LSR\n XYZUVW_rot = galpy_coords_to_xyzuvw(rot_lsr_data, ts)\n plt.clf()\n plt.plot(XYZUVW_rot[:,0], XYZUVW_rot[:,1])\n plt.savefig(\"temp_plots/rotXY.png\")\n\n\n orbit_time = (perimeter / velocity).to(\"Myr\")\n ts = np.linspace(0., 10*orbit_time.value, 1000) / BOVY_TIME_CONVERSION\n ra, dec, dist, mu_ra, mu_dec, vlos = 0., 0., 0., 0., 0., 0.\n solar_coords = [ra, dec, dist, mu_ra, mu_dec, vlos]\n sun = Orbit(vxvv=solar_coords, radec=True,\n solarmotion='schoenrich') # should just be the sun's orbit\n sun.integrate(ts, mp, method='odeint')\n\n # get the orbit [R, vR, vT, z, vz, phi] (pos scaled by ro, vel scaled by vo)\n sun_data = sun.getOrbit()\n XYZUVW_sun = galpy_coords_to_xyzuvw(sun_data, ts)\n plt.clf()\n plt.plot(XYZUVW_sun[:,0], XYZUVW_sun[:,1])\n plt.savefig(\"temp_plots/sunXY.png\")\n plt.clf()\n plt.plot(XYZUVW_sun[:,0], XYZUVW_sun[:,2])\n plt.savefig(\"temp_plots/sunXZ.png\")",
"def demo_lsr_and_sun_cal():\n perimeter = 2 * np.pi * 8 * u.kpc\n velocity = 220 * u.km/ u.s\n # for reference, LSR (at 8 kpc, with V = 220 km/s) should take this long\n # to complete one orbit\n orbit_time = (perimeter / velocity).to(\"Gyr\")\n\n max_age = 100 * orbit_time / bovy_conversion.time_in_Gyr(220., 8.) # Gyr\n ntimes = 10000\n ts = np.linspace(0,max_age,ntimes)\n\n # INITIALISING SUN COORDINATES AND ORBIT\n #deg, deg, kpc, mas/yr, mas/yr, km/s\n ra, dec, dist, mu_ra, mu_dec, vlos = 0., 0., 0., 0., 0., 0.\n solar_coords = [ra, dec, dist, mu_ra, mu_dec, vlos]\n sun = Orbit(vxvv=solar_coords, radec=True, solarmotion='schoenrich') # should just be the sun's orbit\n sun.integrate(ts,mp,method='odeint')\n\n # get the orbit [R, vR, vT, z, vz, phi] (pos scaled by ro, vel scaled by vo)\n sun_data = sun.getOrbit()\n\n # plots the sun's motion with respect to Galactic Centre\n sunR = 8 * sun_data[:,0]\n sunphi = sun_data[:,5]\n sunX = sunR * np.cos(sunphi)\n sunY = sunR * np.sin(sunphi)\n sunZ = 8 * sun_data[:,3]\n plt.clf()\n plt.plot(sunX, sunY)\n plt.savefig('temp_plots/sunXY.png')\n\n plt.clf()\n plt.plot(sunX, sunZ)\n plt.savefig('temp_plots/sunXZ.png')\n\n # plot the XY of the sun's motion using galpy's plot function (w.r.t GC)\n plt.clf()\n sun.plot(d1='x', d2='y')\n plt.savefig('temp_plots/galpy_sunXY.png')\n\n sun.plot(d1='x', d2='z')\n plt.savefig('temp_plots/galpy_sunXZ.png')\n\n plt.clf()\n sun.plot(d1='R', d2='z')\n plt.savefig('temp_plots/galpy_sunRZ.png')\n\n # kpc, km/s\n # INITIALISING THE LSR (at XYZUVW (w.r.t sun) of [0,0,-0.025,0,220,0]\n R, vR, vT, z, vz, phi = 1., 0., 1., 0., 0., 0. # <--- Galpy units\n LSR_coords = [R, vR, vT, z, vz, phi]\n lsr = Orbit(vxvv=LSR_coords, solarmotion='schoenrich', vo=220, ro=8)\n lsr.integrate(ts, mp, method='odeint')\n\n # plots a perfect circle\n plt.clf()\n lsr.plot(d1='x', d2='y')\n plt.savefig('temp_plots/galpy_lsrXY.png')\n\n plt.clf()\n lsr.plot(d1='x', d2='z')\n plt.savefig('temp_plots/galpy_lsrXZ.png')\n\n # Manually reconstructing orbit\n lsr_data = lsr.getOrbit()\n lsrR = 8 * lsr_data[:,0]\n lsrphi = lsr_data[:,5]\n\n lsrX = lsrR * np.cos(lsrphi)\n lsrY = lsrR * np.sin(lsrphi)\n lsrZ = 8 * lsr_data[:,3]\n\n plt.clf()\n plt.plot(lsrX, lsrY)\n plt.savefig('temp_plots/lsrXY.png')\n plt.clf()\n plt.plot(lsrX, lsrZ)\n plt.savefig('temp_plots/lsrXZ.png')\n\n # plotting both sun and lsr\n plt.clf()\n plt.plot(lsrX, lsrY)\n plt.plot(sunX, sunY)\n plt.savefig('temp_plots/combXY.png')\n plt.clf()\n plt.plot(lsrX, lsrZ)\n plt.plot(sunX, sunZ)\n plt.savefig('temp_plots/combXZ.png')\n\n # Finding sun's path w.r.t the LSR in non-corotating frame\n relsunX = sunX - lsrX\n relsunY = sunY - lsrY\n relsunZ = sunZ - lsrZ\n\n plt.clf()\n plt.plot(relsunX, relsunY)\n plt.savefig('temp_plots/relsunXY.png')\n plt.clf()\n plt.plot(relsunX, relsunZ)\n plt.savefig('temp_plots/relsunXZ.png')\n\n # Getting sun's path w.r.t the LSR in cortating frame\n sun_rel_data = sun_data - lsr_data\n sun_relR = 8 * sun_rel_data[:,0]\n sun_relphi = sun_rel_data[:,5]\n\n sun_relX = sun_relR * np.cos(sun_relphi)\n sun_relY = sun_relR * np.sin(sun_relphi)\n sun_relZ = 8 * sun_rel_data[:,3]\n\n plt.clf()\n plt.plot(sun_relX, sun_relY)\n plt.savefig('temp_plots/sun_relXY.png')\n plt.clf()\n plt.plot(sun_relX, sun_relZ)\n plt.savefig('temp_plots/sun_relXZ.png')\n\n # Try and plot LSR and sun in 3D for comparison with\n # relative plot\n plt.clf()\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n #theta = np.linspace(-4 * np.pi, 4 * np.pi, 100)\n #z = np.linspace(-2, 
2, 100)\n #r = z ** 2 + 1\n #x = r * np.sin(theta)\n #y = r * np.cos(theta)\n ax.plot(sunX, sunY, sunZ, label='sun')\n ax.plot(lsrX, lsrY, lsrZ, label='lsr')\n ax.legend()\n plt.savefig('temp_plots/3D_sun_lsr.png')\n plt.show()\n #galpy_coords_to_xyzuvw(lsr_data)\n print(\"Max age is {} and max phi is {}... does this make sense?\".\\\n format(max_age, np.max(lsr_data[:,5])))\n print(\"Max age is {} and max phi is {}... does this make sense?\". \\\n format(max_age, np.max(sun_data[:,5])))",
"def comp_orbfit(of,vxvv,ts,pot,lb=False,radec=False,ro=None,vo=None):\n from galpy.util import bovy_coords\n bovy_coords._APY_COORDS_ORIG= bovy_coords._APY_COORDS\n bovy_coords._APY_COORDS= False # too slow otherwise\n of.integrate(ts,pot)\n off= of.flip()\n off.integrate(ts,pot)\n #Flip velocities again\n off._orb.vxvv[1]*= -1.\n off._orb.vxvv[2]*= -1.\n off._orb.vxvv[4]*= -1.\n if lb:\n allvxvv= []\n for ii in range(len(ts)):\n allvxvv.append([of.ll(ts[ii]),of.bb(ts[ii]),\n of.dist(ts[ii]),of.pmll(ts[ii]),\n of.pmbb(ts[ii]),of.vlos(ts[ii])])\n allvxvv.append([off.ll(ts[ii]),off.bb(ts[ii]),\n off.dist(ts[ii]),off.pmll(ts[ii]),\n off.pmbb(ts[ii]),off.vlos(ts[ii])])\n allvxvv= numpy.array(allvxvv)\n elif radec:\n allvxvv= []\n for ii in range(len(ts)):\n allvxvv.append([of.ra(ts[ii],ro=ro,vo=vo),of.dec(ts[ii],ro=ro,vo=vo),\n of.dist(ts[ii],ro=ro,vo=vo),of.pmra(ts[ii],ro=ro,vo=vo),\n of.pmdec(ts[ii],ro=ro,vo=vo),of.vlos(ts[ii],ro=ro,vo=vo)])\n allvxvv.append([off.ra(ts[ii]),off.dec(ts[ii],ro=ro,vo=vo),\n off.dist(ts[ii],ro=ro,vo=vo),off.pmra(ts[ii],ro=ro,vo=vo),\n off.pmdec(ts[ii],ro=ro,vo=vo),off.vlos(ts[ii],ro=ro,vo=vo)])\n allvxvv= numpy.array(allvxvv)\n else:\n allvxvv= numpy.concatenate((of.getOrbit(),off.getOrbit()),axis=0)\n out= []\n for ii in range(vxvv.shape[0]):\n out.append(numpy.amin(numpy.sum((allvxvv-vxvv[ii])**2.,axis=1)))\n bovy_coords._APY_COORDS= bovy_coords._APY_COORDS_ORIG\n return numpy.array(out)",
"def __call__(self, values, t0, t1, X0):\n def f(t, x):\n return self.xd(t, x, *values)\n\n def jac(t, x):\n return [self.xd_jac(t, x, *values)]\n \n # Solve the initial value problem to get X, which in turns\n # lets us obtain values for XD, dependent as it probably is\n # (due to logistic component likely included) on X\n r = Results(t0, t1)\n sol = solve_ivp(\n f, [t0, t1], [X0],\n method='Radau', t_eval=r.t, vectorized=True, jac=jac)\n if not sol.success:\n return\n r.X = sol.y[0]\n if t1 < t0: r.flip()\n r.XD = f(r.t, r.X)\n return r",
"def val_TR_biv_params_to_rotor(x):\n B_val = val_vec_repr_to_bivector(x)\n R_val = val_exp(B_val)\n return R_val",
"def euler_method(t, f_y_t, y0, vin):\n \n y = np.zeros((len(y0), len(t)+1))\n dt = t[1]-t[0]\n print(y.shape)\n y[:,0] = y0\n \n\n \n for index, tn in enumerate(t):\n \n y[:,index+1] = dt * (f_y_t(tn, y[:,index], dt)) + y[:,index]\n \n return y[:,:len(t)]",
"def rv2classical(r,v):\n \n # I,J,K fundamental versors\n I = np.array([1, 0, 0])\n J = np.array([0, 1, 0])\n K = np.array([0, 0, 1])\n\n # radius and velocity versors\n i_r = r/norm(r)\n i_v = v/norm(v)\n \n mi = 398600.4418 # gravitational parameter\n E_g = (norm(v)**2)/2 - mi/norm(r) # specific orbital energy\n SMA = - mi/(2*E_g) # semi-major axis\n\n # specific angular momentum vector h and versor i_h\n h = np.cross(r.reshape(1,-1),v.reshape(1,-1)).reshape(-1,1)\n i_h = h/norm(h)\n\n # eccentricity vector e\n e = 1/mi*((norm(v)**2-mi/norm(r))*r-norm(r.transpose().dot(v))*v)\n # eccentricity value ECC\n ECC = norm(e)\n i_e = e/norm(e) # eccentricity versor i_e\n\n # ascending node vector n and versor i_n\n n = np.cross(K,h.reshape(1,-1)).reshape(-1,1)\n if norm(n)==0:\n i_n = n\n else:\n i_n = n/norm(n)\n \n # inclination INC\n INC = norm(np.arccos(K.dot(i_h)))\n if np.isnan(INC):\n INC = 0\n\n # right ascension of ascending node RAAN\n if J.dot(i_n) >= 0:\n RAAN = norm(np.arccos(I.dot(i_n)))\n else:\n RAAN = 2*np.pi-norm(np.arccos(I.dot(i_n)))\n if np.isnan(RAAN) or INC == 0:\n RAAN = 0\n \n # true anomaly TA\n if r.transpose().dot(v) >=0:\n TA = norm(np.arccos(i_r.transpose().dot(i_e)))\n else:\n TA = 2*np.pi-norm(np.arccos(i_r.transpose().dot(i_e)))\n\n # argument of perigee AOP\n if K.dot(i_e) >= 0:\n AOP = norm(np.arccos(i_n.transpose().dot(i_e)))\n else:\n AOP = 2*np.pi-norm(np.arccos(i_n.transpose().dot(i_e)))\n if np.isnan(AOP):\n AOP = 0\n\n return [SMA,ECC,INC,RAAN,AOP,TA]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns datetime.datetime for the photo collection time
|
def get_photo_datetime(image_name):
    """Return a datetime.datetime for when the photo was taken, or None if absent."""
    image = Image.open(image_name)
    info = image._getexif()
    photo_datetime = None
    if info:
        for tag, value in info.items():
            decoded = TAGS.get(tag, tag)
            if decoded == "DateTimeOriginal":
                # EXIF stores the capture time as "YYYY:MM:DD HH:MM:SS"; parse it into a
                # datetime (requires `from datetime import datetime` at module level).
                photo_datetime = datetime.strptime(value, "%Y:%m:%d %H:%M:%S")
    return photo_datetime
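
# A minimal usage sketch ('IMG_0001.jpg' is a hypothetical example file, not part of the
# original module), shown as comments only:
#
#     taken = get_photo_datetime('IMG_0001.jpg')
#     if taken is not None:
#         print(taken.isoformat())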
|
[
"def get_date_picture_taken(self):\n return self.get_tag('DateTime')",
"def datetime():\n return _get_rtc().datetime()",
"def _get_time(self) -> datetime:\n return datetime.fromtimestamp(int(self._raw_meta['time']) / 1e3)",
"def creation_datetime(self):\n return super()._to_datetime(self.creation_time)",
"def test_get_date(self):\n image_with_datetaken = {\n 'id': '39831840270', 'datetaken': '2018-04-22 16:41:11', 'ownername': 'Marian',\n 'originalformat': 'jpg', 'latitude': '0', 'longitude': '0', 'height_o': '800', 'width_o': '533',\n 'url': 'https://live.staticflickr.com/882/39831840270_ba571c8254_c.jpg'}\n expected_datetime_date_taken = datetime(2018, 4, 22, 16, 41, 11)\n date_taken = image_provider_flickr._get_date(image_with_datetaken)\n self.assertEqual(expected_datetime_date_taken, date_taken)",
"def datetime(self):\n return self.date_published.strftime('%Y-%m-%d %H:%M:%S')",
"def time():\n return datetime.datetime.now()",
"def _convert_timestamps_to_datetimes(image_meta):\n for attr in ['created_at', 'updated_at', 'deleted_at']:\n if image_meta.get(attr):\n image_meta[attr] = _parse_glance_iso8601_timestamp(\n image_meta[attr])\n return image_meta",
"def test_photo_has_date_uploaded(self):\n one_photo = Photo.objects.get(title='wedding')\n now = datetime.now().strftime('%x %X')[:2]\n self.assertEqual(one_photo.date_uploaded.strftime('%x %X')[:2], now)",
"def get_date(self):\n if self.is_image:\n dat = None\n # for subsecond prec, see doi.org/10.3189/2013JoG12J126 , sect. 2.2,2.3\n tags = [\n (36867, 37521), # (DateTimeOriginal, SubsecTimeOriginal)\n # when img taken\n (36868, 37522), # (DateTimeDigitized, SubsecTimeDigitized)\n # when img stored digitally\n (306, 37520), ] # (DateTime, SubsecTime)#when file was changed\n try:\n with Image.open(self.abs_path) as image:\n self.exif = image._getexif()\n if self.exif:\n for tag in tags:\n dat = self.exif.get(tag[0])\n # sub = exif.get(tag[1], 0)\n # PIL.PILLOW_VERSION >= 3.0 returns a tuple\n dat = dat[0] if type(dat) == tuple else dat\n # sub = sub[0] if type(sub) == tuple else sub\n if dat is not None:\n break\n if dat is None:\n t = os.path.getmtime(self.abs_path)\n return str(dt.fromtimestamp(t))[:16]\n if str(dat)[:4] != '0000':\n return str(dat)\n except:\n log_save(f\"Ошибка при получении даты изображения {self.abs_path}\")\n t = os.path.getmtime(self.abs_path)\n return str(dt.fromtimestamp(t))[:16]",
"def iget_date(self , time_index):\n long_time = EclSum.cNamespace().iget_sim_time( self , time_index )\n ct = CTime(long_time)\n return ct.datetime()",
"def uploaded_date(self):\n return dt.fromtimestamp(normal_epoch(self.uploaded_timestamp))",
"def get_creation_date_time(self):\n return self._root[\"CreationDateTime\"]",
"def atime(self):\r\n return datetime.fromtimestamp(os.path.getatime(self.path))",
"def set_date_picture_taken(self, datetime):\n return self.set_tag('DateTime', datetime)",
"def get_time(self):\n return self.event_time",
"def filetime(d):\n return d.highdatetime * 4294967296 + d.lowdatetime",
"def get_image_unixtime(ibs, gid_list):\n return ibs.db.get(IMAGE_TABLE, ('image_time_posix',), gid_list)",
"def get_photo_timestamp(filepath):\n meta = get_photo_metadata(filepath)\n\n tags = [\n 'Xmp.exif.DateTimeOriginal',\n 'Exif.Photo.DateTimeOriginal',\n ]\n\n if meta:\n return getattr(rget(meta, tags), 'value', None)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a camera make and model from the EXIF data of a PIL Image item.
|
def get_camera_makemodel(image_name):
    """Return the camera make and model from an image's EXIF data, or 'null'."""
    image = Image.open(image_name)
    info = image._getexif()
    if info:
        make = ''
        model = ''
        for tag, value in info.items():
            decoded = TAGS.get(tag, tag)
            if decoded == "Model":
                model = value.strip()
            if decoded == "Make":
                make = value.strip()
        # Either tag may be missing; join whatever was found instead of concatenating
        # possibly-unset values (the original crashed when Make or Model was absent).
        make_model_string = ' '.join(part for part in (make, model) if part) or 'null'
    else:
        make_model_string = 'null'
    return make_model_string
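
# A minimal usage sketch ('IMG_0001.jpg' is a hypothetical example file); the function
# returns 'null' when the image carries no usable EXIF block:
#
#     print(get_camera_makemodel('IMG_0001.jpg'))  # e.g. "Canon EOS 5D Mark III"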
|
[
"def camera_exif(self):\n\n tags = [\n 'ExifImageHeight',\n 'ExifImageWidth',\n 'ExifOffset',\n 'FNumber',\n 'Flash',\n 'FocalLength',\n 'ISOSpeedRatings',\n 'Make',\n 'MeteringMode',\n 'Model',\n 'SceneCaptureType',\n 'WhiteBalance',\n ]\n return self.get_tags(self.exif, tags)",
"def make_foto_info(self, item):\n descr = u'{{Photograph\\n'\n descr += u' |photographer = %s\\n' % self.get_creator(\n item.namn_fotograf)\n descr += u' |title = \\n'\n descr += u' |description = %s\\n' % item.get_description()\n descr += u' |original description = %s\\n' % (\n item.get_original_description(), )\n descr += u' |depicted people = %s\\n' % '/'.join(\n self.get_depicted_object(item, typ='person'))\n descr += u' |depicted place = %s\\n' % (\n item.get_depicted_place(self.mappings), )\n if item.avbildat_fartyg:\n linked_objects = self.get_depicted_object(item, typ='ship')\n descr += SMMInfo.get_depicted_ship_field(linked_objects)\n descr += u' |date = %s\\n' % (\n helpers.std_date_range(item.date_foto), )\n descr += u' |medium = %s\\n' % (\n item.get_materials(self.mappings), )\n descr += u' |institution = %s\\n' % item.get_institution()\n descr += u' |accession number = %s\\n' % item.get_id_link()\n descr += u' |source = %s\\n' % item.get_source()\n descr += u' |permission = {{SMM cooperation project}}\\n'\n descr += u'%s\\n' % item.get_license()\n descr += u' |other_versions = \\n'\n descr += u'}}'\n return descr",
"def parse_image_meta(meta):\n image_id = meta[:, 0]\n image_shape = meta[:, 1:5]\n window = meta[:, 5:11]\n active_class_ids = meta[:, 11:]\n return image_id, image_shape, window, active_class_ids",
"def get_exif_data(self):\n exif_data = {}\n info = self.image._getexif()\n if info:\n for tag, value in info.items():\n decoded = TAGS.get(tag, tag)\n if decoded == \"GPSInfo\":\n gps_data = {}\n for t in value:\n sub_decoded = GPSTAGS.get(t, t)\n gps_data[sub_decoded] = value[t]\n\n exif_data[decoded] = gps_data\n else:\n exif_data[decoded] = value\n self.exif_data = exif_data\n return exif_data",
"def get_exif(filename):\n metadata = pyexiv2.ImageMetadata(filename)\n metadata.read()\n return metadata",
"def get_exif(photo):\n cmd = [\n '/usr/bin/exiftool',\n '-d',\n '%Y-%m-%d %H:%M:%S',\n '-coordFormat',\n '%+.6f',\n '-json',\n ]\n cmd += TAGS\n cmd.append(photo)\n for_some_reason_a_list = json.loads(subprocess.check_output(cmd))\n return for_some_reason_a_list[0]",
"def getExifInfo(self, photo_id):\n def getRawOrClean(xmlnode):\n try:\n return xmlnode.clean[0].text\n except AttributeError:\n try:\n return xmlnode.raw[0].text\n except AttributeError:\n return ''\n\n def testResultKey(result_elem, label):\n if result_elem['label'] == label:\n return getRawOrClean(result_elem)\n else:\n return None\n\n exif_data = {'Make': '', 'Model': '', 'Orientation': '',\n 'Exposure': '', 'Software': '', 'Aperture': '',\n 'ISO Speed': '', 'Metering Mode': '', 'Flash': '',\n 'Focal Length': '', 'Color Space': ''}\n try:\n result = self.flickr.photos_getExif(photo_id=photo_id)\n except flickrapi.FlickrError:\n return exif_data\n\n try:\n for exif_elem in result.photo[0].exif:\n for label in exif_data.keys():\n data = testResultKey(exif_elem, label)\n if data and not exif_data[label]:\n exif_data[label] = data\n return exif_data\n except:\n return exif_data",
"def parse_camera_card(self, card) -> list:\n if self.checkSupported(card['camera_image']):\n entity = card['camera_image']\n else: \n entity = None\n if 'title' in card: \n title = self.truncate_name(card['title']) \n else: \n title = None\n if 'camera_view' not in card.keys(): \n camera_view = 'auto' \n else: \n camera_view = 'live'\n additionalInformation = {'camera_view' : camera_view}\n return (entity, title, additionalInformation)",
"def image_meta(self, i: int) -> Dict[str, Any]:\n return self.coco.imgs[self.img_ids[i]]",
"def getitem(self, i: int) -> Tuple[PIL.Image.Image, List[Dict]]:\n annotations: List[Dict] = self.raw_anns(i)\n img: PIL.Image.Image = self.load_orig_image(i)\n return img, annotations",
"def get_exif(xml_string):\n # process the output of exiftool -X -l\n # to extract the paramters that needs to be stored in SQL\n # and that needs to be used ulater in processing\n\n # ********************** BEST to use COMPOSITE Tags ************\n # http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/Composite.html\n # **************************************************\n\n paths=[\n '/rdf:RDF/rdf:Description/File:MIMEType/et:prt',\n '/rdf:RDF/rdf:Description/Composite:ImageSize/et:prt', # used for nikon NEF to get the largest\n '/rdf:RDF/rdf:Description/Composite:Duration/et:val', # for videos\n '/rdf:RDF/rdf:Description/IFD0:Make/et:prt|/rdf:RDF/rdf:Description/SigmaRaw:Make/et:prt',\n '/rdf:RDF/rdf:Description/IFD0:Model/et:prt|/rdf:RDF/rdf:Description/SigmaRaw:Model/et:prt',\n\n '/rdf:RDF/rdf:Description/Composite:Aperture/et:prt', \n\n # '/rdf:RDF/rdf:Description/ExifIFD:ApertureValue/et:prt',\n '/rdf:RDF/rdf:Description/ExifIFD:ExposureTime/et:val',\n '/rdf:RDF/rdf:Description/ExifIFD:FocalLength/et:val',\n '/rdf:RDF/rdf:Description/ExifIFD:Flash/et:val',\n '/rdf:RDF/rdf:Description/ExifIFD:WhiteBalance/et:val',\n '/rdf:RDF/rdf:Description/ExifIFD:ExposureMode/et:val',\n '/rdf:RDF/rdf:Description/ExifIFD:DateTimeOriginal/et:prt',\n '/rdf:RDF/rdf:Description/Composite:GPSLatitude/et:val',\n '/rdf:RDF/rdf:Description/Composite:GPSLongitude/et:val',\n\n ]\n namespaces={\n 'rdf':'http://www.w3.org/1999/02/22-rdf-syntax-ns#',\n 'et':'http://ns.exiftool.ca/1.0/',\n 'et:toolkit':'Image::ExifTool 8.60',\n 'ExifTool':'http://ns.exiftool.ca/ExifTool/1.0/',\n 'System':'http://ns.exiftool.ca/File/System/1.0/',\n 'File':'http://ns.exiftool.ca/File/1.0/',\n 'IFD0':'http://ns.exiftool.ca/EXIF/IFD0/1.0/',\n 'ExifIFD':'http://ns.exiftool.ca/EXIF/ExifIFD/1.0/',\n 'IFD1':'http://ns.exiftool.ca/EXIF/IFD1/1.0/',\n 'SubIFD1':'http://ns.exiftool.ca/EXIF/SubIFD1/1.0/',\n 'Composite':'http://ns.exiftool.ca/Composite/1.0/',\n # special ones\n 'CanonRaw':'http://ns.exiftool.ca/MakerNotes/CanonRaw/1.0/',\n 'Canon':'http://ns.exiftool.ca/MakerNotes/Canon/1.0/',\n 'SigmaRaw':'http://ns.exiftool.ca/SigmaRaw/SigmaRaw/1.0/',\n 'Composite':'http://ns.exiftool.ca/Composite/1.0/',\n\n }\n return get_new_image_params_basic(xml_string,paths,namespaces)",
"def get_photo_metadata(filepath):\n meta = pyexiv2.ImageMetadata(filepath)\n try:\n meta.read()\n except IOError:\n return False\n return meta",
"def get_camera_state(self) -> GoProResp:",
"def get_image_metadata(path_to_file: str):\n string_arr = os.path.basename(path_to_file).split(\"_\")\n row_col = string_arr[-2] # Row and column\n fied_of_view = string_arr[-1][5:9]\n action_list_number = string_arr[-1][12:15]\n z_number_3d = string_arr[-1][15:18]\n imaging_channel = string_arr[-1][18:21]\n is_mask = \"/masks/\" in path_to_file\n magnification = os.path.basename(os.path.dirname(path_to_file))\n return {\n \"row_col\": row_col,\n \"field of view\": fied_of_view,\n \"action_list_number\": action_list_number,\n \"z_number_3d\": z_number_3d,\n \"imaging_channel\": imaging_channel,\n \"path\": path_to_file,\n \"is_mask\": is_mask,\n \"magnification\": magnification\n }",
"def cameraById(self, cid):\n return self.get_camera(cid)",
"def get_exif_data(fname):\n ret = {}\n try:\n img = Image.open(fname)\n if hasattr( img, '_getexif' ):\n \t# raw data\n exifinfo = img._getexif()\n if exifinfo != None:\n for tag, value in exifinfo.items():\n decoded = TAGS.get(tag, tag)\n ret[decoded] = value\n # fo.write(ret[decoded]+\"\\n\")\n except IOError:\n print 'IOERROR ' + fname\n # fo.close() \n return ret",
"def get_exif_data(filename):\n ret = {}\n try:\n img = Image.open(filename)\n if hasattr( img, '_getexif' ):\n exifinfo = img._getexif()\n if exifinfo != None:\n for tag, value in exifinfo.items():\n decoded = TAGS.get(tag, tag)\n ret[decoded] = value\n except IOError:\n print 'IOERROR ' + filename\n img.close()\n return ret",
"def get_image_metadata(path):\n exif = {}\n try:\n image = PIL.Image.open(path)\n exif = {\n PIL.ExifTags.TAGS[k]: str(v)\n for k, v in image._getexif().items()\n if k in PIL.ExifTags.TAGS\n }\n except:\n print(\"Unable to retrieve EXIF\")\n return exif",
"def handle_exif_rotation(image: Image.Image) -> Image.Image:\n\n def get_key_by_value(dictionary: Mapping[int, str], value: str) -> int:\n for k, v in dictionary.items():\n if v == value:\n return k\n raise ValueError(f\"No such value {value}.\")\n\n try:\n orientation = get_key_by_value(ExifTags.TAGS, \"Orientation\")\n exif = dict(image.getexif().items())\n if exif[orientation] == 3:\n image = image.transpose(Image.ROTATE_180)\n elif exif[orientation] == 6:\n image = image.transpose(Image.ROTATE_270)\n elif exif[orientation] == 8:\n image = image.transpose(Image.ROTATE_90)\n return image\n except (AttributeError, KeyError, IndexError, ValueError):\n return image"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Align 1000 curated tracks based on division events
|
def make_division_adjusted_tracks():
    """Align the curated tracks on their first division event and write per-track intensities to CSV."""
    curated_tracks = sorted(pd.read_csv(DATA_ROOT / 'curated_tracks.csv', header=None).astype(int).values.flatten())
df = pd.read_csv(DATA_ROOT / 'Spots in tracks statistics nq.csv', na_values='None').dropna()
df = df[df['TRACK_ID'].isin(curated_tracks)]
div_frames = dict.fromkeys(curated_tracks)
rows = []
for frame_num in range(200):
print('Frame', frame_num + 1)
row = []
dt = df.loc[df['FRAME'] == frame_num, ['TRACK_ID', 'POSITION_X', 'POSITION_Y', 'GFP_cmdn', 'Cy3_cmdn']]
gfp_frame_average = df.loc[df['FRAME'] == frame_num, 'GFP_cmdn'].median()
cy3_frame_average = df.loc[df['FRAME'] == frame_num, 'Cy3_cmdn'].median()
row.extend([frame_num, gfp_frame_average, cy3_frame_average])
for track in curated_tracks:
dxy = dt[dt['TRACK_ID'] == track]
if (dxy.shape[0] > 1) and (div_frames[track] is None): # div_frame is where 2 cells
div_frames[track] = frame_num
if dxy.shape[0] < 1:
time = np.nan # div_frame
x, y = np.nan, np.nan
green_median = np.nan
red_median = np.nan
green_mean = np.nan
red_mean = np.nan
else:
time = frame_num
x, y = dxy[['POSITION_X', 'POSITION_Y']].astype(int).values[0]
green_median = dxy['GFP_cmdn'].values[0]
red_median = dxy['Cy3_cmdn'].values[0]
                # NOTE: only the *_cmdn (median) columns are selected into dt above, so the
                # 'mean' columns currently duplicate the median values.
                green_mean = dxy['GFP_cmdn'].values[0]
                red_mean = dxy['Cy3_cmdn'].values[0]
row.extend([time, x, y, green_median, red_median, green_mean, red_mean])
rows.append(row)
div_frames = {k: 0 if v is None else v for k, v in div_frames.items()}
columns = [('frame_num',), ('gfp_frame_average',), ('cy3_frame_average',)]
columns_ = [[(track, 'time'), (track, 'x'), (track, 'y')] +
[(track, color, fun)
for fun in ('median', 'mean')
for color in ('green', 'red')]
for track in curated_tracks]
columns.extend(tt for t in columns_ for tt in t)
dfo = pd.DataFrame.from_records(rows, columns=pd.MultiIndex.from_tuples(columns))
for t in curated_tracks:
dfo[(t, 'time')] -= div_frames[t]
dfo.to_csv(DATA_ROOT / 'intensities nq.csv', index=False)
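
# A sketch of reading the aligned table back (assumes the three-level column header written
# above survives the CSV round trip); each track's 'time' column is shifted so that 0 marks
# its division frame:
#
#     dfo = pd.read_csv(DATA_ROOT / 'intensities nq.csv', header=[0, 1, 2])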
|
[
"def makeTracks(self):\n\n # Geometry preliminaries\n w = self.geometry.width # Width of 2D domain\n h = self.geometry.height # Height of 2D domain\n self.div = 2 # Azimuthal angle subdivision, e.g. div=2 is (0,pi)\n nangle = self.nangle//self.div # Num of azimuthal angles followed\n\n self.angles = [Angle() for i in range(nangle)]\n\n for i, a in enumerate(self.angles):\n p = 2*pi/self.nangle*(0.5+i) # Desired angles\n a.nx = int(abs(w/self.delta*sin(p)) + 1) # Num of intersections along x-axis\n a.ny = int(abs(h/self.delta*cos(p)) + 1) # Num of intersections along y-axis\n a.phi = arctan(h*a.nx/(w*a.ny)) # Actual angle\n if p > pi/2:\n a.phi = pi - a.phi # Fix angles in (pi/2, pi)\n\n a.xprime = w/a.nx # Spacing between points along x-axis\n a.yprime = h/a.ny # Spacing between points along y-axis\n a.delta = a.xprime*sin(a.phi) # Actual track spacing\n\n # Determine azimuthal weight\n for i, a in enumerate(self.angles):\n if i+1 < nangle:\n x1 = 0.5*(self.angles[i+1].phi - a.phi)\n else:\n x1 = 2*pi/self.div - a.phi\n if i > 0:\n x2 = 0.5*(a.phi - self.angles[i-1].phi)\n else:\n x2 = a.phi\n a.weight = (x1 + x2)/(2*pi)*a.delta**2*self.div\n \n # Determine coordinates\n self.tracks = []\n for a in self.angles:\n xin = zeros(a.nx + a.ny)\n yin = zeros(a.nx + a.ny)\n \n xin[:a.nx] = a.xprime*(0.5 + arange(a.nx))\n yin[:a.nx] = 0\n yin[a.nx:] = a.yprime*(0.5 + arange(a.ny))\n if sin(a.phi) > 0 and cos(a.phi) > 0:\n xin[a.nx:] = 0\n elif sin(a.phi) > 0 and cos(a.phi) < 0:\n xin[a.nx:] = w\n self.tracks.append([])\n for x,y in zip(xin,yin):\n r_in = geom.Vector2D(x,y)\n r_out = self.geometry.endpoint(r_in,a.phi)\n newTrack = Track2D(r_in, r_out, a)\n self.tracks[-1].append(newTrack)",
"def sub_method_tracks_to_visual(self):\n kbh_tracks = np.zeros((self.nr_tracks, self.state_dims[0]), dtype=np.int)\n track_iterator = 0\n for track in self.tracks:\n specific_track = []\n for train in track:\n part_length = int(train[-1:])\n specific_track.extend([train]*part_length)\n # fill up with zeros until track length\n specific_track = [0]*((self.track_lengths[track_iterator] - len(specific_track))) + specific_track\n # fill up with 9's to make it a grid.\n specific_track.extend([-99]*(self.state_dims[0] - len(specific_track)))\n \n kbh_tracks[track_iterator] = specific_track\n track_iterator += 1\n return kbh_tracks",
"def get_core_track_info(self, # type: ResTech\n grid, # type: RoutingGrid\n min_tracks, # type: Tuple[int, ...]\n em_specs, # type: Dict[str, Any]\n connect_up=False, # type: bool\n ):\n # type: (...) -> Tuple[List[int], List[Union[int, float]], Tuple[int, int], Tuple[int, int]]\n track_widths = []\n track_spaces = []\n prev_width = -1\n min_w = min_h = 0\n cur_layer = self.get_bot_layer()\n for idx, min_num_tr in enumerate(min_tracks):\n # make sure that current layer can connect to next layer\n if idx < len(min_tracks) - 1 or connect_up:\n top_tr_w = grid.get_min_track_width(cur_layer + 1, unit_mode=True, **em_specs)\n top_w = grid.get_track_width(cur_layer + 1, top_tr_w, unit_mode=True)\n else:\n top_w = -1\n\n tr_p = grid.get_track_pitch(cur_layer, unit_mode=True)\n cur_width = grid.get_min_track_width(cur_layer, bot_w=prev_width, top_w=top_w,\n unit_mode=True, **em_specs)\n cur_space = grid.get_num_space_tracks(cur_layer, cur_width, half_space=True)\n track_widths.append(cur_width)\n track_spaces.append(cur_space)\n cur_ntr = min_num_tr * (cur_width + cur_space)\n if isinstance(cur_space, float):\n cur_ntr += 0.5\n min_dim = int(round(tr_p * cur_ntr))\n\n if grid.get_direction(cur_layer) == 'x':\n min_h = max(min_h, min_dim)\n else:\n min_w = max(min_w, min_dim)\n\n prev_width = grid.get_track_width(cur_layer, cur_width, unit_mode=True)\n cur_layer += 1\n\n # get block size\n wblk, hblk = grid.get_block_size(cur_layer - 1, unit_mode=True, include_private=True,\n half_blk_x=False, half_blk_y=False)\n wblk_drc, hblk_drc = self.get_block_pitch()\n wblk = lcm([wblk, wblk_drc])\n hblk = lcm([hblk, hblk_drc])\n min_w = -(-min_w // wblk) * wblk\n min_h = -(-min_h // hblk) * hblk\n\n return track_widths, track_spaces, (min_w, min_h), (wblk, hblk)",
"def plotAlignedEvents(self): # plot aligned subspaces in SubSpaces object\n for a, station in enumerate(self.ssStations):\n for ind, row in self.subspaces[station].iterrows():\n plt.figure(figsize=[10, .9 * len(row.Events)])\n # f.set_figheight(1.85 * len(row.Events))\n # plt.subplot(len(self.subspaces[station]), 1, ind + 1)\n events = row.Events\n stKeys = row.SampleTrims.keys() # sample trim keys\n for evenum, eve in enumerate(events):\n # plt.subplot(len(self.subspaces[station]), 1, evenum + 1)\n aliTD = row.AlignedTD[eve] # aligned wf for event eve\n if 'Starttime' in stKeys and 'Endtime' in stKeys:\n start = row.SampleTrims['Starttime']\n stop = row.SampleTrims['Endtime']\n aliwf = aliTD[start: stop]\n else:\n aliwf = row.AlignedTD[eve]\n plt.plot(aliwf / (2 * max(aliwf)) + 1.5 * evenum, c='k')\n plt.xlim([0, len(aliwf)])\n plt.ylim(-1, 1.5 * evenum + 1)\n plt.xticks([])\n plt.yticks([])\n plt.title('Station %s, %s, %d events' % (station, row.Name, len(events)))\n plt.show()",
"def overlay(tracks):\n main_track = tracks[0]\n for track in tracks[1:]:\n main_track = main_track.overlay(track, loop=True)\n return main_track",
"def createTimeline(trackEventList):\r\n lastTrackNo = 0\r\n# print(trackEventList)\r\n\r\n for eventFileProp in trackEventList:\r\n searchFile = True\r\n if eventFileProp != []:\r\n curTrackNo = eventFileProp[2]\r\n \r\n if curTrackNo != lastTrackNo:\r\n # new track\r\n trackName = eventFileProp[3]\r\n print (\"Track No:\", curTrackNo)\r\n lastTrackNo = curTrackNo\r\n if eventFileProp[0] == \"Audio\":\r\n mediaType = MediaType.Audio\r\n eventTrack = pyVEGAS.Project.AddAudioTrack()\r\n \r\n else: \r\n mediaType = MediaType.Video\r\n eventTrack = pyVEGAS.Project.AddVideoTrack()\r\n \r\n if eventFileProp[0] != \"Video\" and eventFileProp[0] != \"Audio\":\r\n searchFile = False\r\n eventTrack.Name = trackName \r\n addTrackEvent(eventFileProp, eventTrack, mediaType,searchFile)\r\n \r\n return",
"def track_position_analysis():\n\n found_positions = []\n found_positions_per_track = {\n 1: [], 2: [], 3: [], 4: [], 5: []\n }\n\n found_in_initial_tracks = {\n 1: 0, 2: 0, 3: 0, 4: 0, 5: 0\n }\n\n for user in Session.get_users_with_tracks():\n\n chosen_track_ids = [track_id for track_id in user.get_chosen_tracks()]\n hovered_tracks = user.get_hovered_tracks()\n\n found_tracks = 0\n index = 0\n\n found_positions_temp = []\n\n for track_id, track in hovered_tracks.items():\n\n index += 1\n\n if track_id in chosen_track_ids:\n found_tracks += 1\n\n found_positions_temp.append(index / len(hovered_tracks))\n found_positions_per_track[found_tracks].append(index / len(hovered_tracks))\n\n if index <= 5:\n found_in_initial_tracks[found_tracks] += 1\n\n if found_tracks != 5:\n continue\n\n found_positions.extend(found_positions_temp)\n\n print(f\"Found position: {statistics.mean(found_positions):.2f}, {statistics.stdev(found_positions):.2f}\")\n\n print(\"Positions per index: \", end=\"\")\n for key, position_list in found_positions_per_track.items():\n print(f\"{key}: {statistics.mean(position_list):.2f}, {statistics.stdev(position_list):.2f}; \", end=\"\")\n print(\"\")\n\n print(\"Number of items found in initial selection: \", end=\"\")\n for key, position_list in found_in_initial_tracks.items():\n print(f\"{key}: {position_list}; \", end=\"\")\n print(\"\")\n\n boxplot_data = [found_positions_per_track[1], found_positions_per_track[2], found_positions_per_track[3],\n found_positions_per_track[4], found_positions_per_track[5]]\n\n fig1, ax1 = plt.subplots()\n ax1.boxplot(boxplot_data, vert=False,\n boxprops=dict(linestyle='-', linewidth=1.5),\n medianprops=dict(linestyle='-', linewidth=2),\n whiskerprops=dict(linestyle='-', linewidth=1.5),\n capprops=dict(linestyle='-', linewidth=1.5),\n showfliers=True\n )\n ax1.set_xlabel(\"Relative Position\")\n ax1.set_ylabel(\"Number of Selected Tracks\")\n ax1.xaxis.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5)\n ax1.set_xlim((0, 1))\n\n fig1.tight_layout()\n\n plt.show()",
"def detect_placement(data):\n data.reset_index(inplace=True, drop=True)\n start, end = detect_activity(data)\n for i in range(len(start)):\n try:\n data_sub = data.loc[start[i]:end[i], :]\n data_sub.reset_index(inplace=True, drop=True)\n\n # prepare foot1 data\n pitch_foot1 = qc.quat_to_euler(\n data_sub['quat_0_w'],\n data_sub['quat_0_x'],\n data_sub['quat_0_y'],\n data_sub['quat_0_z'],\n )[100:, 1] * 180 / np.pi\n # # rotate if the placement is at too high angle creating the weird divets in pitch data\n # # TODO: This seems to work but unsure how need to make sure math works\n # if np.nanmean(pitch_foot1[0:100]) > 35:\n # pitch_foot1 = extract_geometry(quats_1, -np.pi/2)\n # elif np.nanmean(pitch_foot1[0:100]) < -35:\n # pitch_foot1 = extract_geometry(quats_1, np.pi/2)\n\n if np.nanmean(data_sub.acc_0_y_original.values[0:100]) < -4.9: # the sensor was upside down\n pitch_foot1 = -pitch_foot1\n\n # prepare foot2 data\n pitch_foot2 = qc.quat_to_euler(\n data_sub['quat_2_w'],\n data_sub['quat_2_x'],\n data_sub['quat_2_y'],\n data_sub['quat_2_z'],\n )[100:, 1] * 180 / np.pi\n # # rotate if the placement is at too high angle creating the weird divets in pitch data\n # # TODO: This seems to work but unsure how need to make sure math works\n # if np.nanmean(pitch_foot2[0:100]) > 35:\n # pitch_foot2 = extract_geometry(quats_2, -np.pi/2)\n # elif np.nanmean(pitch_foot2[0:100]) < -35:\n # pitch_foot2 = extract_geometry(quats_2, np.pi/2)\n\n if np.nanmean(data_sub.acc_2_y_original.values[0:100]) < -4.9: # the sensor was upside down\n pitch_foot2 = -pitch_foot2\n\n return [0, 1, 2] if is_foot1_left(pitch_foot1, pitch_foot2) else [2, 1, 0]\n\n except Exception as e:\n print(e)\n continue\n\n raise PlacementDetectionException('Could not detect placement using any of the movements')",
"def track(camera, upDistance01=\"string\", left=\"string\", down=\"string\", right=\"string\", upDistance02=\"string\"):\n pass",
"def analyseTracks(self):\n\t\t# Analyze front and rear of objects from the tracks and input image\n\t\tif self.parameters[\"CalculateFrontRear\"]:\n\t\t\tinputFile = self.parameters[\"InputImage\"]\n\t\t\treaders = Modules.DynamicLoader.getReaders()\n\t\t\tbxcReader = readers['BXCDataSource'][0]()\n\t\t\tbxcReader.loadFromFile(inputFile)\n\t\t\tfor track in self.tracks:\n\t\t\t\ttrack.calculateFrontAndRear(bxcReader, 10.0, 0.01)\n\n#\t\ttrack length\n#\t\tDirectional persistance = distance to starting point / path length\n#\t\tspeed\n#\t\tangle (avg of changes)\n\t\ttracks = self.tracks\n\t\trows = [[\"Track #\", \"# of tps\", u\"Length (\\u03BCm)\", u\"Avg. speed (\\u03BCm/s)\", \"Directional persistence\", \"Avg. angle\", u\"Avg. front speed (\\u03BCm/s)\", u\"Avg. rear speed (\\u03BCm)\"]]\n\t\tself.globalmin = 9999999999\n\t\tself.globalmax = 0\n\t\tself.lengths = []\n\t\tself.dps = []\n\t\tself.speeds = []\n\t\tself.angles = []\n\t\tself.tpCount = []\n\t\tdpsPerTp={}\n\t\tself.frontSpeeds = []\n\t\tself.rearSpeeds = []\n\t\tself.frontCoordinates = []\n\t\tself.rearCoordinates = []\n\t\tfor i, track in enumerate(tracks):\n\t\t\ttps = track.getNumberOfTimepoints()\n\t\t\t#if tps < self.parameters[\"MinLength\"]:\n\t\t\t#\tcontinue\n\t\t\tlength = track.getLength()\n\t\t\tspeed = track.getSpeed()\n\t\t\tdp = track.getDirectionalPersistence()\n\t\t\tif tps not in dpsPerTp:\n\t\t\t\tdpsPerTp[tps] = []\n\t\t\tdpsPerTp[tps].append(dp)\n\t\t\tfrontSpeed = track.getFrontSpeed()\n\t\t\trearSpeed = track.getRearSpeed()\n\t\t\tfrontCoords = track.getFrontCoordinates()\n\t\t\trearCoords = track.getRearCoordinates()\n\t\t\t\n\t\t\tself.lengths.append(length)\n\t\t\tself.speeds.append(speed)\n\t\t\tself.tpCount.append(tps)\n\t\t\tself.dps.append(dp)\n\t\t\tavgang,avgangstd,avgangstderr = track.getAverageAngle()\n\t\t\tself.angles.append((avgang,avgangstderr))\n\t\t\tself.frontSpeeds.append(frontSpeed)\n\t\t\tself.rearSpeeds.append(rearSpeed)\n\t\t\tself.frontCoordinates.append(frontCoords)\n\t\t\tself.rearCoordinates.append(rearCoords)\n\n\t\t\trow = [i+1, tps, \"%.3f\"%(length), \"%.6f\"%(speed), \"%.2f\"%(dp), u\"%.2f\\u00B1%.2f\"%(avgang,avgangstderr), \"%.6f\"%(frontSpeed), \"%.6f\"%(rearSpeed)]\n\t\t\tmintp, maxtp = track.getTimeRange()\n\t\t\tif mintp < self.globalmin:\n\t\t\t\tself.globalmin = mintp\n\t\t\tif maxtp > self.globalmax:\n\t\t\t\tself.globalmax = maxtp\n\t\t\tfor tp in range(0, maxtp + 1):\n\t\t\t\tif tp < mintp:\n\t\t\t\t\trow.append(\"\")\n\t\t\t\t\tcontinue\n\t\t\t\tval, pos = track.getObjectAtTime(tp)\n\t\t\t\tfPos = track.getFrontCoordinatesAtTime(tp)\n\t\t\t\trPos = track.getRearCoordinatesAtTime(tp)\n\n\t\t\t\tif val == -1:\n\t\t\t\t\trow.append(\"\")\n\t\t\t\telse:\n\t\t\t\t\tposText = \"(%d,%d,%d), (%d,%d,%d), (%d,%d,%d)\"%(round(pos[0]), round(pos[1]), round(pos[2]), round(fPos[0]), round(fPos[1]), round(fPos[2]), round(rPos[0]), round(rPos[1]), round(rPos[2]))\n\t\t\t\t\trow.append(posText)\n\t\t\trows.append(row)\n\n\t\tdpkeys = dpsPerTp.keys()\n\t\tdpkeys.sort()\n\t\tfor k in dpkeys:\n\t\t\tprint \"Avg. 
dp for tracks of len %d = %.3f\"%(k, lib.Math.averageValue(dpsPerTp[k]))\n\t\t\n\t\tfor i in range(0, self.globalmax+1):\n\t\t\trows[0].append(\"T%d com,front,rear\" %(i+1))\n\n\t\tself.trackListBox.setContents(rows)\n\n\t\tself.avglen = lib.Math.meanstdeverr(self.lengths)\n\t\tself.avgspeed = lib.Math.meanstdeverr(self.speeds)\n\t\tself.avgdps = lib.Math.meanstdeverr(self.dps)\n\t\tself.avgang = lib.Math.meanstdeverr([x for x,y in self.angles])\n\t\tself.avgTpCount = lib.Math.averageValue(self.tpCount)\n\t\tself.avgFrontSpeeds = lib.Math.meanstdeverr(self.frontSpeeds)\n\t\tself.avgRearSpeeds = lib.Math.meanstdeverr(self.rearSpeeds)\n\n\t\ttotalRows = [[\"Quantity\", \"Value\"]]\n\t\tavgs = [[\"# of tracks\", len(tracks)],\n\t\t\t\t[\"Avg. tps\", \"%.2f\"%self.avgTpCount],\n\t\t\t\t[u\"Avg. length (\\u03BCm)\", u\"%.3f\\u00B1%.3f\"%(self.avglen[0],self.avglen[2])],\n\t\t\t\t[u\"Avg. speed (\\u03BCm/s)\", u\"%.6f\\u00B1%.6f\"%(self.avgspeed[0],self.avgspeed[2])],\n\t\t\t\t[\"Avg. DP\", u\"%.2f\\u00B1%.2f\"%(self.avgdps[0],self.avgdps[2])],\n\t\t\t\t[\"Avg. angle\", u\"%.2f\\u00B1%.2f\"%(self.avgang[0],self.avgang[2])],\n\t\t\t\t[u\"Avg. front speed (\\u03BCm/s)\", u\"%.6f\\u00B1%.6f\"%(self.avgFrontSpeeds[0], self.avgFrontSpeeds[2])],\n\t\t\t\t[u\"Avg. rear speed (\\u03BCm/s)\", u\"%.6f\\u00B1%.6f\"%(self.avgRearSpeeds[0], self.avgRearSpeeds[2])]]\n\t\ttotalRows += avgs\n\t\tself.aggregateBox.setContents(totalRows)\n\t\tself.aggregateBox.SetColumnWidth(0, 150)\n\t\tself.aggregateBox.SetColumnWidth(1, 200)",
"def track_map(self, **kw):\n _w, _h = _figure.figaspect(kw.get(\"aspect_ratio\", 3/4))\n fig = plt.figure(\n figsize = (_w, _h),\n constrained_layout = True,\n )\n plt.get_current_fig_manager().set_window_title(\n \"{} - {} Tracks\".format(\n self.atcfid,\n self.name\n )\n )\n fig.suptitle(\"Tracks for {} - {}\".format(\n \"{} {} ({})\".format(\n self.status_highest,\n self.name.title(),\n self.atcfid\n ) if self.name != \"UNNAMED\" else \\\n \"{} - {}\".format(\n self.atcfid,\n self.status_highest\n ),\n \"{:%b %d}-{:{MD2}}, {}\".format(\n self.entry[0].time,\n self.entry[-1].time,\n self.entry[0].time.year,\n MD2 = \"%d\" \\\n if self.entry[0].time.month == self.entry[-1].time.month \\\n else \"%b %d\"\n ) if self.entry[0].time.year == self.entry[-1].time.year else \\\n \"{:%Y-%m-%d} - {:%Y-%m-%d}\".format(\n self.entry[0].time,\n self.entry[-1].time\n )\n ))\n rc = matplotlib.rcParams\n ax = plt.axes(\n facecolor = kw.get(\"ocean\", \"lightblue\")\n )\n self._hd2.fig = fig\n\n # Set the axis labels\n ax.set_ylabel(\"Latitude\")\n ax.set_xlabel(\"Longitude\")\n ax.yaxis.set_major_locator(_ticker.MultipleLocator(5))\n ax.yaxis.set_minor_locator(_ticker.MultipleLocator(1))\n ax.xaxis.set_major_locator(_ticker.MultipleLocator(5))\n ax.xaxis.set_minor_locator(_ticker.MultipleLocator(1))\n labelrot = 0 # initialization for label rotation\n for indx, entry in enumerate(self.entry):\n if entry != self.entry[-1]:\n labelangle = math.degrees(\n math.atan2(\n self.entry[indx+1].lat - entry.lat,\n self.entry[indx+1].lon - entry.lon\n )\n )\n # labelrot = 45 if 150 <= labelangle <= 180 else \\\n # (-45 if -180 <= labelangle <= -150 else 0)\n labelrot = 45 \\\n if (135 <= labelangle <= 180 or labelangle <= -170) else \\\n (-45 if -170 < labelangle <= -135 else labelangle - 90)\n halign = \"left\" if 0 <= labelrot <= 90 else \"right\"\n # print(entry.time, labelangle, labelrot, halign)\n ax.plot(\n [entry.lon, self.entry[indx + 1].lon],\n [entry.lat, self.entry[indx +1].lat],\n \"-\" if entry.status in [\"SD\", \"TD\", \"SS\", \"TS\", \"HU\"] else \":\",\n color = \"hotpink\" if entry.saffir_simpson == 5 else \\\n (\"purple\" if entry.saffir_simpson == 4 else \\\n (\"red\" if entry.saffir_simpson == 3 else \\\n (\"orange\" if entry.saffir_simpson == 2 else \\\n (\"yellow\" if entry.saffir_simpson == 1 else \\\n (\"green\" if entry.saffir_simpson == 0 else \"black\"))))),\n linewidth = kw.get(\"linewidth\", rc[\"lines.linewidth\"]),\n )\n # from ._calculations import distance_from_coordinates\n # TS Radius\n # w = _patches.Wedge(\n # entry.location_reversed,\n \n # )\n # ax.fill(\n # [\n # entry.longitude,\n # distance_from_coordinates(*entry.location, entry.tsNE, \"N\")[1],\n # distance_from_coordinates(*entry.location, entry.tsNE, \"E\")[1],\n # ],\n # [\n # entry.latitude,\n # distance_from_coordinates(*entry.location, entry.tsNE, \"N\")[0],\n # distance_from_coordinates(*entry.location, entry.tsNE, \"E\")[0],\n # ],\n # zorder=10\n # )\n\n # Status points\n ax.plot(\n entry.lon,\n entry.lat,\n \"o\",\n color = \"black\",\n markersize = kw.get(\"markersize\", 2),\n )\n if kw.get(\"labels\", True):\n if entry.record_identifier == \"L\":\n ax.annotate(\n \"L \",\n (entry.lon, entry.lat),\n fontsize = 10,\n fontweight = \"bold\",\n rotation = labelrot,\n horizontalalignment = halign\n )\n ax.annotate(\n \" {:{YMD}{HM}Z}\".format(\n entry.time,\n YMD = \"%y/%m%d \" \\\n if (entry.hour, entry.minute) == (0,0) else \"\",\n HM = \"%H%M\" if entry.minute != 0 else \"%H\"\n ),\n (entry.lon, entry.lat),\n 
fontsize = 6,\n rotation = labelrot,\n horizontalalignment = halign\n )\n else:\n ax.annotate(\n \" {:{YMD}%HZ}\".format(\n entry.time,\n YMD = \"%y/%m%d \" \\\n if (entry.hour, entry.minute) == (0,0) else \"\"\n ),\n (entry.lon, entry.lat),\n fontsize = 6,\n color = \"black\" if (entry.hour, entry.minute) == (0,0) \\\n else (0.5,0.5,0.5),\n rotation = labelrot,\n horizontalalignment = halign\n )\n ax.grid(True, color=(0.3, 0.3, 0.3))\n ax.grid(True, which=\"minor\", color=(0.6, 0.6, 0.6), linestyle=\":\")\n # print(ax.get_ylim(), ax.get_xlim())\n\n # Set view-limits to be equal\n maxaxis = ax.xaxis \\\n if abs(operator.sub(*ax.get_xlim())) >= \\\n abs(operator.sub(*ax.get_ylim())) else ax.yaxis\n maxaxis_diff = abs(operator.sub(*maxaxis.get_view_interval())) * (\n kw.get(\"aspect_ratio\", 3/4) \\\n if ax.xaxis == maxaxis \\\n else (1 / kw.get(\"aspect_ratio\", 3/4))\n )\n maxaxis_interval = maxaxis.get_view_interval()\n\n minaxis = ax.xaxis \\\n if abs(operator.sub(*ax.get_xlim())) < \\\n abs(operator.sub(*ax.get_ylim())) else ax.yaxis\n # minaxis_diff = abs(operator.sub(*minaxis.get_view_interval()))\n minaxis_interval = [\n (statistics.mean(minaxis.get_view_interval()) + maxaxis_diff / 2),\n (statistics.mean(minaxis.get_view_interval()) - maxaxis_diff / 2)\n ]\n\n # Draw Map\n for atlas in [\n _maps.all_land,\n _maps.usa,\n _maps.centam,\n _maps.islands,\n ]:\n for country in atlas:\n for polygon in country[1:]:\n ax.fill(\n [lon for lon, lat in polygon],\n [lat for lon, lat in polygon],\n color = kw.get(\"land\", \"lightseagreen\"),\n edgecolor = \"black\",\n linewidth = 0.5,\n )\n # for file in [\n # \"countrydb_all_land_minus_americas.json\",\n # \"countrydb_centamerica.json\",\n # \"countrydb_usafull.json\",\n # \"countrydb_islands.json\"\n # ]:\n # with open(file) as r:\n # countrydata = json.loads(r.read())\n # for country in countrydata:\n # for polygon in country[1:]:\n # ax.fill(\n # [lon for lon, lat in polygon],\n # [lat for lon, lat in polygon],\n # color = kw.get(\"land\", \"lightseagreen\"),\n # edgecolor = \"black\",\n # linewidth = 0.5,\n # )\n\n # Reset Intervals to the TC Track\n maxaxis.set_view_interval(\n min(maxaxis_interval),\n max(maxaxis_interval),\n ignore=True\n )\n minaxis.set_view_interval(\n min(minaxis_interval),\n max(minaxis_interval),\n ignore=True\n )\n\n plt.show(block=False)",
"def get_tracks(df, min_frames=20, id_col='particle', time_col='frame',\n coord_cols=('x', 'y', 'z'), scale=(1, 1, 1), w_prop=True):\n time_0 = time.time()\n id_array = df[id_col].to_numpy()\n track_count = np.bincount(id_array)\n df['track length'] = track_count[id_array]\n df_filtered = df.loc[df['track length'] >= min_frames, :]\n df_filtered = df_filtered.sort_values(by=[id_col, time_col])\n data_cols = [id_col, time_col] + list(coord_cols)\n track_data = df_filtered[data_cols].to_numpy()\n track_data[:, -3:] *= scale\n print(f'{np.sum(track_count >= min_frames)} tracks found in '\n f'{time.time() - time_0} seconds')\n if w_prop:\n return track_data, dict(df_filtered)\n else:\n return track_data",
"def build_tracks(track_1, track_2, track_3, track_4, file_name):\r\n note_file = open(file_name, 'r')\r\n note_file.readline() # Buffer to eliminate bpm data\r\n\r\n line1 = note_file.readline().strip()\r\n read_note_line(track_1, line1)\r\n line2 = note_file.readline().strip()\r\n read_note_line(track_2, line2)\r\n line3 = note_file.readline().strip()\r\n read_note_line(track_3, line3)\r\n line4 = note_file.readline().strip()\r\n read_note_line(track_4, line4)\r\n\r\n note_file.close()",
"def get_real_position(self, unit='volts'):\n with nidaqmx.Task() as fsm_task:\n fsm_task.ai_channels.add_ai_voltage_chan(self.ai_chan['x'], 'FSM x axis')\n fsm_task.ai_channels.add_ai_voltage_chan(self.ai_chan['y'], 'FSM y axis')\n target_x, target_y = fsm_task.read()\n\n curr_x = target_x\n curr_y = target_y\n self.go_to_position(self.volts_to_micron(curr_x,'x'),self.volts_to_micron(curr_y,'y'))\n \n threshold = 0.005 # volt\n \n # repeat at most 3 times\n for i in list(range(3)):\n \n with nidaqmx.Task() as fsm_task:\n fsm_task.ai_channels.add_ai_voltage_chan(self.ai_chan['x'], 'FSM x axis')\n fsm_task.ai_channels.add_ai_voltage_chan(self.ai_chan['y'], 'FSM y axis')\n curr_x2, curr_y2 = fsm_task.read()\n if max(abs(target_x - curr_x2),abs(target_y - curr_y2))< threshold:\n break\n\n curr_x += target_x - curr_x2\n curr_y += target_y - curr_y2\n self.go_to_position(self.volts_to_micron(curr_x,'x') ,self.volts_to_micron(curr_y,'y') )\n\n# self.go_to_position(self.volts_to_micron(curr_x,'x') +(curr_x-curr_x2)*self.conversion['x'] ,self.volts_to_micron(curr_y,'y')+(curr_y-curr_y2)*self.conversion['y'])\n\n return self.return_position(unit)",
"def create_grid(track_frame, masses=np.concatenate((np.arange(0.7,0.9,0.05),np.arange(0.9,2.0,0.02))),extend_below = 0,extend_above=0,with_extension=False,lum_max=None,teff_min=None):\r\n tracks = []\r\n left_border = []\r\n right_border = []\r\n num_masses = len(masses)\r\n for i,mass in enumerate(masses):\r\n mass = np.round(mass,2)\r\n track = track_frame[track_frame['mass']==mass]\r\n tracks.append(track)\r\n left_border.append((track.iloc[0]['log_Teff'],track.iloc[0]['log_L']))\r\n right_border.append((track.iloc[-1]['log_Teff'],track.iloc[-1]['log_L']))\r\n if i==0:\r\n lower_border = list(zip(track['log_Teff'], track['log_L']))\r\n elif i==num_masses-1:\r\n upper_border = list(zip(track['log_Teff'],track['log_L']))\r\n border = left_border[1:-1]+upper_border+right_border[-1:1:-1]+lower_border[-1:1:-1]\r\n borderpath = mplPath.Path(border)\r\n return pd.concat(tracks),tracks,borderpath",
"def _ape_master_track_update_fire(self):\r\n pass",
"def event_parser(self):\n dBm_range = max(self.dBm) - min(self.dBm)\n window_size = self.pps*3\n window_slide = self.pps\n curr_time = self.timeline[0]\n print(self.timeline[0], self.timeline[window_size])\n for i in range(0, len(self.dBm), window_slide):\n w_dBm = self.dBm[i:i+window_size]\n w_time = self.timeline[i:i+window_size]\n\n # Determine if this section is flat\n w_range = max(w_dBm) - min(w_dBm)\n if w_range / dBm_range < 0.2:\n continue\n\n # Event trigger for this window\n w_trigger = w_range/5\n w_interval = int(self.pps/6)\n base = (0, w_dBm[0])\n for j in range(w_interval, min(window_size, len(w_dBm[w_interval:])), w_interval):\n curr = (j, w_dBm[j])\n diff = curr[1] - base[1]\n\n if diff > w_trigger or diff < -w_trigger:\n # Got an event @ index j\n prev_time = curr_time\n curr_time = self.timeline[i+j]\n if curr_time - prev_time < 0.5:\n break\n if diff > w_trigger:\n a = \"up\"\n else:\n a = \"down\"\n self.marks.append(self.timeline[i+base[0]])\n duration, code, marks = self.decode_event(\n i+j-int(self.pps/2))\n gID = which_gesture(code)\n print(\"@\", self.timeline[i+base[0]],\n code, gestureNames[gID])\n break\n else:\n base = (j, w_dBm[j])\n print(\"==========================\")",
"def onReadTracks(self, event):\n\t\tfilename = self.parameters[\"ResultsFile\"]\n\t\tif not os.path.exists(filename):\n\t\t\treturn\n\t\tself.track = lib.Track.TrackReader()\n\t\tself.track.readFromFile(filename)\n\t\tself.tracks = self.track.getTracks(self.parameters[\"MinLength\"])\n\t\tself.trackGrid.showTracks(self.tracks)",
"def draw_measures(self, ind):\n measures = self.progression.length()\n hashes = self.time_signature\n num_lines = math.ceil(measures/8)\n bar_height = self.window_height/2/num_lines\n for i in range(num_lines): # i is the line number\n if i+1 == num_lines and measures%8 != 0:\n meas = measures%8\n else:\n meas = 8\n y = self.window_height/4 + bar_height*(i + 0.5)\n pygame.draw.line(self.display_surf, (0,0,0), (20,y-bar_height/6),(20,y+bar_height/6),6) # at start of line\n for j in range(meas): # j is the measure within that line\n meas_length = (self.window_width-40)/meas\n x = 20 + meas_length*(j+1)\n if (i*8+j == ind):\n pygame.draw.line(self.display_surf, (255,255,0), (x-meas_length+3,y),(x-3,y),50) # the highlight\n pygame.draw.line(self.display_surf, (0,0,0), (x,y-bar_height/6),(x,y+bar_height/6),6) # barlines between measures\n\n\n chord_idx = i*8+j\n # The root and tonality of the chord are displayed separately to faciltate click detection.\n chord_message = \"{} {}\".format(self.progression.chord_list[chord_idx][0], self.progression.chord_list[chord_idx][1])\n chord_text = pygame.font.Font(None, 30).render(chord_message, 1, (0,0,0))\n self.display_surf.blit(chord_text, (x-(meas_length+chord_text.get_width())/2, y-bar_height/5))\n self.progression.chord_pos[chord_idx] = (\\\n x-(meas_length+chord_text.get_width())/2,\\\n y-bar_height/5,\\\n x-(meas_length-chord_text.get_width())/2,\\\n y-bar_height/5+chord_text.get_height())\n\n # self.display_surf.blit(chord_text, (x-meas_length/2-chord_text.get_width()/2,y-bar_height/5)) # Chord labels\n\n for k in range(hashes): # k is the hash (beat) within that measure\n hashspace = meas_length/hashes\n x2 = x - meas_length+ hashspace*(k + 0.5)\n pygame.draw.line(self.display_surf, (0,0,0), (x2+10,y-bar_height/12),(x2-10,y+bar_height/12),4) # the hashes"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Add frame average and standard deviation columns for each channel, using the curated tracks' averages
|
def add_mean_std(df, verbose=False):
    """Add per-frame average and standard deviation columns for each channel,
    computed from the curated tracks only."""
    channels = ['GFP', 'Cy3', 'DAPI', 'BF']
    print(f'Adding averages and standard deviations for {", ".join(channels)} channels')
    curated_tracks = sorted(pd.read_csv(DATA_ROOT / 'curated_tracks.csv', header=None).astype(int).values.flatten())
    df_curated_tracks = df[df['TRACK_ID'].isin(curated_tracks)]
    for channel in channels:
        if verbose:
            print(channel)
        df[channel + '_average'] = 0
        df[channel + '_std'] = 0
        for frame_num in range(200):
            if verbose:
                print('Frame', frame_num + 1)
            # Build the frame mask on the curated subset itself; a mask built on the full
            # df is unalignable with df_curated_tracks and raises an IndexingError.
            curated_in_frame = df_curated_tracks['FRAME'] == frame_num
            img_average = df_curated_tracks.loc[curated_in_frame, channel + '_cmdn'].median()
            img_std = df_curated_tracks.loc[curated_in_frame, channel + '_cmdn'].std()
            df.loc[df['FRAME'] == frame_num, channel + '_average'] = img_average
            df.loc[df['FRAME'] == frame_num, channel + '_std'] = img_std
        # Collapse the per-frame std into a single channel-wide value (mean of the frame stds).
        df[channel + '_std'] = df[channel + '_std'].mean()
    return df
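
# A minimal usage sketch (assumes the TrackMate export with *_cmdn columns used elsewhere
# in this module):
#
#     spots = pd.read_csv(DATA_ROOT / 'Spots in tracks statistics nq.csv', na_values='None').dropna()
#     spots = add_mean_std(spots)  # adds <channel>_average and <channel>_std columns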
|
[
"def frame_mean ( frame , expression , cuts = '' ) : \n return frame_moment ( frame , order = 1 , expression = expression , cuts = cuts )",
"def make_division_adjusted_tracks():\n\n curated_tracks = sorted(pd.read_csv(DATA_ROOT / 'curated_tracks.csv', header=None).astype(int).values.flatten())\n df = pd.read_csv(DATA_ROOT / 'Spots in tracks statistics nq.csv', na_values='None').dropna()\n df = df[df['TRACK_ID'].isin(curated_tracks)]\n\n div_frames = dict.fromkeys(curated_tracks)\n rows = []\n for frame_num in range(200):\n print('Frame', frame_num + 1)\n row = []\n dt = df.loc[df['FRAME'] == frame_num, ['TRACK_ID', 'POSITION_X', 'POSITION_Y', 'GFP_cmdn', 'Cy3_cmdn']]\n gfp_frame_average = df.loc[df['FRAME'] == frame_num, 'GFP_cmdn'].median()\n cy3_frame_average = df.loc[df['FRAME'] == frame_num, 'Cy3_cmdn'].median()\n row.extend([frame_num, gfp_frame_average, cy3_frame_average])\n\n for track in curated_tracks:\n dxy = dt[dt['TRACK_ID'] == track]\n if (dxy.shape[0] > 1) and (div_frames[track] is None): # div_frame is where 2 cells\n div_frames[track] = frame_num\n if dxy.shape[0] < 1:\n time = np.nan # div_frame\n x, y = np.nan, np.nan\n green_median = np.nan\n red_median = np.nan\n green_mean = np.nan\n red_mean = np.nan\n else:\n time = frame_num\n x, y = dxy[['POSITION_X', 'POSITION_Y']].astype(int).values[0]\n green_median = dxy['GFP_cmdn'].values[0]\n red_median = dxy['Cy3_cmdn'].values[0]\n green_mean = dxy['GFP_cmdn'].values[0]\n red_mean = dxy['Cy3_cmdn'].values[0]\n row.extend([time, x, y, green_median, red_median, green_mean, red_mean])\n rows.append(row)\n\n div_frames = {k: 0 if v is None else v for k, v in div_frames.items()}\n columns = [('frame_num',), ('gfp_frame_average',), ('cy3_frame_average',)]\n columns_ = [[(track, 'time'), (track, 'x'), (track, 'y')] +\n [(track, color, fun)\n for fun in ('median', 'mean')\n for color in ('green', 'red')]\n for track in curated_tracks]\n columns.extend(tt for t in columns_ for tt in t)\n dfo = pd.DataFrame.from_records(rows, columns=pd.MultiIndex.from_tuples(columns))\n for t in curated_tracks:\n dfo[(t, 'time')] -= div_frames[t]\n dfo.to_csv(DATA_ROOT / 'intensities nq.csv', index=False)",
"def video_mean(self):\r\n self.imganalysis_averageimage = np.mean(self.videostack, axis = 0)\r\n self.pw_averageimage.setImage(self.imganalysis_averageimage)\r\n self.samplingrate_cam = self.Spincamsamplingrate.value() \r\n self.cam_time_label = np.arange(self.videostack.shape[0])/self.samplingrate_cam\r\n \r\n fig = plt.figure(figsize=(8.0, 5.8))\r\n fig.suptitle(\"Mean intensity of raw video\")\r\n plt.imshow(self.imganalysis_averageimage)\r\n fig.savefig(os.path.join(self.main_directory, 'Analysis results//Mean intensity of raw video.png'), dpi=1000)\r\n plt.show()\r\n \r\n self.mean_camera_counts = []\r\n for i in range(self.videostack.shape[0]):\r\n self.mean_camera_counts.append(np.mean(self.videostack[i]))\r\n \r\n fig2, ax2 = plt.subplots(figsize=(8.0, 5.8))\r\n fig2.suptitle(\"Mean intensity trace of raw video\")\r\n plt.plot(self.cam_time_label, self.mean_camera_counts)\r\n ax2.set_xlabel('time(s)')\r\n ax2.set_ylabel('Pixel values')\r\n fig2.savefig(os.path.join(self.main_directory, 'Analysis results//Mean intensity trace of raw video.png'), dpi=1000)\r\n plt.show()",
"def treat_channel_data(index):\n ch = traces[index-1]\n freq = self.format_and_eval_string(getattr(self,\n 'freq_%d' % index))*1e6\n\n # Remove points that do not belong to a full period.\n samples_per_period = int(sampling_rate/freq)\n samples_per_trace = int(ch.shape[-1])\n if (samples_per_trace % samples_per_period) != 0:\n extra = samples_per_trace % samples_per_period\n ch = ch.T[:-extra].T\n\n if not avg_bef_demod:\n ntraces, nsamples = np.shape(ch)\n ch = ch.reshape(int(ntraces/num_loop), num_loop, nsamples)\n else:\n nsamples = np.shape(ch)[0]\n phi = np.linspace(0, 2*np.pi*freq*((nsamples-1)*2e-9), nsamples)\n cosin = np.cos(phi)\n sinus = np.sin(phi)\n # The mean value of cos^2 is 0.5 hence the factor 2 to get the\n # amplitude.\n if not avg_bef_demod:\n ch_i = 2*np.mean(ch*cosin, axis=2)\n ch_q = 2*np.mean(ch*sinus, axis=2)\n ch_i_av = ch_i.T[0] if not avg_aft_demod else np.mean(ch_i,\n axis=0)\n ch_q_av = ch_q.T[0] if not avg_aft_demod else np.mean(ch_q,\n axis=0)\n else:\n ch_i = None\n ch_q = None\n ch_i_av = 2*np.mean(ch*cosin)\n ch_q_av = 2*np.mean(ch*sinus)\n self.write_in_database('Ch%d_I' % index, ch_i_av)\n self.write_in_database('Ch%d_Q' % index, ch_q_av)\n\n if getattr(self, 'ch%d_trace' % index):\n ch_av = ch if not avg_aft_demod else np.mean(ch, axis=0)\n self.write_in_database('Ch%d_trace' % index, ch_av)\n\n return freq, cosin, sinus, ch_i, ch_q",
"def get_mean_audio_features():\n\n conn = psycopg2.connect(database='albumpitch', user='lukewoloszyn')\n cur = conn.cursor()\n\n try:\n cur.execute(\"\"\"\n SELECT p.id, p.url, avg(acousticness), avg(danceability),\n avg(energy), avg(instrumentalness), avg(key),\n avg(liveness), avg(loudness), avg(speechiness),\n avg(tempo), avg(time_signature), avg(valence)\n FROM pitchfork p\n JOIN spotify_albums sa ON p.spotify_id = sa.id\n JOIN spotify_audio_features saf ON sa.id = saf.album_id\n GROUP BY p.id;\n \"\"\")\n result = cur.fetchall()\n finally:\n conn.close()\n\n df = pd.DataFrame(result, columns=['review_id', 'url', 'acoustic', 'dance',\n 'energy', 'instrument', 'key',\n 'live', 'loud', 'speech', 'tempo',\n 'time_signature', 'valence'])\n return df",
"def add_arith_mean_cols(assay_results_df, input_dir):\n for metab in assay_results_df.index:\n resistant = assay_results_df.ix[metab, :6]\n sensitive = assay_results_df.ix[metab, 6:12]\n overall = assay_results_df.ix[metab, :12]\n\n for count, group in enumerate([resistant, sensitive, overall]):\n arith_mean = np.mean(group)\n arith_var = np.var(group)\n if count == 0:\n assay_results_df.ix[metab, 'resistant_amean'] = arith_mean\n assay_results_df.ix[metab, 'resistant_avar'] = arith_var\n if count == 1:\n assay_results_df.ix[metab, 'sensitive_amean'] = arith_mean\n assay_results_df.ix[metab, 'sensitive_avar'] = arith_var\n if count == 2:\n assay_results_df.ix[metab, 'overall_amean'] = arith_mean\n assay_results_df.ix[metab, 'overall_avar'] = arith_var\n\n assay_results_df.to_csv(input_dir + 'assay_results_extended.tsv',\n sep='\\t',\n na_rep='NaN')\n\n return assay_results_df",
"def set_df_avg(self, save=False):\n self.set_ssc()\n self.df_avg = self.clean_df(self.df)\n self.df_avg[\"ssc_sd\"] = self.df.ssc.resample(\"%ss\" % self.i).std()\n if self.dtype == \"bedframe\":\n self._calc_bursts()\n self.save_H5(avg=save)",
"def calc_mean_amplitudes(self, channels=None):\n if channels is None or channels is self.channels:\n channel_indices = np.arange(self.channels.size)\n else:\n channel_indices = self.channels.find_fixed_indices(\n channels.fixed_index)\n\n if self.channel_profiles is None or self.channel_profiles.size == 0:\n self.channel_profiles = np.ones((self.channels.size, self.nF),\n dtype=float)\n\n self.amplitudes.fill(0.0)\n self.amplitude_weights.fill(0.0)\n # Get coarse average spectrum (FFT is stored in the filter attribute)\n fnf.calc_mean_amplitudes(\n amplitudes=self.amplitudes,\n amplitude_weights=self.amplitude_weights,\n spectrum=self.data,\n windows=self.windows,\n channel_indices=channel_indices)",
"def add_mean_and_std(df):\r\n mean_series = df.mean(axis=0)\r\n std_series = df.std(axis=0)\r\n ret = df.copy()\r\n ret.loc[0] = mean_series\r\n ret.loc[-1] = std_series\r\n return ret.sort_index()",
"def frame_variance ( frame , expression , cuts = '' ) : \n return frame_central_moment ( frame , order = 2 , expression = expression , cuts = cuts )",
"def avData(self):\n\n return self.averageData(nsamples=10)",
"def featurize_samples(samples, fs):\n features = []\n for sample in samples:\n # extract each sample to each sensor time & freq data\n time_data = sample[0]\n freqs = np.abs(sample[1][0][0])\n freq_data=[np.abs(sensor_freq_power[1]) for sensor_freq_power in sample[1]]\n # average freq power for all accel axes\n # Time features\n min_vals = [np.min(col_data) for col_data in time_data]\n max_vals = [np.max(col_data) for col_data in time_data]\n mean_vals = [np.mean(col_data) for col_data in time_data]\n median_vals=[np.median(col_data) for col_data in time_data]\n std_vals = [np.std(col_data) for col_data in time_data]\n var_vals = [np.var(col_data) for col_data in time_data]\n percentile_5=[np.percentile(col_data, 5) for col_data in time_data]\n percentile_10=[np.percentile(col_data, 10) for col_data in time_data]\n percentile_25=[np.percentile(col_data, 25) for col_data in time_data]\n percentile_75=[np.percentile(col_data, 75) for col_data in time_data]\n percentile_90=[np.percentile(col_data, 90) for col_data in time_data]\n percentile_95=[np.percentile(col_data, 95) for col_data in time_data]\n time_features =[]\n time_features.extend(min_vals)\n time_features.extend(max_vals)\n time_features.extend(median_vals)\n time_features.extend(mean_vals)\n time_features.extend(std_vals)\n time_features.extend(var_vals)\n time_features.extend(percentile_5)\n time_features.extend(percentile_10)\n time_features.extend(percentile_25)\n time_features.extend(percentile_75)\n time_features.extend(percentile_90)\n time_features.extend(percentile_95)\n\n total_features = time_features\n features.append(np.array(total_features))\n return(features)",
"def stack_meanVolt(subject_file,mean_arr,stimulus_epochs,stimulus_chans,stimuli_pairs,file_id):\n stimuliName = ['S1 obj ', 'S2 match ','S2 nomatch']\n full_epochs = epochs = [5,10,20,30,60,80,100,140,160,170,180,200,240]\n full_chans = ['FPZ','AFZ','AF1','FCZ','CPZ','PZ','P1','PO2','OZ','nd']\n row_values = []\n for s_id,stimulus in enumerate(stimuliName):\n epochs = stimulus_epochs[s_id]\n channels = stimulus_chans[s_id]\n pairs = stimuli_pairs[s_id]\n for chanel in channels:\n chan_id = full_chans.index(chanel)\n for epoch in epochs :\n epoch_id = full_epochs.index(epoch)\n voltages_mean = mean_arr[s_id,epoch_id+(chan_id*len(full_epochs)),file_id].tolist()\n row_values.extend([voltages_mean])\n \n chanMat_ = create_chanMat(pd.read_pickle(subject_file),stimulus)\n for pair in pairs:\n row_values.extend([chanMat_.corr().at[pair.split('_')[0],pair.split('_')[1]]])\n \n return row_values",
"def calc_disp(self):\r\n self.add_new_channel_zeros('disp_from_strain', \"mm\")\r\n self.channels['disp_from_strain'][\"data\"] = self.channels['avg_strain'][\"data\"] * self.specimen_length\r\n\r\n self.add_new_channel_zeros('disp_from_lvdt', \"mm\")\r\n for i in range(4):\r\n self.channels[\"disp_from_lvdt\"][\"data\"] = self.channels[\"disp_from_lvdt\"][\"data\"] + \\\r\n self.channels[\"LVDT{}\".format(i + 1)][\"data\"]\r\n self.channels[\"disp_from_lvdt\"][\"data\"] = self.channels[\"disp_from_lvdt\"][\"data\"] / 4.",
"def _compute_samples_stats(self): \n samples = self.samples\n keys_yu = {'I','s','rho_ini','rho_ter','theta'}\n keys_y = {'d_ini','d_ter','T','p'}\n samples_stats= {}\n for key in {'mean','sd','5','50','95'}: samples_stats[key]={}\n for key in keys_yu: \n samples_stats['mean'][key]=np.mean(samples[key],axis=2)\n samples_stats['sd'][key]=np.sqrt(np.var(samples[key],axis=2))\n samples_stats['5'][key]=np.percentile(samples[key],q=5,axis=2)\n samples_stats['50'][key]=np.percentile(samples[key],q=50,axis=2)\n samples_stats['95'][key]=np.percentile(samples[key],q=95,axis=2)\n for key in keys_y:\n samples_stats['mean'][key]=np.mean(samples[key],axis=1)\n samples_stats['sd'][key]=np.sqrt(np.var(samples[key],axis=1))\n samples_stats['5'][key]=np.percentile(samples[key],q=5,axis=1)\n samples_stats['50'][key]=np.percentile(samples[key],q=50,axis=1)\n samples_stats['95'][key]=np.percentile(samples[key],q=95,axis=1)\n self.samples_stats = samples_stats",
"def get_spectral_values(saveFileName=csv_save, audioDirectory=data_directory):\r\n us8k = 'air_conditioner,car_horn,children_playing,dog_bark,drilling,' \\\r\n 'engine_idling,gun_shot,jackhammer,siren,street_music'.split(sep=\",\")\r\n\r\n # Create a header for the CSV file\r\n header = 'filename chroma_stft rmse spectral_centroid spectral_bandwidth rolloff zero_crossing_rate'\r\n for i in range(1, 21):\r\n header += f' mfcc{i}'\r\n header += ' label'\r\n header = header.split()\r\n print(header)\r\n\r\n # Save Spectral feature values to a CSV file\r\n on_file = 0\r\n file = open(saveFileName, 'w', newline='')\r\n with file:\r\n writer = csv.writer(file)\r\n writer.writerow(header)\r\n for i in range(1, 11):\r\n for filename in os.listdir(f'{audioDirectory}/fold{i}'):\r\n clip = f'{audioDirectory}/fold{i}/{filename}'\r\n if clip[-3:] == \"wav\":\r\n on_file = on_file + 1\r\n print(f'On File: {on_file}')\r\n y, sr = librosa.load(clip, mono=True)\r\n rms = librosa.feature.rms(y=y)\r\n chroma_stft = librosa.feature.chroma_stft(y=y, sr=sr)\r\n spec_cent = librosa.feature.spectral_centroid(y=y, sr=sr)\r\n spec_bw = librosa.feature.spectral_bandwidth(y=y, sr=sr)\r\n rolloff = librosa.feature.spectral_rolloff(y=y, sr=sr)\r\n zcr = librosa.feature.zero_crossing_rate(y)\r\n mfcc = librosa.feature.mfcc(y=y, sr=sr)\r\n to_append = f'{filename} {np.mean(chroma_stft)} {np.mean(rms)} {np.mean(spec_cent)} {np.mean(spec_bw)} {np.mean(rolloff)} {np.mean(zcr)}'\r\n for e in mfcc:\r\n to_append += f' {np.mean(e)}'\r\n to_append += f' {us8k[int(filename.split(sep=\"-\")[1])]}'\r\n file = open(saveFileName, 'a', newline='')\r\n with file:\r\n writer = csv.writer(file)\r\n writer.writerow(to_append.split())",
"def CA_Average(avg_pts,quiet='q',rate=\"Slow\",scanDIM=1):\n print(\"\\nAverage set to: \"+str(max(avg_pts,1)))\n CA_list=Detector_List(BL_ioc())\n n=len(CA_list)-1\n for i in RangeUp(0,n,1):\n ca_ioc=CA_list[i][0]\n ca_num=CA_list[i][1]\n CA_Filter (ca_ioc,ca_num,avg_pts,rate,quiet,scanDIM)",
"def process_frame_average_color(frame):\n\n rgb_avg = int(np.average(frame[:, :, 0])), int(\n np.average(frame[:, :, 1])), int(np.average(frame[:, :, 2]))\n return rgb_avg",
"def update_player_averages(self, game):\n #dictionary providing the index of players in the current pandas dataframe\n names_ord = {k: n for n, k in enumerate(self.basic_player_data['Players'])}\n\n ##Basic Player Data Processing\n #removing unecessary columns\n b_game = game.basic_player_data.drop(['FG%', '3P%', 'FT%'], axis=1)\n #players names from new data\n names = b_game.pop('Players')\n #converting time string\n MP = b_game.pop('MP')\n MP2 = []\n for time in MP:\n if len(time) < 5:\n min = int(time[:1])\n sec = int(time[2:])\n else:\n min = int(time[:2])\n sec = int(time[3:])\n MP2.append(round(min + (sec/60), 2))\n #converting numerical data\n b_game = b_game.astype(float)\n #b_game.insert(0, 'Players', names)\n b_game.insert(1, 'MP', MP2)\n\n ##Advanced Player Data Processing\n #removing unecesary columns\n a_game = game.advanced_player_data.drop(['MP'], axis=1)\n names = a_game.pop('Players')\n #converting empty values to 0\n a_game = a_game.replace('', 0, regex=True)\n #converting to numerical data\n a_game = a_game.astype(float)\n a_game.insert(1, 'MP', MP2)\n\n ##Updating Averages\n for n,p in enumerate(names):\n #case where this player has already had a game in these averages\n if p in names_ord:\n for c in b_game.columns:\n #basic averages update\n tot1 = self.basic_player_data[c][names_ord[p]] * self.players_games_played[p]\n tot1 += b_game[c][n]\n self.basic_player_data[c][names_ord[p]] = tot1 / (self.players_games_played[p]+1)\n for c in a_game.columns:\n #advanced averages update\n tot2 = self.advanced_player_data[c][names_ord[p]] * self.players_games_played[p]\n tot2 += a_game[c][n]\n self.advanced_player_data[c][names_ord[p]] = tot2 / (self.players_games_played[p]+1)\n self.players_games_played[p] += 1\n #case otherwise\n else:\n b_data = {'Players': p}\n a_data = {'Players': p}\n for c in b_game.columns:\n b_data[c] = b_game[c][n]\n for c in a_game.columns:\n a_data[c] = a_game[c][n]\n self.players_games_played[p] = 1\n #adding new players to averages dataframe\n self.basic_player_data = self.basic_player_data.append(b_data, ignore_index = True)\n self.advanced_player_data = self.advanced_player_data.append(a_data, ignore_index=True)\n\n #sorting df by minutes played\n self.basic_player_data.sort_values(by='MP', ascending=False)\n self.advanced_player_data.sort_values(by='MP', ascending = False)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Add cell intensities for GFP and Cy3 channels
|
def add_intensities(df, sz=20, n_frames=None, verbose=False):
print('Adding cell intensities for GFP and Cy3 channels')
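    # '_nq' = '_cmdn' minus '_average' per channel (assumption: this subtracts an estimated background from the per-cell signal)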
df['GFP_nq'] = df['GFP_cmdn'] - df['GFP_average']
df['Cy3_nq'] = df['Cy3_cmdn'] - df['Cy3_average']
del df['GFP_cmdn']
del df['Cy3_cmdn']
del df['DAPI_cmdn']
del df['BF_cmdn']
df['POSITION_X'] = df['POSITION_X'].astype(int)
df['POSITION_Y'] = df['POSITION_Y'].astype(int)
return df
|
[
"def ens_CM1_C2A(ens, var = 'ALL'):\n \n# Copy data from cell centered surrogate, then average the staggered fields to the centers\n \n t0 = timer()\n \n nx = ens.nx\n ny = ens.ny\n nz = ens.nz\n \n if var.upper() == \"U\" or var.upper() == \"ALL\":\n\n fstate.xyz3d[ens.u_ptr,:,:,:,0] = 0.5*(fstate.u[:,:,:,0] + fstate.u[:,:,:,1])\n fstate.xyz3d[ens.u_ptr,:,:,:,nx-1] = 0.5*(fstate.u[:,:,:,nx-1] + fstate.u[:,:,:,nx])\n fstate.xyz3d[ens.u_ptr,:,:,:,1:nx-1] = (-fstate.u[:,:,:,0:nx-2] + 13.0*fstate.u[:,:,:,1:nx-1] \\\n -fstate.u[:,:,:,3:nx+1] + 13.0*fstate.u[:,:,:,2:nx] ) / 24.0\n \n if var.upper() == \"V\" or var.upper() == \"ALL\":\n\n fstate.xyz3d[ens.v_ptr,:,:,0,:] = 0.5*(fstate.v[:,:,0,:] + fstate.v[:,:,1,:])\n fstate.xyz3d[ens.v_ptr,:,:,ny-1,:] = 0.5*(fstate.v[:,:,ny-1,:] + fstate.v[:,:,ny,:])\n fstate.xyz3d[ens.v_ptr,:,:,1:ny-1,:] = (-fstate.v[:,:,0:ny-2,:] + 13.0*fstate.v[:,:,1:ny-1,:] \\\n -fstate.v[:,:,3:ny+1,:] + 13.0*fstate.v[:,:,2:ny,:] ) / 24.0\n \n if var.upper() == \"W\" or var.upper() == \"ALL\":\n\n fstate.xyz3d[ens.w_ptr,:,0,:,:] = 0.5*(fstate.w[:,0,:,:] + fstate.w[:,1,:,:])\n fstate.xyz3d[ens.w_ptr,:,nz-1,:,:] = 0.5*(fstate.w[:,nz-1,:,:] + fstate.w[:,nz,:,:])\n fstate.xyz3d[ens.w_ptr,:,1:nz-1,:,:] = (-fstate.w[:,0:nz-2,:,:] + 13.0*fstate.w[:,1:nz-1,:,:] \\\n -fstate.w[:,3:nz+1,:,:] + 13.0*fstate.w[:,2:nz,:,:] ) / 24.0\n \n# Create ens variables to point at A-grid velocities\n\n ens.addvariable(\"UA\", data=fstate.xyz3d[ens.u_ptr,:,:,:,:], coords = ('MEMBER,NZ,NY,NX')) \n ens.addvariable(\"VA\", data=fstate.xyz3d[ens.v_ptr,:,:,:,:], coords = ('MEMBER,NZ,NY,NX')) \n ens.addvariable(\"WA\", data=fstate.xyz3d[ens.w_ptr,:,:,:,:], coords = ('MEMBER,NZ,NY,NX')) \n \n if time_all: print(\"\\n Wallclock time to convert from C to A grid:\", round(timer() - t0, 3), \" sec\")\n\n return",
"def __init__(self, input_size, input_dim, hidden_dim, kernel_size, bias):\n super(ConvGRUCell, self).__init__()\n self.height, self.width = input_size\n self.padding = kernel_size[0] // 2, kernel_size[1] // 2\n self.hidden_dim = hidden_dim\n self.bias = bias\n \n self.conv_gates = nn.Conv2d(in_channels=input_dim + hidden_dim,\n out_channels=2*self.hidden_dim, # for update_gate,reset_gate respectively\n kernel_size=kernel_size,\n padding=self.padding,\n bias=self.bias)\n\n self.conv_can = nn.Conv2d(in_channels=input_dim+hidden_dim,\n out_channels=self.hidden_dim, # for candidate neural memory\n kernel_size=kernel_size,\n padding=self.padding,\n bias=self.bias)",
"def add_lgn_cell(img_size):\n #x, y, sc, ss, rc, rs\n xx, yy = np.meshgrid(img_size,img_size)\n grid_xx = tf.constant(xx)\n grid_yy = tf.constant(yy)\n\n \n pi = tf.constant(np.pi)\n pos = ((grid_xx - x)**2 + (self.grid_yy - y)**2)\n center = tf.exp(-pos/2/sc) / (2*(sc)*pi)\n surround = tf.exp(-pos/2/(sc + ss)) / (2*(sc + ss)*pi)\n \n weight_vec = tf.reshape((rc*(center)) - (rs*(surround)), [-1, 1])\n return tf.matmul(self.images, weight_vec)",
"def channels_per_node():\n return 1",
"def __init__(self, cl_context, num_boxes_h, allow_anim=True):\n super(OpenCLGridNoise3D, self).__init__(_NUM_CHANNELS, _NUM_SPACE_DIMS)\n \n self.cl_context = cl_context\n self.num_boxes_h = num_boxes_h\n self.box_width = 1 / num_boxes_h\n self.allow_anim = allow_anim\n \n # Precompile the OpenCL programs.\n with open('opencl/gridNoise3D.cl', 'r', encoding='utf-8') as program_file:\n self.cl_program_noise = pyopencl.Program(self.cl_context, program_file.read()) \\\n .build(options=['-I', 'opencl/include/'])\n \n self.seed = random.randrange(0, 2 ** 32)",
"def _assign_chs(self, n_nom_channels=0):\n if n_nom_channels == 0:\n n_nom_channels = self.n_channels\n channels_per_subgrid_cell = []\n channels_per_subgrid_cell_accu = [0]\n channels_per_cell = n_nom_channels / 7\n ceil = math.ceil(channels_per_cell)\n floor = math.floor(channels_per_cell)\n tot = 0\n for i in range(7):\n if tot + ceil + (6 - i) * floor > n_nom_channels:\n tot += ceil\n cell_channels = ceil\n else:\n tot += floor\n cell_channels = floor\n channels_per_subgrid_cell.append(cell_channels)\n channels_per_subgrid_cell_accu.append(tot)\n for r in range(self.rows):\n for c in range(self.cols):\n label = self.labels[r][c]\n lo = channels_per_subgrid_cell_accu[label]\n hi = channels_per_subgrid_cell_accu[label + 1]\n self.nom_chs_mask[r][c][lo:hi] = 1\n self.nom_chs[r][c] = np.arange(lo, hi)",
"def copy_values_to_grid(self):\n self.grid.at_node[\"flow__depth\"] = self.h\n self.grid.at_link[\"flow__horizontal_velocity\"] = self.u\n self.grid.at_link[\"flow__vertical_velocity\"] = self.v\n self.C_i[:, :] = self.C_init\n all_wet_nodes = np.where(self.h > self.h_w)\n self.C_i[:, all_wet_nodes] = self.Ch_i[:,\n all_wet_nodes] / self.h[all_wet_nodes]\n for i in range(self.number_gclass):\n self.grid.at_node[\"flow__sediment_concentration_\" +\n str(i)] = self.C_i[i, :]\n self.grid.at_node[\n \"flow_sediment_volume__horizontal_gradient_\" + str(i)\n ] = self.dChdx_i[i, :]\n self.grid.at_node[\n \"flow_sediment_volume__vertical_gradient_\" + str(i)\n ] = self.dChdy_i[i, :]\n self.grid.at_node[\n \"bed__sediment_volume_per_unit_area_\" + str(i)\n ] = self.bed_thick_i[i, :]\n self.grid.at_node[\n \"bed__active_layer_fraction_\" + str(i)\n ] = self.bed_active_layer[i, :]\n self.C[:] = np.sum(self.C_i, axis=0)\n self.grid.at_node[\"flow__sediment_concentration_total\"] = self.C\n self.grid.at_node[\"topographic__elevation\"] = self.eta\n self.grid.at_node[\"bed__thickness\"] = self.bed_thick\n self.grid.at_node[\"flow__surface_elevation\"] = self.eta + self.h\n self.grid.at_node[\"flow__horizontal_velocity_at_node\"] = self.u_node\n self.grid.at_node[\"flow__vertical_velocity_at_node\"] = self.v_node\n self.grid.at_link[\"flow_horizontal_velocity__horizontal_gradient\"] = self.dudx\n self.grid.at_link[\"flow_horizontal_velocity__vertical_gradient\"] = self.dudy\n self.grid.at_link[\"flow_vertical_velocity__horizontal_gradient\"] = self.dvdx\n self.grid.at_link[\"flow_vertical_velocity__vertical_gradient\"] = self.dvdy\n self.grid.at_node[\"flow_depth__horizontal_gradient\"] = self.dhdx\n self.grid.at_node[\"flow_depth__vertical_gradient\"] = self.dhdy\n if self.model == \"4eq\":\n self.grid.at_link[\"flow__TKE\"] = self.Kh\n self.grid.at_link[\"flow_TKE__horizontal_gradient\"] = self.dKhdx\n self.grid.at_link[\"flow_TKE__vertical_gradient\"] = self.dKhdy",
"def get_Hu():\n \n ue = np.zeros((nx+1,ny)) \n uw = np.zeros((nx+1,ny))\n un = np.zeros((nx+1,ny))\n us = np.zeros((nx+1,ny))\n vn = np.zeros((nx+1,ny))\n vs = np.zeros((nx+1,ny))\n τxxe = np.zeros((nx+1,ny))\n τxxw = np.zeros((nx+1,ny))\n τxyn = np.zeros((nx+1,ny))\n τxys = np.zeros((nx+1,ny))\n Hu = np.zeros((nx+1,ny))\n \n i = np.arange(1,nx) # u-cell centers in domain interior\n \n ue[i,:] = (u[i+1,:] + u[i,:])/2\n uw[i,:] = (u[i,:] + u[i-1,:])/2\n \n j = np.arange(0,ny-1)\n un[IJ(i,j)] = (u[IJ(i,j+1)] + u[IJ(i,j)])/2\n un[i,ny-1] = ubc_t\n j = np.arange(1,ny)\n us[IJ(i,j)] = (u[IJ(i,j)] + u[IJ(i,j-1)])/2\n us[i,0] = ubc_b\n \n j = np.arange(0,ny)\n vn[IJ(i,j)] = (v[IJ(i-1,j+1)]+v[IJ(i,j+1)])/2\n vs[IJ(i,j)] = (v[IJ(i-1,j)] +v[IJ(i,j)]) /2\n \n τxxe[i,:] = -2*ν*(u[i+1,:] - u[i,:]) /Δx\n τxxw[i,:] = -2*ν*(u[i,:] - u[i-1,:])/Δx\n \n j = np.arange(0,ny-1)\n τxyn[IJ(i,j)] = -ν*(u[IJ(i,j+1)]-u[IJ(i,j)])/Δy - ν*(v[IJ(i,j+1)]-v[IJ(i-1,j+1)])/Δx\n τxyn[i,ny-1] = -ν*(ubc_t-u[i,ny-1])/(Δy/2) - ν*(v[i,ny]-v[i-1,ny])/Δx \n \n j = np.arange(1,ny)\n τxys[IJ(i,j)] = -ν*(u[IJ(i,j)]-u[IJ(i,j-1)])/Δy - ν*(v[IJ(i,j)]-v[IJ(i-1,j)])/Δx\n τxys[i,0] = -ν*(u[i,0]-ubc_b)/(Δy/2) - ν*(v[i,0]-v[i-1,0])/Δx\n \n Hu[i,:] = -((ue[i,:]*ue[i,:] - uw[i,:]*uw[i,:])/Δx + (un[i,:]*vn[i,:] - us[i,:]*vs[i,:])/Δy) \\\n -((τxxe[i,:] - τxxw[i,:])/Δx + (τxyn[i,:] - τxys[i,:])/Δy)\n \n return Hu",
"def cTensorMultiply():\n code_text = \"\"\"\n KERNEL void cTensorMultiply(\n const unsigned int batch, // batch \n const unsigned int dim, // dimensions\n GLOBAL_MEM const unsigned int *Nd, // In batch mode, Nd*batch but is calculated outside the kernel\n GLOBAL_MEM const unsigned int *Nd_elements, // Number of elements to move along the dimension = strides / itemsize\n GLOBAL_MEM const float *invNd_elements, // float: inverse of the Nd_elements, which aims for fast division // batch mode: Nd_elements / batch\n GLOBAL_MEM const float *vec, // Real, vector, length sum Nd[dimid]\n GLOBAL_MEM float2 *outdata, \n const unsigned int div) \n {\n const unsigned int gid=get_global_id(0); \n const unsigned int pid = (float)gid / (float)batch;\n // const unsigned int bat = gid - pid * batch;\n \n unsigned int group;\n unsigned int Nd_indx_shift = 0;\n float mul = 1.0; \n unsigned int res = pid; \n \n for (unsigned int dimid = 0; dimid < dim; dimid ++){\n group = (float)res * invNd_elements[dimid]; // The index along the axis\n res = res - group * Nd_elements[dimid];\n \n const unsigned int N = Nd[dimid]; \n \n mul = mul * vec[group + Nd_indx_shift];\n \n Nd_indx_shift = Nd_indx_shift + N;\n }\n \n if (div == 1){\n // for (unsigned int bat = 0; bat < batch; bat ++ )\n // {\n float2 tmp = outdata[gid];\n tmp.x = tmp.x / mul;\n tmp.y = tmp.y / mul;\n outdata[gid] = tmp;\n // };\n };\n if (div == 0){\n // for (unsigned int bat = 0; bat < batch; bat ++ )\n // {\n float2 tmp = outdata[gid];\n tmp.x = tmp.x * mul;\n tmp.y = tmp.y * mul;\n outdata[gid] = tmp;\n // };\n }; \n \n };\n \"\"\" \n return code_text",
"def _updateCost(self):\n assert len(self.multiplicity) > 0\n\n self._numNodes = len(self.multiplicity)\n\n discreteCost = 1\n sharedCost = 2\n if self.chipCounter != self._postChipCounter:\n discreteCost += 1\n sharedCost += 1\n numDiscrete = np.sum(self.multiplicity > 1)\n hasShared = np.any(self.multiplicity == 1)\n self._numAxons = numDiscrete + hasShared\n self._numAxonCfgEntries = \\\n numDiscrete * discreteCost + hasShared * sharedCost\n self._cost = self._numAxonCfgEntries / self._maxNumAxonCfgEntries",
"def custom_grid():\n\n return np.arange(1, 82, dtype=np.int32).reshape((9, 9))",
"def cSpmvh():\n \n R=\"\"\"\n \n KERNEL void pELL_spmvh_mCoil(\n const unsigned int Reps, // number of coils\n const unsigned int nRow, // number of rows\n const unsigned int prodJd, // product of Jd\n const unsigned int sumJd, // sum of Jd\n const unsigned int dim, // dimensionality\n GLOBAL_MEM const unsigned int *Jd, // Jd\n // GLOBAL_MEM const unsigned int *curr_sumJd, // \n GLOBAL_MEM const unsigned int *meshindex, // meshindex, prodJd * dim\n GLOBAL_MEM const unsigned int *kindx, // unmixed column indexes of all dimensions\n GLOBAL_MEM const float2 *udata, // interpolation data before Kronecker product\n GLOBAL_MEM float2 *k, \n //GLOBAL_MEM float2 *res,\n GLOBAL_MEM const float2 *input) // y\n { \n const unsigned int t = get_local_id(0);\n const unsigned int vecWidth=${LL};\n // Thread ID within wavefront\n const unsigned int id = t & (vecWidth-1);\n \n // One row per wavefront\n unsigned int vecsPerBlock=get_local_size(0)/vecWidth;\n unsigned int myRow=(get_group_id(0)*vecsPerBlock) + (t/ vecWidth); // the myRow-th non-Cartesian sample\n unsigned int m = myRow / Reps;\n unsigned int nc = myRow - m * Reps;\n \n float2 zero;\n zero.x = 0.0;\n zero.y = 0.0;\n \n \n if (myRow < nRow * Reps)\n {\n const unsigned int vecStart = 0; \n const unsigned int vecEnd =prodJd; \n float2 u=zero;\n \n for (unsigned int j = vecStart+id; j<vecEnd; j += vecWidth)\n { \n // now doing the first dimension\n unsigned int index_shift = m * sumJd;\n // unsigned int tmp_sumJd = 0;\n unsigned int J = Jd[0];\n unsigned int index = index_shift + meshindex[dim*j + 0];\n unsigned int col = kindx[index] ;\n float2 spdata = udata[index];\n index_shift += J; \n for (unsigned int dimid = 1; dimid < dim; dimid ++ )\n {\n J = Jd[dimid];\n index = index_shift + meshindex[dim*j + dimid]; // the index of the partial ELL arrays *kindx and *udata\n col += kindx[index];// + 1 ; // the column index of the current j\n float tmp_x = spdata.x;\n float2 tmp_udata = udata[index];\n spdata.x = tmp_x * tmp_udata.x - spdata.y * tmp_udata.y; // the spdata of the current j\n spdata.y = tmp_x * tmp_udata.y + spdata.y * tmp_udata.x; \n index_shift += J;\n }; // Iterate over dimensions 1 -> Nd - 1\n \n float2 ydata=input[myRow]; // kout[col];\n u.x = spdata.x*ydata.x + spdata.y*ydata.y;\n u.y = - spdata.y*ydata.x + spdata.x*ydata.y;\n \n atomic_add_float2(k + col*Reps + nc, u);//, res + col*Reps + nc);\n LOCAL_BARRIER;\n // atomic_add_float2(k + col*Reps + nc, u, res + col*Reps + nc);\n }; // Iterate for (unsigned int j = 0; j < prodJd; j ++)\n }; // if (m < nRow)\n \n }; // End of xELL_spmvh_mCoil \n \n \n KERNEL void pELL_spmvh_mCoil_new(\n const unsigned int Reps, // number of coils\n const unsigned int nRow, // number of rows\n const unsigned int prodJd, // product of Jd\n const unsigned int sumJd, // sum of Jd\n const unsigned int dim, // dimensionality\n GLOBAL_MEM const unsigned int *Jd, // Jd\n // GLOBAL_MEM const unsigned int *curr_sumJd, // \n GLOBAL_MEM const unsigned int *meshindex, // meshindex, prodJd * dim\n GLOBAL_MEM const unsigned int *kindx, // unmixed column indexes of all dimensions\n GLOBAL_MEM const float2 *udata, // interpolation data before Kronecker product\n GLOBAL_MEM float2 *k, \n GLOBAL_MEM float2 *res,\n GLOBAL_MEM const float2 *input) // y\n {\n unsigned int myRow0= get_global_id(0);\n unsigned int myRow= myRow0/(float)Reps;\n unsigned int nc = myRow0 - myRow*Reps;\n float2 zero;\n zero.x = 0.0;\n zero.y = 0.0;\n if (myRow < nRow){ \n for (unsigned int j = 0; j < prodJd; j ++){\n float2 u = zero;\n\n // 
now doing the first dimension\n unsigned int index_shift = myRow * sumJd;\n // unsigned int tmp_sumJd = 0;\n unsigned int J = Jd[0];\n unsigned int index = index_shift + meshindex[dim*j + 0];\n unsigned int col = kindx[index] ;\n float2 spdata = udata[index];\n index_shift += J; \n for (unsigned int dimid = 1; dimid < dim; dimid ++ ){\n J = Jd[dimid];\n index = index_shift + meshindex[dim*j + dimid]; // the index of the partial ELL arrays *kindx and *udata\n col += kindx[index];// + 1 ; // the column index of the current j\n float tmp_x = spdata.x;\n float2 tmp_udata = udata[index];\n spdata.x = tmp_x * tmp_udata.x - spdata.y * tmp_udata.y; // the spdata of the current j\n spdata.y = tmp_x * tmp_udata.y + spdata.y * tmp_udata.x; \n index_shift += J;\n }; // Iterate over dimensions 1 -> Nd - 1\n \n float2 ydata=input[myRow*Reps + nc]; // kout[col];\n u.x = spdata.x*ydata.x + spdata.y*ydata.y;\n u.y = - spdata.y*ydata.x + spdata.x*ydata.y;\n atomic_add_float2(k + col*Reps + nc, u);\n \n }; // Iterate for (unsigned int j = 0; j < prodJd; j ++)\n \n }; // if (m < nRow)\n \n }; // End of pELL_spmvh_mCoil \n \"\"\"\n return R",
"def _apply_global_dimmer(self, channels):\n # print(\"\")\n global_dimmer_16bit = self.config['system']['global_dimmer']\n # print(\"global_dimmer_16bit\", global_dimmer_16bit)\n # 65535 = 255\n # gd = gd8\n # global_dimmer_8bit = 255 * global_dimmer_16bit / 65535\n # print(\"global_dimmer_8bit\", global_dimmer_8bit)\n global_dimmer_norm = 1.0 * global_dimmer_16bit / 65535\n # print(\"global_dimmer_norm\", global_dimmer_norm)\n # print(\"\")\n # print(channels)\n for i, ch in enumerate(channels):\n # channels[i] = ch * global_dimmer_8bit\n channels[i] = int(ch * global_dimmer_norm)\n # print(channels)\n return channels",
"def test_disp100(nq, ne):\n\n uc = UnitCell( )\n at1=Atom(symbol='Fe', mass=57) ; pos1=(0.0,0.0,0.0)\n at2=Atom(symbol='Al') ; pos2=(0.5,0.5,0.5)\n site1 = Site(pos1, at1)\n site2 = Site(pos2, at2)\n uc.addAtom( at1, pos1, \"Fe1\" )\n uc.addAtom( at2, pos2, \"Al1\" )\n print uc\n\n kptlist = uc.getMonkhorstPackGrid((20,20,20)).reshape(8000,3)\n sqecalc = AbInitio.kernelGenerator.SqeCalculator.SqeCalculator(uc, kpoints=kptlist)\n\n sqecalc.readIDFeigenvectors(filename='pols_FeAl222.idf')\n sqecalc.readEigenvaluesFromIDFomega2(filename='omega2_FeAl222.idf')\n\n sqecalc._DebyeWallerCalculator._energies = sqecalc._energies\n sqecalc._DebyeWallerCalculator._polvecs = sqecalc._polvecs\n\n estart = 0.0\n deltae = 50.0 / ne\n sqecalc._etransferTol = deltae\n\n deltaqx = 3.0 / nq\n sqecalc._qtransferTolRadius = deltaqx\n qstart = numpy.array([0.0, 0.0, 0.0])\n deltaq = numpy.array([deltaqx, 0.0, 0.0])\n\n sqe = numpy.zeros((nq,ne), dtype='float')\n\n for iq in range(nq):\n for ie in range(ne):\n qtransfer = qstart + iq * deltaq\n etransfer = estart + ie * deltae\n sqe[iq,ie] = sqecalc.calcSqeCohCreateAllmodes(qtransfer, etransfer)\n print iq, ie, sqe[iq,ie]\n\n pylab.imshow(sqe)\n pylab.show()\n end = raw_input()\n return",
"def _compute_out_channels(out_channels, activation):\n\n if activation == Activation.GLU:\n return out_channels * 2\n return out_channels",
"def change_cell(nsys0,X0):\n if X0.dtype != int:\n raise TypeError('X0.dtype is wrong.')\n if X0.shape != (3,3):\n raise TypeError('X0 has wrong shape.')\n X = np.array(X0,dtype=float)\n ncp = np.zeros(3,dtype=int)\n ncp[0] = X0[0,:].max()\n ncp[1] = X0[1,:].max()\n ncp[2] = X0[2,:].max()\n\n nsys = replicate(nsys0,ncp[0],ncp[1],ncp[2])\n hmat0 = nsys0.get_hmat()\n hmat = np.dot(hmat0,X0)\n print(nsys)\n sposs = nsys.get_scaled_positions()\n spnews = np.array(sposs)\n nsys.set_hmat(hmat)\n #...Since hmat0 is that of extended system,\n #...X should correspond to it.\n X[0,:] /= ncp[0]\n X[1,:] /= ncp[1]\n X[2,:] /= ncp[2]\n Xi = np.linalg.inv(X)\n for i,p in enumerate(sposs):\n pnew = np.dot(Xi,p)\n for l in range(3):\n pnew[l] = nappy.util.pbc(pnew[l])\n spnews[i,:] = pnew[:]\n nsys.set_scaled_positions(spnews)\n return nsys",
"def powerdens_core(pcoree,pcorei,ncore,aphpath='.',lyni=[0.05,0.05],nwomin=1e15,lyte=[0.05,0.05],lyti=[0.05,0.05],nwimin=1e16,isplflxl=1,ngbackg=1e10,cngflox=1,cngfloy=1,\n isngcore=1,albedoc=0.5,isupcore=1,xstscal=0.02,ngscal=1,xgscal=0.01,cfloxiplt=1\n\n):\n # Set path to rate data\n aph.aphdir=aphpath # Hydrogen rates\n\n # Initalize arrays\n com.nhsp=2 # N.o. hydrogenic species\n com.ngsp=1 # N.o. hydrogenic gaseous species\n\n\n ''' Charged species setup '''\n bbb.minu[0]=2 # H+ mass in AMU\n bbb.ziin[0]=1 # H+\n bbb.znuclin[0]=1 # H+ nuclear charge \n\n # Scale factors\n #- - - - - - - - - - -\n bbb.cngtgx=0. # X-flux coefficient for gaseous component i in ion energy equation\n bbb.cngtgy=0. # Y-flux coefficient for gaseous component i in ion energy equation\n\n corepower(pcoree,pcorei) # Set power BC for ions at core bound\n coremomentum(isupcore) # Set neumann momentum BC for plasma at core bound\n coredens(ncore) # Set dirichlet BC for plasma at core bound\n \n wall_BC_scalelength(lyni=lyni,nwomin=nwomin,lyte=lyte,lyti=lyti,nwimin=nwimin,isplflxl=isplflxl) # Set gradient scal-length wall BCs\n\n ''' Atom model setup '''\n inertial_atoms( ngbackg=ngbackg,cngflox=cngflox,cngfloy=cngfloy,isngcore=isngcore,albedoc=albedoc, \n isupcore=isupcore,xstscal=xstscal,ngscal=ngscal,xgscal=xgscal,cfloxiplt=cfloxiplt) # Activate intertial atoms \n\n ''' Rate model setup '''\n Stotler_loglog() # Use Stotler log-log rate file\n\n ''' Allocate plasma arrays '''\n if bbb.pyrestart_file[0].decode('UTF-8')!='read':\n bbb.allocate()",
"def scalar_mult_kernel(d_out, d_u, d_c):\n i = cuda.grid(1)\n n = d_u.shape[0]\n if i >= n:\n return \n d_out[i] = d_u[i] * d_c",
"def _update_cell_dim(self, density=1.0):\n\n # Calculate masses of each ingredient in g\n masses = [\n ingredient.n_mol * ingredient.mass / N_A\n for ingredient in self.formulation.ingredients\n ]\n\n # Calculate cell volume in nm3\n cell_volume = 1E21 * sum(masses) / density\n\n # Calculate each dimension in nm, assuming a square cell geometry\n self.cell_dim = [cell_volume ** (1/3) for _ in range(3)]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
insert Employee into the DB
|
def insert_employee(self, employee_id, first_name, last_name, min_shifts):
    if not self.check_for_db(): # if DB doesn't exist create it
        self.create_db()
    connection = sqlite3.connect(self.name)
    crsr = connection.cursor()
    # quote the name columns so SQLite treats them as text values
    insert_query = """INSERT INTO Employee
                      VALUES ({}, "{}", "{}", {});""".format(employee_id, first_name, last_name, min_shifts)
    crsr.execute(insert_query)
    connection.commit()
    connection.close()
|
[
"def insertNewEmployee(self):\n try:\n self.takeUserInput()\n self.insertNewEmployeeinDB(self.empId,self.empName,self.jobName,self.managerId,self.hireDate,self.salary,self.commission,self.deptId)\n except Exception as e:\n print(\"Error inserting New Employee,\", e)",
"def insert (self, anObject):\n lock = self.server.acquireLock ()\n self.server.sql (\"\"\"insert Department (\n deptCode,\n name,\n managerID)\n values (%s, %s, %s)\"\"\" % ( \\\n self.sqlInt (anObject.deptCode),\n self.sqlString (anObject.name),\n self.sqlInt (anObject.managerID),\n ))\n anObject.departmentID = self.server.getIdentity ()\n anObject.setInDatabase ()",
"def create_employee(self, new_employee):\n # TO-DO",
"def register():\n emp_data = request.get_json()\n db = get_db()\n db.execute(\"insert into employee\" +\n \"(fname, lname, phoneno, emailid, sal, bdate, jdate)\" +\n \"values (?, ?, ?, ?, ?, ?, ?)\", [emp_data[\"fname\"],\n emp_data[\"lname\"],\n emp_data[\"phoneno\"],\n emp_data[\"emailid\"],\n emp_data[\"salary\"],\n emp_data[\"bdate\"],\n emp_data[\"jdate\"]])\n db.commit()\n response = jsonify({\"response\": \"Registration successful!\"})\n return response",
"def insert_employee_times(self,employee_id,date, start_time=\"NULL\", end_time=\"NULL\"):\n try:\n if not self.employee_time_exists(employee_id, date):\n if not self.check_for_db(): # if DB doesn't exist create it\n self.create_db()\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n start_time=\"NULL\"\n query = \"\"\"INSERT INTO Employee_Times VALUES ({},{},{},{})\"\"\".format(employee_id, date, start_time, end_time)\n\n crsr.execute(query)\n connection.commit()\n connection.close()\n return True\n return False\n\n except IOError:\n print(\" DBError\")",
"def test_add_team_employee_employee(self):\n response = self.client.post(\"/team-employees/\", {\"employee_id\": 1, \"team_id\": 2})\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)",
"def test_create_employee(self):\n\n employee = {\n 'address': 'Kungsgatan 2000',\n 'affiliationType': 'Standard',\n 'bankAccountNumber': '7180982',\n 'bankRegistrationNumber': '5479',\n 'city': 'Copenhagen',\n 'companyID': self.company_id,\n 'email': \"{}@example.com\".format(randomword(6)),\n 'language': 'da',\n 'name': \"{}\".format(randomword(6)),\n 'nationalID': generate_cpr(),\n 'phoneNumber': '4542422325',\n 'postalCode': '2000',\n 'sendLogin': False\n }\n employee = self.api.create_employee(employee=employee)\n self.assertIsNotNone(employee.data)\n self.employee_id = employee.data.id",
"def insert(self, sql):",
"def create(self, validated_data):\n return Employee.objects.create(**validated_data)",
"def add_employee(self, employee):\n\t\tself.employees.add(employee)",
"def save(self, operator):\n self.connect()\n try:\n sql = \"\"\"insert into {0} values ({1},\"{2}\",\"{3}\",\"{4}\",\"{5}\",\"{6}\")\"\"\".format(\n self.tablename, operator.enrolmentNumber, operator.firstName,\n operator.lastName, operator.dob, operator.faculty, operator.email\n )\n\n self.cursor.execute(sql)\n except Exception as err:\n print(err)\n return str(err)\n finally:\n self.disconnect()\n\n return None",
"def insert_in_db(the_json, success):\n DB.session.add(email_record_from_json(the_json, success))\n DB.session.commit()",
"def create_sample_employee(user, company, is_admin=False):\n return models.Employee.objects.create(employee=user,\n company=company,\n is_admin=is_admin)",
"def upload_employee_data_from_file(self):\n os.chdir('../dbms')\n conn = db.create_connection(\"Employee401K.db\") # Create DB Connection\n\n with open('../data/EmployeeFile.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n for row in csv_reader:\n if line_count == 0:\n line_count += 1\n continue\n try:\n employee_object = emp.Employee(row[0], row[1], row[2], row[3], row[4], row[5], row[6])\n except ValueError as err:\n self.write_errors_to_file(row[0], row[1], row[2], row[3], row[4], row[5], row[6], err)\n else:\n employee_detail = (employee_object.ssn, employee_object.first_name, employee_object.last_name,\n employee_object.age, employee_object.start_dt, employee_object.contrib_pct,\n employee_object.acct_balance)\n db.create_employee(conn, employee_detail)\n conn.commit()",
"def insert(cls, env, record):\n with env.db_transaction as db:\n\n cursor = db.cursor()\n sqlString = \"\"\"INSERT INTO ticket_template_store\n (tt_time,tt_user,tt_name,tt_field,tt_value)\n VALUES (%s,%s,%s,%s,%s)\"\"\"\n cursor.execute(sqlString, record)",
"def insert_row(self, data):\n print(\"Inserting row to database\")\n self.cursor.executemany(self.insert_query, data)\n self.connection.commit()",
"def save(self):\r\n person_db = PersonDBConnection()\r\n person_id = person_db.insert_person(\r\n arriv_time=self.arrival_time,\r\n sex=self.sex,\r\n status=self.status,\r\n using_time=self.using_time,\r\n client_address=self.client_address\r\n )\r\n return person_id",
"def insert_to_db(self, db):\n cur = db.cursor()\n id_company, company = self.find_company_by_id(cur)\n if company is not None:\n self.update_data_company(company, cur, db)\n return id_company\n cur.execute(\n \"INSERT INTO companies (name, headquarters_city, \"\n \"headquarters_country, headquarters_currency, \"\n \"rating, rating_count,\"\n \"benefits_rating, benefits_rating_count, nb_of_employees,\"\n \" founded, type, website, competitors) \"\n \"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)\",\n (self.name, self.headquarters_city, self.headquarters_country,\n self.headquarters_currency, self.rating, self.rating_count,\n self.benefits_rating, self.benefits_rating_count,\n self.nb_of_employees,\n self.founded, self.type,\n self.website, self.competitors))\n db.commit()\n id_company, _ = self.find_company_by_id(cur)\n return id_company",
"def insert(self, entity):\n if getattr(entity, \"_id\") is not None:\n raise ValueError(\"_id is not null\")\n entity.pre_persist()\n self.db[self.collect].insert_one(entity.to_dic())"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
log completed shifts into the DB
|
def log_shift(self, employee_id, date, start_hour, end_hour):
try:
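        # only log a shift once per employee and date; logged_shift_exists guards against duplicates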
if not self.logged_shift_exists(employee_id,date):
if not self.check_for_db(): # if DB doesn't exist create it
self.create_db()
connection = sqlite3.connect(self.name)
crsr = connection.cursor()
query = """INSERT INTO Employee_Shift VALUES ({},\"{}\",\"{}\",\"{}\")""".format(employee_id,date,start_hour,end_hour)
crsr.execute(query)
connection.commit()
connection.close()
except IOError:
print("DB Error")
|
[
"def updatedb(zone):\n con = framework.lite.connect('/etc/SmartHome/Databases/Security.sqlite')\n cur = con.cursor()\n cur.execute(\"INSERT INTO Log(Time, Zone, State) VALUES(?, ?, ?)\", [zone.lastevent, zone.name, zone.state])\n con.commit()\n con.close()",
"def commit(self):\n if self._dblog:\n self._feedlgr.commit()",
"def post_shift(request):\n\n if request.method == 'POST':\n shift_id = request.POST['shift_id']\n shift = Shift.objects.get(pk=shift_id)\n shift.is_available = True;\n swap = ShiftSwap()\n swap.posted_by = request.user.ta\n swap.shift = Shift.objects.get(pk=shift_id)\n # The swap must be saved first because if it fails then we must not\n # modify the original shift (so that the user can attempt to post the\n # shift again).\n swap.save()\n shift.save()\n\n return redirect('/shifts')",
"def _stash_log(self) -> None:\n self.log.info(f\"Move source log for {self.__api.upload_id} to\"\n f\" '{self.__api.storage.deleted_logs_path}'.\")\n self.log.info(f\"Delete workspace '{self.__api.upload_id}'.\")\n try:\n self.__api.storage.stash_deleted_log(self,\n self.log.file)\n except Exception as e:\n self.log.info(f'Saving source.log failed: {e}')",
"def record_trip_event(client_id,driver_id,start_time,lat,lon,fare,distance,rating):\n\t\n\tsql_insert = \"\"\"INSERT INTO event_trip (client_id,driver_id,start_time,lat,lon,fare,distance,rating) VALUES (%s,%s,'%s',%s,%s,%s,%s,%s)\"\"\" % (client_id,driver_id,start_time,lat,lon,fare,distance,rating)\n\t\n\tprint sql_insert\n\t\n\tcur.execute(sql_insert)\n\tdb.commit()\n\tprint 'success'",
"def start_new_testLog():\n\n open(clientPath+\"yesterdays_testIDs.log\", 'w').close()\n shutil.copyfile(clientPath+\"todays_testIDs.log\", clientPath+\"yesterdays_testIDs.log\")\n \n today= open(clientPath+\"todays_testIDs.log\", 'w')\n today.write(time.strftime(\"%m/%d/%Y\")+\"\\n\")\n today.close()",
"def on_step_begin(self, step, logs):\n pass",
"def _add_state(self, state, date):\n if state.direction == 2:\n self.logger.info(\"[%s] INS STATE: %s\" % (date, str(state)))",
"def log_modification_history(self, start_time, end_time, status=\"success\", message=\"\"):\n db = DB()\n provisioning_type = 'Incremental_provisioning'\n unique_ID = uuid.uuid4()\n table_names = db.get_all_table_names(settings.target_schema)\n\n modified_tables = list(set((table_name for table_name in transaction_mapper.TRANSACTION_RESULTS.keys() if\n table_name in table_names)))\n\n if modified_tables:\n sql_statements = []\n for table_name in modified_tables:\n\n sql = \"\"\"INSERT INTO public.provisioning_history(id, layer, load_type, row_count, start_time, end_time, status, messages) VALUES('{}','{}', '{}', '{}', '{}', '{}','{}','{}');\"\"\".format(unique_ID, table_name, provisioning_type, transaction_mapper.TRANSACTION_RESULTS[table_name], start_time, end_time, status, message)\n\n sql_statements.append(sql)\n sql_statements = \"\".join(sql_statements)\n\n # Write to a temporary json file for future references\n with open(xml_log_history, 'a+') as fp:\n json.dump(sql_statements, fp)\n fp.write(\"\\n\")\n\n # Write to a Database\n try:\n databases = get_databases(\"LoggingDB\")\n for database in databases:\n credentials = eval(config[\"LoggingDB\"][\"dbs\"])[database]\n engine = db.connect(credentials)\n\n with engine.connect() as con:\n res = con.execute(sql_statements)\n logger.info(\"Inserted the modifications for tables successfully into provisioning history table!!\")\n except Exception as error:\n logger.error(error)\n exit()\n else:\n logger.info(\"No Update in Feed\")",
"def updateroster():\n print(\"Update Roster\")\n if not request.form.get(\"user_id\"):\n return apology(\"must provide user_id\")\n if not request.form.get(\"date\"):\n return apology(\"must provide date\")\n if not request.form.get(\"location\"):\n return apology(\"must provide location\")\n if not request.form.get(\"start_time\"):\n return apology(\"must provide start_time\")\n if not request.form.get(\"end_time\"):\n return apology(\"must provide end_time\")\n if not request.form.get(\"break\"):\n return apology(\"must provide break\")\n user_id = request.form.get(\"user_id\")\n date = request.form.get(\"date\")\n location = request.form.get(\"location\").lower()\n start_time = request.form.get(\"start_time\")\n end_time = request.form.get(\"end_time\")\n if (request.form.get(\"break\") == \"None\"):\n sbreak = None\n else:\n sbreak = request.form.get(\"break\")\n if (request.form.get(\"shift_id\")):\n shift_id = request.form.get(\"shift_id\")\n shift = Shift.query.get(shift_id)\n shift.user_id = user_id\n shift.date = datetime.strptime(date, \"%Y-%m-%d\")\n shift.location = location\n shift.start_time = start_time\n shift.end_time = end_time\n shift.sbreak = sbreak\n db.session.commit()\n\n return redirect(request.referrer)\n\n else:\n new_shift = Shift(date, start_time, end_time, location, user_id, sbreak)\n db.session.add(new_shift)\n db.session.commit()\n\n return redirect(request.referrer)",
"def log_status(self, sessionid, responsetime, httpstatus, contentstatus):\n self.cursor.execute(\"INSERT INTO status (sessionid, responsetime, httpstatus, contentstatus) VALUES (?,?,?,?);\", (sessionid, responsetime, httpstatus, contentstatus))\n self.connection.commit()",
"def add_feeding_log(self, myLog: FeedingLog):\n\t\tquery_str = [\n\t\t\t\"INSERT INTO feeding_logs(\",\n\t\t\t\"feeding_id, card_id, open_time, close_time, start_weight,\",\n\t\t\t\"end_weight, synced\",\n\t\t\t\")\",\n\t\t\t\"VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}');\"\n\t\t]\n\t\tself.c.execute(\n\t\t\tstr.join(\" \", query_str).format(\n\t\t\t\tstr(myLog.get_id()), myLog.get_card().get_uid(), time.mktime(myLog.get_open_time()),\n\t\t\t\ttime.mktime(myLog.get_close_time()), myLog.get_start_weight(), myLog.get_end_weight(),\n\t\t\t\tmyLog.get_synced()\n\t\t\t)\n\t\t)\n\t\tself.conn.commit()",
"def test_log_creation(self):\n\t\tself.p2 = Player(game=self.g, money=self.initial_money)\n\t\tself.p2.save()\n\n\t\tlogs_before = Log.objects.count()\n\t\tm2m_before = ConcernedPlayer.objects.count()\n\n\t\tself.g.add_event(event_type=Game.WIRETRANSFER, data=None, players=[self.p, self.p2])\n\n\t\tself.assertEqual(1, Log.objects.count() - logs_before)\n\t\tself.assertEqual(2, ConcernedPlayer.objects.count() - m2m_before)",
"def test_upload_run_logs(self):\n pass",
"def log_now(self):\n\t\tt = time.time()\n\t\tprint(\"logged\", time.ctime(t))\n\t\tself.history.append(t)",
"def save_log(self,text, stype='',svalue=0):\n gui.logs.append(text)\n self.insert_DB(text)",
"def done(self, _):\n session = Session() # Get a database session\n\n performer = self._target(session)\n performer.name = self.name.get_edit_text()\n performer.email = self.email.get_edit_text()\n performer.mobile = self.mobile.get_edit_text()\n performer.nfc = self.nfc.get_edit_text()\n\n session.add(performer)\n session.commit()\n self.callback(None, performer)",
"def insert_log(self, data):\n\t\ttry:\n\t\t\tcursor = self.conn.cursor()\n\n\t\t\tbot_id = int(data['bot_id'])\n\n\t\t\tinsertion_query = \"INSERT INTO logs \"\n\t\t\tif \"target_id\" in data:\n\t\t\t\tinsertion_query += \\\n\t\t\t\t\tf\"(id_bot, action, target_id) values {(bot_id, data['action'], int(data['target_id']))};\"\n\t\t\telse:\n\t\t\t\tinsertion_query += f\"(id_bot, action) values {(bot_id, data['action'])}; \"\n\n\t\t\tcursor.execute(insertion_query)\n\t\t\tlog.debug(f\"Inserted log <{insertion_query}> on database\")\n\t\texcept psycopg2.Error as error:\n\t\t\tself.conn.rollback()\n\n\t\t\tlog.exception(f\"ERROR <{error}> INSERTING NEW LOG <{data}>: \")\n\t\t\treturn {\"success\": False, \"error\": error}\n\t\texcept Exception as error:\n\t\t\tself.conn.rollback()\n\n\t\t\tlog.exception(f\"ERROR <{error}> INSERTING NEW LOG <{data}>: \")\n\t\t\treturn {\"success\": False, \"error\": error}\n\n\t\tself.conn.commit()\n\t\tcursor.close()\n\n\t\ttry:\n\t\t\tsignal.send(sender=PostgresAPI, table_name=\"Log\")\n\t\texcept Exception as error:\n\t\t\tlog.exception(f\"ERROR <{error}> when signaling rest to update cache\")\n\n\t\treturn {\"success\": True}",
"def save(self, db_path):\n conn = sqlite3.connect(db_path)\n c = conn.cursor()\n c.execute('''CREATE TABLE IF NOT EXISTS log (id INTEGER PRIMARY KEY AUTOINCREMENT, start DATETIME, end DATETIME, category VARCHAR, description TEXT)''')\n\n for k, v in self._slices.items():\n if not v[1]: # if not saved\n start_str = v[0].start.strftime(self.DT_FMT)\n end_str = v[0].end.strftime(self.DT_FMT)\n\n data = (start_str, end_str, v[0].category, v[0].description)\n \n c.execute('''INSERT INTO log (start, end, category, description) VALUES (?, ?, ?, ?)''', data)\n conn.commit()\n\n v = (v[0], True) # set slice as saved\n\n conn.close()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
insert the Employee's arrangement request into the Employee_Times table in the DB
|
def insert_employee_times(self, employee_id, date, start_time="NULL", end_time="NULL"):
    try:
        if not self.employee_time_exists(employee_id, date):
            if not self.check_for_db(): # if DB doesn't exist create it
                self.create_db()
            connection = sqlite3.connect(self.name)
            crsr = connection.cursor()
            # note: the start time is always stored as the SQL literal NULL here, regardless of the argument
            start_time = "NULL"
            # quote the date so SQLite stores it as text; start/end times are expected to be SQL literals (e.g. NULL)
            query = """INSERT INTO Employee_Times VALUES ({},"{}",{},{})""".format(employee_id, date, start_time, end_time)
            crsr.execute(query)
            connection.commit()
            connection.close()
            return True
        return False
    except IOError:
        print("DB Error")
|
[
"def insert_employee(self, employee_id, first_name, last_name, min_shifts):\n if not self.check_for_db(): # if DB doesn't exist create it\n self.create_db()\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n insret_query = \"\"\"INSERT INTO Employee\n VALUES ({}, {},{},{});\"\"\".format(employee_id, first_name, last_name, min_shifts)\n crsr.execute(insret_query)\n connection.commit()\n connection.close()",
"def register_arrangement(self, solution, sol_num=1):\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n for shift in solution:\n shift_id = shift.get_shift_id()\n employees_in_shift = shift.get_bartenders() + shift.get_waitresses()\n for employee in employees_in_shift:\n e_id = employee.get_id()\n query = \"\"\" INSERT INTO Employees_in_Shift VALUES ({},{},{})\"\"\".format(shift_id, e_id,sol_num)\n crsr.execute(query)\n connection.commit()\n connection.close()",
"def insertNewEmployee(self):\n try:\n self.takeUserInput()\n self.insertNewEmployeeinDB(self.empId,self.empName,self.jobName,self.managerId,self.hireDate,self.salary,self.commission,self.deptId)\n except Exception as e:\n print(\"Error inserting New Employee,\", e)",
"def populate(self, employees):\n # TODO Change return value?\n # TODO Test\n # TODO Fix problem with generating shifts that are too small\n # ^This typically happens when there is a location with say 6.5 hours per day of work\n # TODO Generate more than one schedule (do in another function)\n\n for location in self.schedule.shifts.keys():\n # Assign employees to their preferred times\n # Preferred times start with seniority\n employees = sorted(employees, key=lambda x: 0 if x.seniority else 1)\n for employee in employees:\n for shift in employee.get_preferred_times():\n # attempt to schedule them for their preferred shift\n self.attempt_to_assign_shift(location, employee, shift)\n\n for location in self.schedule.shifts.keys():\n\n # Assign employees to the rest of the times\n for shift in self.schedule.get_sorted_shifts_from_location(location):\n\n # Dont schedule employees to shifts that have >= the maximum number\n # of employees already schedules for it\n if not self.can_assign(location, shift):\n continue\n\n # Sort in order of hours assigned (least to greatest) also\n # factoring in seniority\n employees = sorted(employees, key=lambda x: \\\n x.hours_assigned - (Employee.SENIORITY_EXTRA_HOURS if x.seniority else 0))\n\n for employee in employees:\n self.attempt_to_assign_shift(location, employee, shift)\n # # Old code to assign a single shift at a time\n # if employee.available_for_shift(shift) and \\\n # not employee.get_total_assigned_hours() >= self.get_max_hours():\n # self.assign(location, employee, shift)\n # break",
"def log_shift(self, employee_id, date, start_hour, end_hour):\n try:\n if not self.logged_shift_exists(employee_id,date):\n if not self.check_for_db(): # if DB doesn't exist create it\n self.create_db()\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n query = \"\"\"INSERT INTO Employee_Shift VALUES ({},\\\"{}\\\",\\\"{}\\\",\\\"{}\\\")\"\"\".format(employee_id,date,start_hour,end_hour)\n crsr.execute(query)\n connection.commit()\n connection.close()\n except IOError:\n print(\"DB Error\")",
"def add_timing(prog_name, prob_size, timing) : \n\n cur = conn.cursor() \n\n cur.execute(\"INSERT INTO timings (problem_size, timing, program_name) VALUES (?, ?, ?)\",\n (prob_size, timing, prog_name) )\n\n conn.commit()",
"def add_arrival(self, employee_id, first_name, last_name, arrival_date, arrival_time):\r\n # If the system is new, we initiate ids list.\r\n if len(self.ids) == 0:\r\n self.ids.append(1)\r\n else: # Otherwise we continue to count from the last number in id's list.\r\n index = self.ids[-1]\r\n new_index = index + 1\r\n self.ids.append(new_index)\r\n self.attendance_id = self.ids[-1]\r\n\r\n # And add all necessary data to the instance.\r\n self.employee_id = employee_id\r\n self.first_name = first_name\r\n self.last_name = last_name\r\n self.arrival_date = arrival_date\r\n self.arrival_time = arrival_time",
"def insert_time_records(cur, df):\n time_data = (df['ts'], df['ts'].dt.hour, df['ts'].dt.day, df['ts'].dt.week, df['ts'].dt.month, df['ts'].dt.year, df['ts'].dt.weekday_name)\n column_labels = ('start_time', 'hour', 'day', 'week', 'month', 'year', 'weekday')\n \n # Convert tuples to a dict so they can be converted to a DataFrame\n time_dict = dict(zip(column_labels, time_data)) \n time_df = pd.DataFrame(time_dict)\n\n for i, row in time_df.iterrows():\n cur.execute(time_table_insert, list(row))",
"def create_order(self):\n proxy = self.env['hotel.reservation.order']\n for record in self:\n table_ids = [tableno.id for tableno in record.tableno]\n values = {\n 'reservationno':record.reservation_id,\n 'date1':record.start_date,\n 'table_no':[(6, 0, table_ids)],\n }\n proxy.create(values)\n return True",
"def add_arrival_to_system(employee_id, first_name, last_name, arrival_date, arrival_time):\r\n # First we construct new attendance instance and add data to it.\r\n # The method add arrival id to the instance automatically.\r\n attendance.add_arrival(employee_id, first_name, last_name, arrival_date, arrival_time)\r\n\r\n # Then we receive data back with the attendance instance we've just create.\r\n data = attendance.get_attendance()\r\n # and call the function to write received data to the attendance.csv file.\r\n if os.path.isfile('attendance.csv'):\r\n write_to_file('attendance.csv', data)\r\n else:\r\n write_to_file('attendance.csv', data, header=1)",
"def make_test_data(connection, cursor, num_employees, num_departments, num_cycles, num_expenses_per_day):\n\tprint 'make_test_data: num_departments=%d, num_employees=%d, num_cycles=%d, num_expenses_per_day=%d' \\\n\t % (num_departments, num_employees, num_cycles, num_expenses_per_day)\n\tprint ' (should give expenses of %d * n for department n)' % (num_employees * num_cycles * num_expenses_per_day)\n\t\n\t# Functions to generate values for each field\n\tfirst_name = 'Darren'\n\tdef get_name(employee_num):\n\t\treturn 'Smith.%03d' % employee_num\n\tdef get_date(day_num, fraction_of_day):\n\t\td = day_num % 28\n\t\tm = (day_num//28)%12\n\t\ty = 2000 + day_num//28//12\n\t\tseconds = int(24*60*60*fraction_of_day)\n\t\ts = seconds % 60\n\t\tn = (seconds//60) % 60\n\t\th = seconds//60//60\n\t\treturn '%04d-%02d-%02d %2d:%2d:%2d' % (y, m+1, d+1, h, n, s)\n\tdef get_cost(employee_num, department_num):\n\t\treturn department_num\n\tdef get_department(department_num):\n\t\treturn 'department %03d' % department_num\n\tdef get_description(employee_num, department_num, department_change_num):\n\t\treturn 'expense %03d:%03d for employee %03d' % (department_change_num, department_num, employee_num)\n\t\n\t# Create the employees\n\tdepartment_change_num = 0\n\tfor employee_num in range(num_employees): \n\t\tadd_employee(connection, cursor, first_name, get_name(employee_num), get_department(0))\n\t\n\t# Cycle each employee's department through all available num_cycles times\n\tfor c in range(num_cycles):\n\t\tfor department_num in range(0, num_departments): \n\t\t\tfor employee_num in range(num_employees): \n\t\t\t\tchange_department(cursor, first_name, get_name(employee_num), get_department(department_num), get_date(department_change_num, 0.0))\n\t\t\t\tfor expense_num in range(num_expenses_per_day):\n\t\t\t\t\tadd_expense(cursor, first_name, get_name(employee_num), get_date(department_change_num, (expense_num+1)/(num_expenses_per_day+2)), \n\t\t\t\t\t\t\t\tget_cost(employee_num, department_num), get_description(employee_num,department_num,department_change_num))\n\t\t\tdepartment_change_num += 1",
"def commit_required_time(self):\n for d in self.departments:\n d.commit_required_time()",
"def __insert_into_database(request_data: list, predictions: list) -> None:\n try:\n db_connection = __connect()\n cur = db_connection.cursor()\n try:\n date = datetime.now()\n data_joined = []\n\n # Joining data as tuples\n for input, predict in zip(request_data, predictions):\n row_data = (date, f\"{input}\", predict)\n data_joined.append(row_data)\n\n # Inserting data as a batch into database\n insert_query = \"insert into history (date,features,prediction) values %s\"\n psycopg2.extras.execute_values(\n cur, insert_query, data_joined, template=None, page_size=100\n )\n except:\n print(\"Couldn't insert values\")\n db_connection.close()\n except:\n print(\"Couldn't connect to database\")",
"def insert(cls, env, record):\n with env.db_transaction as db:\n\n cursor = db.cursor()\n sqlString = \"\"\"INSERT INTO ticket_template_store\n (tt_time,tt_user,tt_name,tt_field,tt_value)\n VALUES (%s,%s,%s,%s,%s)\"\"\"\n cursor.execute(sqlString, record)",
"def test_timetable_records_time_given_valid_input(timetable, work_times):\n # no exception should be thrown\n timetable.record_work_time(work_times)",
"def record_trip_event(client_id,driver_id,start_time,lat,lon,fare,distance,rating):\n\t\n\tsql_insert = \"\"\"INSERT INTO event_trip (client_id,driver_id,start_time,lat,lon,fare,distance,rating) VALUES (%s,%s,'%s',%s,%s,%s,%s,%s)\"\"\" % (client_id,driver_id,start_time,lat,lon,fare,distance,rating)\n\t\n\tprint sql_insert\n\t\n\tcur.execute(sql_insert)\n\tdb.commit()\n\tprint 'success'",
"def testPerformanceAgreement(self):\n self.cur.execute('''CREATE TABLE performance_agreements\n (Academic_year INTEGER, Academic_staff_id INTEGER, Status TEXT, Created_at TEXT, Created_by INTEGER, Submitted_at TEXT, Reviewed_at, Reviewed_by INTEGER, Approved_at TEXT, Approved_by INTEGER, Period_start TEXT, Period_end TEXT)''')\n self.con.commit()",
"def testPerformanceAgreement(self):\n self.cur.execute('''CREATE TABLE performance_agreements\n (Academic_year text, Academic_staff_id text, Status text, Created_at text, Created_by text, Submitted_at, Reviewed_at, Reviewed_by, Approved_at, Approved_by Period_start, Period_end)''')\n self.con.commit()",
"def add_record(conn, data):\n\n query = \"\"\" INSERT INTO person(serial_num,fname,lname,birth_date,\n identification_num,street,city,postcode_num,phone_num,\n note,date_created)\n VALUES(?,?,?,?,?,?,?,?,?,?,?) \"\"\"\n data.append(str(datetime.now().strftime(\"%d/%m/%y %H:%M%S.%f\")))\n c = conn.cursor()\n c.execute(query, data)\n conn.commit()\n\n print(f\"SQL: Record ADDED | serial_num = {data[0]}\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
gets positions of employee by id
|
def get_employee_positions(self, employee_id):
try:
        if self.check_for_db(): # check for DB existence
connection = sqlite3.connect(self.name)
crsr = connection.cursor()
query = """SELECT position,seniority
FROM Employee_Positions
WHERE employee_id={}""".format(employee_id)
crsr.execute(query)
data = crsr.fetchall()
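            # rows are (position, seniority) tuples for the requested employee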
connection.close()
return data
except IOError:
print("IO Error")
|
[
"def _retrieve_employee_id(response, id):\n id = int(id)\n if id == 0:\n return None, response\n for employee in response:\n if employee['id'] == id:\n return None, [employee]\n return None, []",
"def get_position(employee):\n if employee.sudo().job_id:\n return employee.sudo().job_id.name\n return \"\"",
"def get_enemy_positions(self):\n return [self.get_agent_position(id_) for id_ in self.enemy_ids]",
"def get_employees(request):\n ids = request.query_params.get('employee_ids', None)\n result = []\n if ids:\n arr = ids.split(',')\n for item in arr:\n employee = Employee.objects.filter(pk=item)\n if employee:\n result.append(employee)\n else:\n raise ValueError('Employee not found')\n return result",
"def getNodeXY(id):\n for n in nodes:\n if n[0] == id:\n return (n[2], n[3])",
"def get(id):\n return Employee_orm.query.get(id)",
"def get_game_roster_positions_by_game_id(self, game_id):\n return sorted(self.query(\n \"https://fantasysports.yahooapis.com/fantasy/v2/game/\" + str(game_id) + \"/roster_positions\",\n [\"game\", \"roster_positions\"]), key=lambda x: x.get(\"roster_position\").position)",
"def test_find_cell_id(self):\n pnt = Point(0, 0, 1)\n out = IntPoint(0, 0, 0)\n\n out = py_find_cell_id(pnt, 1.0)\n self.assertEqual(out.x, 0)\n self.assertEqual(out.y, 0)\n self.assertEqual(out.z, 1)\n\n pnt.x = -2.01\n out = py_find_cell_id(pnt, 1.0)\n self.assertEqual(out.x, -3)\n self.assertEqual(out.y, 0)\n self.assertEqual(out.z, 1)\n\n pnt.y = -1.01\n out = py_find_cell_id(pnt, 1.0)\n self.assertEqual(out.x, -3)\n self.assertEqual(out.y, -2)\n self.assertEqual(out.z, 1)",
"def retrieveCellIds(cls, listOfPoints):",
"def id2coords(self, _id, cell_size):\n cells_to_right = _id % 129600\n cells_down = _id / 129600\n lat_bottom = 90-cells_down * cell_size # Lat3 BOTTOM left corner\n lon_left = cells_to_right * cell_size - 180.0 # Lon3 bottom LEFT corner\n lat_top = lat_bottom + cell_size\n lon_right = lon_left + cell_size\n coords = [[lon_left,lat_top],[lon_right,lat_top],[lon_right,lat_bottom],[lon_left,lat_bottom],[lon_left,lat_top]]\n return coords",
"def lookup_pos(self, pos_id: int) -> str:\n return self._all_pos[pos_id]",
"def findItemId(self, x, y):\n for itemId in self.items:\n coords = self.coords(itemId)\n if self.containsPoint(coords, x, y):\n return itemId\n return None",
"def get_employee(id):\n user_data = requests.get(\n 'https://jsonplaceholder.typicode.com/users/', params={'id': id}\n ).json()\n\n username = user_data[0].get('username')\n\n \"\"\"Records all tasks that are owned by this employee\"\"\"\n tasks_user = requests.get(\n 'https://jsonplaceholder.typicode.com/todos/', params={'userId': id}\n ).json()\n\n \"\"\" Generate JSON data response \"\"\"\n response = {str(id): []}\n for task in tasks_user:\n data = {\n 'task': task.get('title'),\n 'completed': task.get('completed'),\n 'username': username\n }\n response[str(id)].append(data)\n\n \"\"\" Save JSON data in a file \"\"\"\n with open('{}.json'.format(id), 'w') as json_file:\n json.dump(response, json_file)",
"def get_position(khoros_object, identifier=None, category_details=None):\n return get_category_field(khoros_object, 'position', identifier, category_details)",
"def get_occupied_positions(self):\n positions = []\n for x in range(self.width):\n for y in range(self.height):\n if self.isOccupied((x,y)):\n positions.append((x,y))\n return positions",
"def get_employee_data(self, department):\n employee_data = []\n domain = [\n ('department_id', '=', department.id),\n ]\n if department.manager_id:\n domain += [\n '|', ('parent_id', '=', False),\n ('parent_id', '=', department.manager_id.id),\n ('parent_id.department_id', '!=', department.id),\n ]\n else:\n domain += [\n '|', ('parent_id', '=', False),\n ('parent_id.department_id', '!=', department.id),\n ]\n employees = self.env['hr.employee'].search(domain)\n for employee in employees:\n children = self.get_employee_children(employee)\n employee_data.append(children)\n return employee_data",
"def indicesByPdgId(self,pdgIds,useAbs=True,indices=None):\n result = [ ]\n if type(pdgIds)==type(0):\n pdgIds_ = [ pdgIds ]\n else:\n pdgIds_ = pdgIds\n parts = self.genParts\n if indices!=None:\n parts = [ self.genParts[i] for i in indices ]\n for mp in parts:\n id = mp.particle.pdgId()\n if useAbs:\n id = abs(id)\n if id in pdgIds_:\n result.append(mp.index)\n return result",
"def getPositionsDict(self):\n return {ID: self.elements[ID].getPosition() for ID in self.elements}",
"def search_employee(self):"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
method to get all bartenders
|
def get_bartenders(self):
try:
        if self.check_for_db(): # check for DB existence
connection = sqlite3.connect(self.name)
crsr = connection.cursor()
query = """SELECT E.employee_id, first_name, last_name, seniority
FROM Employee E JOIN Employee_Positions EP ON E.employee_id=EP.employee_id
WHERE position="bartender";"""
crsr.execute(query)
data = crsr.fetchall()
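            # rows contain (employee_id, first_name, last_name, seniority) for every employee holding the bartender position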
connection.close()
return data
except IOError:
print("Failed to get bartenders")
|
[
"def get_all_bodegas():\n bodegas = Bodega.objects.all()\n return bodegas",
"def get_beers():\n\n with engine.connect() as con:\n rs = con.execute('SELECT name, Manufacturer FROM Beers;')\n return [dict(row) for row in rs]",
"def get_beers():\n\n with engine.connect() as con:\n rs = con.execute('SELECT name, id, manf FROM beers;')\n return [dict(row) for row in rs]",
"def get_bids(self):\n return Bid_API.Bid().get()",
"def get_breed_list():\n\n # request a list of all dog breeds\n response = request(\"http://api.petfinder.com/breed.list\", dog_params)\n\n # dig into the returned data to find a list of breed entries\n breed_data_list = response.json()[\"petfinder\"][\"breeds\"][\"breed\"]\n\n # this will hold the list of breed names\n breeds = []\n\n # extract the breed name for each entry and add it to the list\n for breed in breed_data_list:\n breed_name = breed[\"$t\"]\n breeds.append(breed_name)\n\n # return the list of known breeds\n return breeds",
"def perform_listing(self):\n response = self.client.get(\"/api/breeds/list/all\")\n # Retrieve and feed json response\n self.populate_breed_list(response.json())",
"def discover_bulbs():\n return [MusicBulb.from_discovery(d) for d in discover()]",
"def get_all_tender():\n return Tender.objects.all().order_by('-start_date')",
"def get_tempetyres_brands():\n print(\"* getting all tempetyres brands...\", end='\\r')\n url = \"https://www.tempetyres.com.au/tyres\"\n response = requests.get(url)\n soup = BeautifulSoup(response.text, \"html.parser\")\n\n brands = [brand.text for brand in soup.find(id=\"Brand\").findAll(\"option\")][1:]\n\n if DEBUG:\n for brand in brands:\n print(brand)\n\n print(f\"* getting all tempetyres brands: done ({len(brands)} brands)\")\n return brands",
"def test_sub_breeds_list(api_client):\n\n res = api_client.get(path='/breed/hound/list').json()\n assert isinstance(res['message'], list)\n for breed in res['message']:\n assert re.match('^[a-z]*$', breed)\n assert res['status'] == 'success'",
"def get_brothers(self):\n return list(self._get_siblings(self.MALE))",
"def get(self):\n\n try:\n num = request.args.get('num')\n if num:\n num = int(num)\n user_id = current_user.get_id() if request.args.get('current_user') == '1' else None\n result = search_records.get_heat_brands(num, user_id)\n json_res = {\n 'brands': [x.to_json() for x in result]\n }\n return json_res, HTTPStatus.OK\n except Exception as err:\n return handle_internal_error(str(err))",
"def list(cls, context, limit=None, marker=None,\n sort_key=None, sort_dir=None, filters=None):\n db_bays = cls.dbapi.get_bay_list(context, limit=limit,\n marker=marker,\n sort_key=sort_key,\n sort_dir=sort_dir,\n filters=filters)\n return Bay._from_db_object_list(db_bays, cls, context)",
"def boys(self):\n return self._boys",
"def _set_brands(self, session, insert):\n logger = logging.getLogger('outfitter')\n brands = []\n urls = {}\n urls['male'] = \"http://www.asos.com/men/\" \\\n \"a-to-z-of-brands/cat/pgehtml.aspx?cid=1361\"\n urls['female'] = \"http://www.asos.com/Women\" \\\n \"/A-To-Z-Of-Brands/Cat/pgehtml.aspx?cid=1340\"\n for _, gender in enumerate(urls):\n logger.debug(\">> Calling \"+urls[gender])\n req = urllib2.Request(urls[gender], headers=HEADER)\n data = urllib2.urlopen(req).read()\n tree = lxml.html.fromstring(data)\n brandsel = 'div[id*=\\\"brands_section\\\"] div ul li a'\n brand_data = tree.cssselect(brandsel)\n for html_data in brand_data:\n brand = self._get_brand_data(html_data)\n brand['gender'] = gender\n orm_brand = self._insert_brand(session,\n brand,\n insert)\n brands.append(orm_brand)\n # endfor html_data\n # endfor enumerate(urls)\n logger.info(\"< Found \"+str(len(brands))+ \" brands\")\n return brands",
"def get_all_active_tenders():\n today = datetime.datetime.today()\n return Tender.objects.filter(start_date__lt=today, end_date__gt=today).order_by('-start_date')",
"def get_banks(self, request):\n country = self.request.data['country']\n banks = get_banks(country)\n return Response({\"banks\": banks}, status=status.HTTP_200_OK)",
"def get_all(self):\n return [self.item]",
"def banks():\n conn = engine.connect()\n banks_df = pd.read_sql(\"select distinct BankName from bank_data2\", conn)\n all_banks = banks_df.to_json(orient='records')\n # all_banks = [bank for bank in all_banks]\n return all_banks"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
method to get employees between eligible dates
|
def get_employees_by_date_range(self, org_id, start_date, end_date):
try:
            if self.check_for_db(): # check for DB existence
connection = sqlite3.connect(self.name)
crsr = connection.cursor()
query = """SELECT E.employee_id, first_name, last_name, ET.date, ET.start_time,E.min_shifts
FROM Employee E
JOIN Employee_Times ET ON E.employee_id=ET.employee_id
JOIN User_in_Org UIO ON ET.employee_id=UIO.user_id
WHERE org_id={}
AND ET.date BETWEEN \"{}\" AND \"{}\"
ORDER BY E.employee_id,ET.date""".format(org_id,start_date, end_date)
crsr.execute(query)
data = crsr.fetchall()
connection.close()
return data
else:
return None
    except sqlite3.Error:
print("Failed to get bartenders")
|
[
"def get_holidays_between_dates(self, start_date, end_date):",
"def _check_dates(self, cr, uid, ids):\n for employee in self.browse(cr, uid, ids):\n message= \"The %s must be anterior or equal to the current date!\"\n message1= \"The %s must be anterior to the employment date!\"\n message2= \"The %s must be anterior to the first employment date!\"\n if employee.birthday > time.strftime('%Y-%m-%d'):\n raise osv.except_osv(_('ERROR'), _(message %'birth date'))\n if employee.birthday > employee.employment_date:\n raise osv.except_osv(_('ERROR'), _(message1 %'birth date'))\n if employee.first_employement_date:\n if employee.birthday > employee.first_employement_date:\n raise osv.except_osv(_('ERROR'), _(message2 %'birth date'))\n if employee.employment_date > time.strftime('%Y-%m-%d'):\n raise osv.except_osv(_('ERROR'), _(message %'employment date'))\n if employee.first_employement_date:\n if employee.first_employement_date > time.strftime('%Y-%m-%d'):\n raise osv.except_osv(_('ERROR'), _(message %'first employment date'))\n if employee.first_employement_date > employee.employment_date:\n raise osv.except_osv(_('ERROR'), _('first employment date must be anterior or equal to the employment date!'))\n if employee.end_date:\n if employee.end_date < employee.employment_date:\n raise osv.except_osv(_('ERROR'), _('end date must be After the start date!'))\n return True",
"def get_elections_date_between_dates(self, start_date, end_date):",
"def employed_in_period(self, from_date: date, until_date: date):\n\n if not self.employed_from:\n return False\n\n if self.employed_from < until_date and self.employed_until > from_date:\n return True\n else:\n return False",
"def get_by_date(date_from, date_by):\n logger.debug('Filtered by dates list of employees was returned')\n return Employee.query.filter(date_from <= Employee.date_of_birthday,\n date_by >= Employee.date_of_birthday).all()",
"def dateRangeExpenses():\n\n year1 = int(request.args['year1'])\n month1 = int(request.args['month1'])\n day1 = int(request.args['day1'])\n year2 = int(request.args['year2'])\n month2 = int(request.args['month2'])\n day2 = int(request.args['day2'])\n selectedDate1 = date(year1, month1, day1)\n selectedDate2 = date(year2, month2, day2)\n\n date_range_expenses = getDateRangeTotalExpenses(selectedDate1, selectedDate2, current_user.id)\n\n resp = jsonify(status_code=200,\n savings = date_range_expenses)\n return resp",
"def test_available_leave_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Mosh\", last_name=\"Pitt\")\n staff = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n annual_leave = mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staff,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n carried_over_days=0,\n )\n\n months = range(1, 13)\n\n for month in months:\n self.assertEqual(\n month * 1.75, annual_leave.get_available_leave_days(month=month)\n )",
"def target_dates(self):\n if self.target_type == u'allocation':\n return ((self.start, self.end),)\n\n if self.target_type == u'group':\n return self._target_allocations().with_entities(\n self.models.Allocation._start,\n self.models.Allocation._end\n ).all()\n\n if self.target_type == u'recurrence':\n time_start = self.start.time()\n time_end = self.end.time()\n\n date_start = self.start.date()\n from dateutil.rrule import rrulestr\n\n rule = rrulestr(self.rrule, dtstart=date_start)\n return [get_date_range(date, time_start, time_end)\n for date in rule]\n\n raise NotImplementedError",
"def date_range_search():\n start_date = get_date(\"Enter the beginning date in the date range.\\n\\n\")\n end_date = get_date(\"Enter the end date in the date range.\\n\\n\")\n if start_date > end_date:\n start_date, end_date = end_date, start_date\n return Task.select().where(Task.created_at.between(start_date, end_date))",
"def _check_date(self, cr, uid, ids):\n for deleg in self.browse(cr, uid, ids):\n if deleg.dismissal_date <= deleg.employee_id.first_employement_date:\n return False\n return True",
"def find_available_dates(self, number_of_dates):\n\t\tavailable_dates = []\n\t\trule = rrule.rrule(self.repeat_period, dtstart=self.end_date,\n\t\t\t\t\t\t interval=self.repeat_every, count=number_of_dates*4)\n\t\truleset = rrule.rruleset()\n\t\truleset.rrule(rule)\n\t\truleset.exdate(datetime.combine(self.end_date, time()))\n\n\t\texclude_query = Q(end_time__lte=self.start_time) | Q(start_time__gte=self.end_time) | Q(id=self.id)\n\t\tconflict_slots = Appointment.objects.filter(healer=self.healer, confirmed=True).\\\n\t\t\t\t\t\t\t\t\t\t\t\tfilter_by_date(self.end_date).\\\n\t\t\t\t\t\t\t\t\t\t\t\texclude(exclude_query)\n\n\t\texdates = []\n\t\tif len(conflict_slots):\n\t\t\tfrom_date = rule[1]\n\t\t\tto_date = rule[-1]\n\t\t\tfor slot in conflict_slots:\n\t\t\t\tif slot.is_single():\n\t\t\t\t\texdates.append(datetime.combine(slot.start_date, time()))\n\t\t\t\telse:\n\t\t\t\t\texruleset = rrule.rruleset()\n\t\t\t\t\texruleset.rrule(slot.get_rrule_object(skip_time=True))\n\t\t\t\t\tfor timestamp in slot.exceptions:\n\t\t\t\t\t\texruleset.exdate(datetime.utcfromtimestamp(timestamp))\n\t\t\t\t\texdates.extend(exruleset.between(from_date, to_date, inc=True))\n\n\t\trepeat_count = 0\n\t\texceptions = []\n\t\tfor rule_date in ruleset:\n\t\t\trepeat_count += 1\n\t\t\tif rule_date not in exdates:\n\t\t\t\tavailable_dates.append(rule_date)\n\t\t\t\tif len(available_dates) == number_of_dates:\n\t\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\texceptions.append(get_timestamp(rule_date))\n\n\t\tif len(available_dates)==number_of_dates:\n\t\t\treturn {\n\t\t\t\t'dates': available_dates,\n\t\t\t\t'exceptions': exceptions,\n\t\t\t\t'repeat_count': repeat_count\n\t\t\t}",
"def get_attacks_between_dates(self, start_date, end_date):",
"def check_holidays(self):\n\n is_over_allocation_days = False\n for record in self:\n if record.type == 'add':\n continue\n\n # Check leaves balance for leave requests\n # leaves_rest: remaining_leaves of employee by leave type\n # leave_asked: number_of_days of holiday_line\n if record.holiday_type == 'employee'\\\n and record.employee_id:\n for i in record.holiday_line:\n leave_asked = -(i.number_of_days)\n if leave_asked < 0.00:\n if i.holiday_status_id \\\n and not i.holiday_status_id.limit:\n status_id = i.holiday_status_id.id\n employee_id = record.employee_id.id\n status = i.holiday_status_id.get_days(employee_id)\n leaves_rest = status[status_id]['remaining_leaves']\n if (record.state == 'validate' and\n leaves_rest < 0) or \\\n (record.state != 'validate' and\n leaves_rest < -(leave_asked)):\n # 1. For approved leave request,\n # the number of days on holiday lines\n # is taken into the remaining leaves.\n # if remaining leaves < 0, means balance < 0\n # 2. For leave request not approved\n # the remaining leaves must be greater than\n # number of days on leave request line,\n # means balance < 0\n is_over_allocation_days = True\n elif record.holiday_type == 'category' and record.category_id:\n for i in record.holiday_line:\n leave_asked = -(i.number_of_days)\n if leave_asked < 0.00:\n if not i.holiday_status_id.limit:\n cate_id = record.category_id.id\n status_id = i.holiday_status_id.id\n status = i.holiday_status_id.get_days(cate_id)\n leaves_rest = status['remaining_leaves']\n if leaves_rest < -(leave_asked):\n is_over_allocation_days = True\n record.write({'is_over_allocation_days': is_over_allocation_days})\n return True",
"def striked_off_months(self, joining_date,start_date,end_date,last_date_of_month,month_year_obj):\n fee_month_obj = self.env['fee.month']\n if start_date <= joining_date <= end_date:\n cal_date = joining_date\n else:\n cal_date = start_date\n after_joining_months = []\n cal_month = self.months_between(cal_date, last_date_of_month)\n for count_month in cal_month:\n month_data = fee_month_obj.search([('name', '=', count_month[0]),\n ('year', '=', count_month[1]),\n ('leave_month', '=', False),\n ('batch_id', '=', self.academic_year_id.id)])\n if len(month_data) > 1:\n raise except_orm(_(\"Warning!\"), _(\"multiple month's found !\"))\n if month_data.id:\n after_joining_months.append(month_data)\n if len(after_joining_months) > 0:\n return after_joining_months\n else:\n return month_year_obj",
"def show_employees_birthday():\n\n logger.debug('Function show_employees_birthday(). Routed to /employees')\n titles = []\n message = 'No results'\n\n start = request.args.get('start')\n end = request.args.get('end')\n if not end:\n end = start\n if not start:\n start = end\n employees = es.find_by_birthday(start, end)\n logger.debug('Get employees with birthday between %s and %s. Amount = %i',\n start, end, len(employees))\n\n if employees:\n titles = ['Name', 'Birthday', 'In Department']\n message = f'Find {len(employees)} employee(s)'\n\n return render_template('employees.html',\n title='Employees',\n table_title='List of Employees',\n headers=titles,\n message=message,\n employees=employees,\n start=start,\n end=end)",
"def get_events_in_date_range(self, from_, to):\n return # osid.calendaring.EventList",
"def get_by_range():\n range_dict = utils.enter_searching_date_range()\n\n # create both datetime obj out of dict\n dt_start = datetime.datetime.strptime(range_dict['dt_start'], '%d/%m/%Y')\n dt_end = datetime.datetime.strptime(range_dict['dt_end'], '%d/%m/%Y')\n\n # let's make a validation for that range\n if dt_start > dt_end:\n # Do all again, range is wrong\n print(\"Starting date {starting} is greater than ending date {ending}\".format(starting=dt_start, ending=dt_end))\n get_by_range()\n else:\n tasks = utils.find_tasks_by_date_range(dt_start, dt_end)\n print_tasks(tasks)",
"def _get_expired_contracts_by_department(self, cr, uid, expiring_date):\n today = date.today().strftime(DF)\n condition = ''\n if expiring_date > today:\n condition = \"AND (con.date_end <= '%s' AND con.date_end > '%s')\" \\\n % (expiring_date, today)\n else:\n condition = \"AND con.date_end <= '%s'\" % (expiring_date)\n sql = \"\"\"\n SELECT con.id, emp.name_related as employee,\n con.name, con.date_end,\n dept.name as department, job.name as job\n FROM hr_contract con\n JOIN hr_employee emp ON con.employee_id = emp.id\n LEFT JOIN hr_department dept ON emp.department_id = dept.id\n LEFT JOIN hr_job job ON con.job_id = job.id\n WHERE con.no_renewal = False\n AND NOT EXISTS(\n SELECT 1 FROM hr_contract\n WHERE\n (date_start > con.date_end\n OR date_end IS NULL)\n AND employee_id = con.employee_id\n )\n %s\n ORDER BY date_end\n \"\"\" % condition\n cr.execute(sql)\n res = cr.dictfetchall()\n expired_contracts_department = {}\n for line in res:\n department = line['department']\n if department not in expired_contracts_department:\n expired_contracts_department[department] = []\n expired_contracts_department[department].append(line)\n return expired_contracts_department",
"def search_employee(self):"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
get shifts between given dates
|
def get_shifts_by_date_range(self, org_id, start_date, end_date): # in use
try:
            if self.check_for_db(): # check for DB existence
connection = sqlite3.connect(self.name)
crsr = connection.cursor()
query = """SELECT shift_id, S.date, start_time
FROM Shift S
WHERE org_id={} AND
S.date BETWEEN \"{}\" AND \"{}\" """.format(org_id, start_date, end_date)
crsr.execute(query)
data = crsr.fetchall()
connection.close()
return data
    except sqlite3.Error:
        print("Failed to get shifts by date range")
|
[
"def shift(ts1, ts2, compare_start, compare_end, hours_forward, hours_backward, seconds_forward, seconds_backward):\r\n\r\n dets_time = []\r\n print(\"Started shifting...\")\r\n # We do not need to check every second. just +/- 60 seconds for all hours\r\n for h in range(-hours_backward, hours_forward + 1):\r\n common = shift_seconds(ts1, ts2, compare_start, compare_end, seconds_backward, seconds_forward, at_hour=h) \r\n dets_time.append((h,common))\r\n return dets_time",
"def _computeShift(x, y):\n shift = np.abs(x.data - y.data) # how many days off are we\n shift = (shift > 0.5 * 365) * (365 - shift) + (shift <= 0.5 * 365) * shift\n shift /= 0.5 * 365\n shift = 1.0 - shift\n return shift",
"def _create_schedule_lines(self, employees, date_from, date_to, shift_id, weekend_days):\n date_from = fields.Date.from_string(date_from)\n date_to = fields.Date.from_string(date_to)\n dates = (date_from.month == date_to.month or (date_to.month != date_from.month and \\\n (date_to - date_from).days == 1 and \\\n shift_id.time_to <= shift_id.time_from)) and [[date_from, date_to]] or []\n if not dates:\n while date_to.month != date_from.month:\n last_day = calendar.monthrange(date_from.year, date_from.month)[1]\n last_day_date = datetime(date_from.year, date_from.month, last_day).date()\n dates.append([date_from, last_day_date])\n date_from = last_day_date + relativedelta(days=1)\n if date_from.month == date_to.month:\n dates.append([date_from, date_to])\n for emp in employees:\n for date_interval in dates:\n start_date = date_interval[0]\n end_date = date_interval[1]\n shift = emp.employee_shift_schedule_ids. \\\n filtered(lambda record: fields.Date.from_string(record.date_from) <= start_date <=\n fields.Date.from_string(record.date_to)\n # and int(fields.Date.from_string(record.date_from).strftime(\"%m\")) == start_date.month\n )\n res_calendar = shift and shift.resource_calendar_id or \\\n self._create_month_calendar(emp, start_date)\n if self.assign_flexible_hour:\n res_calendar.flexible_hours = shift_id.flexible_hours\n diff = (end_date - start_date).days\n if diff > 6:\n for i in range(0, 7):\n if i not in weekend_days:\n week_day = [list(day)[1] for day in DAY_OF_WEEK\n if i == int(list(day)[0])][0]\n res_calendar.write({\n 'attendance_ids': [(0, 0, {\n 'name': week_day,\n 'dayofweek': str(i),\n 'date_from': start_date,\n 'date_to': end_date,\n 'hour_from': shift_id.time_from,\n 'hour_to': shift_id.time_to,\n 'single_assign_id': self.id,\n })]\n })\n else:\n if shift_id.time_to <= shift_id.time_from:\n end_date = end_date - relativedelta(days=1)\n while end_date >= start_date:\n day_week_nu = start_date.weekday()\n # weekend_days = self.weekend_ids.mapped('code')\n if day_week_nu not in weekend_days:\n day_week = [list(day)[1] for day in DAY_OF_WEEK\n if day_week_nu == int(list(day)[0])][0]\n shift_end_date = start_date + relativedelta(days=1) \\\n if shift_id.time_to < shift_id.time_from else start_date\n res_calendar.write({\n 'attendance_ids': [(0, 0, {\n 'name': day_week,\n 'dayofweek': str(day_week_nu),\n 'date_from': start_date,\n 'date_to': shift_end_date,\n 'hour_from': shift_id.time_from,\n 'hour_to': shift_id.time_to,\n 'single_assign_id': self.id,\n })]\n })\n start_date = start_date + relativedelta(days=1)",
"def crossOverShift(self, employees):\n for call_date in employees:\n if call_date < self.dateEditFrom.date().toPython().isoformat():\n continue\n\n for emp in employees[call_date]:\n currDate = employees[call_date][emp].loginDate\n strLoginTime = employees[call_date][emp].getTgenStr()\n strLogoutTime = employees[call_date][emp].getLogoutTimeStr()\n strLogoutTimePrev = employees[call_date][emp].getLogoutTimePrevStr()\n sched = employees[call_date][emp].schedule\n\n # check if cross-over shift\n if not sched.startswith('R'): # not a rest day\n if int(sched[:2]) > int(sched[2:]): # in-hour > out-hour\n employees[call_date][emp].isCrossOver = True\n\n #skip absent employee i.e. no timein/timeout\n if strLoginTime == '00:00:00' and strLogoutTime == '00:00:00':\n continue\n\n #check for the logout time of the crossover shift for previous day\n if strLogoutTimePrev != '00:00:00':\n # subtract 1 day from loginDate to get the previous day\n prevDayStr = (employees[call_date][emp].loginDate - timedelta(1)).isoformat()\n #print \"call_date = \", call_date, \"prevDayStr = \", prevDayStr\n\n if emp in employees[prevDayStr]:\n #if employees[prevDayStr][emp].getLogoutTimeStr() == '23:59:59':\n employees[prevDayStr][emp].logoutDate = currDate\n employees[prevDayStr][emp].logoutTime = employees[call_date][emp].logoutTimePrev\n if employees[prevDayStr][emp].doc6ss_break_from == None:\n employees[prevDayStr][emp].doc6ss_break_from = employees[call_date][emp].doc6ss_break_from\n employees[prevDayStr][emp].doc6ss_break_to = employees[call_date][emp].doc6ss_break_to\n\n #print 'call date =', call_date, 'call logout =', employees[emp][call_date].logoutTime, \\\n #'call login =', employees[call_date][emp].loginTime, 'prevLogout = ', employees[call_date][emp].logoutTimePrev\n #print 'prev day =', prevDayStr, 'prev logout =', employees[emp][prevDayStr].logoutTime, \\\n #'prev login =', employees[prevDayStr][emp].loginTime, 'prevLogout = ', employees[prevDayStr][emp].logoutTimePrev",
"def get_sliding_window_indexes(self, window_length, window_shift, unit='m', return_half_dates=False):\n\n from dateutil.relativedelta import relativedelta\n\n if unit == 'm':\n length = relativedelta(months = +window_length)\n shift = relativedelta(months = +window_shift)\n elif unit == 'd':\n length = relativedelta(days = +window_length)\n shift = relativedelta(days = +window_shift)\n elif unit == 'y':\n length = relativedelta(years = +window_length)\n shift = relativedelta(years = +window_shift)\n else:\n raise Exception(\"Unknown time unit! Please, use one of the 'd', 'm', 'y'!\")\n\n ndxs = []\n if return_half_dates:\n half_dates = []\n window_start = self.get_date_from_ndx(0)\n window_end = window_start + length\n while window_end <= self.get_date_from_ndx(-1):\n ndx = self.select_date(window_start, window_end, apply_to_data=False)\n ndxs.append(ndx)\n if return_half_dates:\n half_dates.append(window_start + (window_end - window_start) / 2)\n window_start += shift\n window_end = window_start + length\n\n # add last\n ndxs.append(self.select_date(window_start, window_end, apply_to_data=False))\n if return_half_dates:\n half_dates.append(window_start + (self.get_date_from_ndx(-1) - window_start) / 2)\n\n if np.sum(ndxs[-1]) != np.sum(ndxs[-2]) and self.verbose:\n print(\"**WARNING: last sliding window is shorter than others! (%d vs. %d in others)\"\n % (np.sum(ndxs[-1]), np.sum(ndxs[-2])))\n\n if return_half_dates:\n return ndxs, half_dates\n else:\n return ndxs",
"def find_tasks_to_shift(date):\n\n previous_entry = find_previous_entry(date)\n\n # If there is no previous entry, punt\n if previous_entry == \"\":\n return\n\n # Find all tasks and put in list\n task_section = False\n tasks = []\n\n for line in fileinput.input(os.path.join(NOTES_DIR, previous_entry)):\n if \"## Tasks.todo\" in line:\n task_section = True\n elif line.startswith(\"##\"):\n task_section = False\n elif task_section:\n line = line.strip()\n if line != \"\":\n tasks.append(line)\n\n # Iterate over tasks and fine undated and incomplete tasks to shift\n done_re = re.compile(r\"\\s@(done|cancell?ed)\")\n reminder_re = re.compile(r\"([^\\s\\\"`'\\(\\[])remind(ed)?\\((.*)(\\s\\\"(.*?)\\\")?\\)\")\n\n tasks_to_shift = []\n for task in tasks:\n done = done_re.search(task)\n date_match = reminder_re.search(task)\n\n # Only shift incomplete and untimestamped tasks\n if (not done and not date_match):\n tasks_to_shift.append(task)\n\n return tasks_to_shift",
"def timeshift(self, shift='random'):\n if shift == 'random':\n one_month = pd.Timedelta('30 days').value\n two_years = pd.Timedelta('730 days').value\n random_timedelta = - pd.Timedelta(random.uniform(one_month, two_years)).round('s')\n self.timeshift(random_timedelta)\n\n dfs_to_shift = [self.ECG, self.ACC, self.Marker, self.HRV]\n if self.data is not None:\n dfs_to_shift.append(self.data)\n\n if isinstance(shift, pd.Timestamp):\n self.start_time = shift\n for dataframe in dfs_to_shift:\n timedeltas = dataframe.index - dataframe.index.min()\n dataframe.index = shift + timedeltas\n if isinstance(shift, pd.Timedelta):\n for dataframe in dfs_to_shift: \n dataframe.index += shift",
"def get_attacks_between_dates(self, start_date, end_date):",
"def filter_date(data, start_shift, end_shift):\n filtered = data[data['pickup_datetime'] > start_shift]\n filtered = filtered[filtered['pickup_datetime'] < end_shift]\n\n return filtered",
"def returnShiftedCoordinate(cls, coordinates, shiftingSteps):",
"def daterange(date1, date2):\n for n in range(int((date2 - date1).days) + 1):\n yield date1 + dt.timedelta(n)",
"def get_holidays_between_dates(self, start_date, end_date):",
"def _calculate_timeshifts(self, stretched_ref_signals: pd.DataFrame):\n # Resample again with stretched signal\n df_equi = get_equidistant_signals(stretched_ref_signals, self.sampling_freq)\n segments = self.extractor.get_segments(df_equi)\n helpers.verify_segments(stretched_ref_signals.columns, segments)\n\n for source in df_equi.columns:\n if source == self.ref_source_name:\n continue\n\n timeshifts = Synchronizer._get_timeshift_pair(\n df_equi, self.ref_source_name, source, segments\n )\n timedelta = timeshifts[\"first\"] - timeshifts[\"second\"]\n if timedelta > pd.Timedelta(0):\n logger.warning(\n f\"Timedelta between shifts after stretching: {timedelta}.\"\n f\"This should be very small: the timedelta to the reference signal\"\n f\"should be equal for both start and end so a simple offset aligns the\"\n f\"signals perfectly.\"\n )\n logger.info(\"Timeshift for {}: {}\".format(source, timeshifts[\"first\"]))\n self.sources[source][\"timeshift\"] = timeshifts[\"first\"]",
"def findRiffledSchedule(schedule1, schedule2):\n \n for possibleOffsetLists in map(lambda p: range(p[0], p[1]), indolentPeriods(schedule2, 500)):\n for possibleOffset in possibleOffsetLists:\n updatedSchedule2 = shiftScheduleOffset(schedule2, possibleOffset)\n if not schedulingCollisionsFlag(schedule1, updatedSchedule2):\n return {'d1': schedule1, 'd2': updatedSchedule2}\n return {'d1': schedule1, 'd2': schedule2}",
"def _find_gaps(schedule, starting_from=None):\n gaps = []\n\n current = starting_from if starting_from else timezone.now()\n for shift in models.Shift.objects.filter(schedule=schedule, deleted=False).order_by('start', 'end'):\n if shift.start > current:\n gaps.append((current, shift.start))\n if shift.end > current:\n current = shift.end\n\n return gaps",
"def rest_days(self, games):\n game_dates = pd.concat([\n games[[\"date\", \"team_home\"]].rename(\n columns={\"team_home\": \"team\"}),\n games[[\"date\", \"team_away\"]].rename(\n columns={\"team_away\": \"team\"}),\n ]).sort_values(\"date\")\n\n game_dates['date_prev'] = game_dates.date\n\n game_dates = pd.merge_asof(\n game_dates[['team', 'date']],\n game_dates[['team', 'date', 'date_prev']],\n on='date', by='team', allow_exact_matches=False)\n\n for team in [\"home\", \"away\"]:\n\n game_dates_team = game_dates.rename(\n columns={\n 'date_prev': f'date_{team}_prev',\n 'team': f'team_{team}'})\n\n games = games.merge(game_dates_team, on=['date', f'team_{team}'])\n\n one_day = pd.Timedelta(\"1 days\")\n games[\"tm_rest_days_home\"] = np.clip(\n (games.date - games.date_home_prev) / one_day, 3, 16).fillna(7)\n games[\"tm_rest_days_away\"] = np.clip(\n (games.date - games.date_away_prev) / one_day, 3, 16).fillna(7)\n\n return games",
"def get_islands(islands, input_timespans):\n\n output_timespans = set()\n\n # For every input timespan, we check with wat islands it overlaps\n for input_timespan in input_timespans:\n input_start, input_end = input_timespan\n for island in islands:\n island_start, island_end = island\n\n # If the input timespan starts before the start of island, and the\n # input timespan end after the start of the island.\n # In other words: The island starts during the input timespan\n # Examples:\n # input timespan start\n # |\n # | island start\n # | \\\n # island: \\ ======= OR ======\n # input_timespan: ====== \\ ===========\n # \\ island end\n # |\n # input timespand end\n #\n if input_start <= island_start and input_end >= island_start:\n output_timespans.add(island)\n continue\n\n # If the input timespan starts after the start of the island, and\n # the input timespand ends before the end of the island.\n # In other words: The input timespan starts during the island\n # \n # island: ======= OR ==========\n # input_timespan: ====== =====\n #\n if input_start >= island_start and input_start <= island_end:\n output_timespans.add(island)\n continue\n\n return output_timespans",
"def isoweek_starts(date_start, date_end):\n last_week = isoweek_start(date_end)\n cur_week = isoweek_start(date_start)\n\n week_starts = []\n while cur_week <= last_week:\n week_starts.append(cur_week)\n cur_week += timedelta(days=7)\n return week_starts",
"def get_orbit_dates(dates):\n sorted_dates = sorted(dates)\n result = []\n for d in sorted_dates:\n if len(result) == 0 or d - result[-1][\"to\"] > timedelta(hours=1):\n result.append({\"from\": d, \"to\": d}) # new orbit\n else:\n result[-1][\"to\"] = d # same orbit\n\n return result"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
register solution to DB (Employees_in_Shifts table)
|
def register_arrangement(self, solution, sol_num=1):
connection = sqlite3.connect(self.name)
crsr = connection.cursor()
for shift in solution:
shift_id = shift.get_shift_id()
employees_in_shift = shift.get_bartenders() + shift.get_waitresses()
for employee in employees_in_shift:
e_id = employee.get_id()
query = """ INSERT INTO Employees_in_Shift VALUES ({},{},{})""".format(shift_id, e_id,sol_num)
crsr.execute(query)
connection.commit()
connection.close()
|
[
"def insert_employee(self, employee_id, first_name, last_name, min_shifts):\n if not self.check_for_db(): # if DB doesn't exist create it\n self.create_db()\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n insret_query = \"\"\"INSERT INTO Employee\n VALUES ({}, {},{},{});\"\"\".format(employee_id, first_name, last_name, min_shifts)\n crsr.execute(insret_query)\n connection.commit()\n connection.close()",
"def log_shift(self, employee_id, date, start_hour, end_hour):\n try:\n if not self.logged_shift_exists(employee_id,date):\n if not self.check_for_db(): # if DB doesn't exist create it\n self.create_db()\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n query = \"\"\"INSERT INTO Employee_Shift VALUES ({},\\\"{}\\\",\\\"{}\\\",\\\"{}\\\")\"\"\".format(employee_id,date,start_hour,end_hour)\n crsr.execute(query)\n connection.commit()\n connection.close()\n except IOError:\n print(\"DB Error\")",
"def addHardMinShiftsWorkersPerDay(self):",
"def populate(self, employees):\n # TODO Change return value?\n # TODO Test\n # TODO Fix problem with generating shifts that are too small\n # ^This typically happens when there is a location with say 6.5 hours per day of work\n # TODO Generate more than one schedule (do in another function)\n\n for location in self.schedule.shifts.keys():\n # Assign employees to their preferred times\n # Preferred times start with seniority\n employees = sorted(employees, key=lambda x: 0 if x.seniority else 1)\n for employee in employees:\n for shift in employee.get_preferred_times():\n # attempt to schedule them for their preferred shift\n self.attempt_to_assign_shift(location, employee, shift)\n\n for location in self.schedule.shifts.keys():\n\n # Assign employees to the rest of the times\n for shift in self.schedule.get_sorted_shifts_from_location(location):\n\n # Dont schedule employees to shifts that have >= the maximum number\n # of employees already schedules for it\n if not self.can_assign(location, shift):\n continue\n\n # Sort in order of hours assigned (least to greatest) also\n # factoring in seniority\n employees = sorted(employees, key=lambda x: \\\n x.hours_assigned - (Employee.SENIORITY_EXTRA_HOURS if x.seniority else 0))\n\n for employee in employees:\n self.attempt_to_assign_shift(location, employee, shift)\n # # Old code to assign a single shift at a time\n # if employee.available_for_shift(shift) and \\\n # not employee.get_total_assigned_hours() >= self.get_max_hours():\n # self.assign(location, employee, shift)\n # break",
"def test_vmware_service_resources_vm_workshifts_post(self):\n pass",
"def insert_employee_times(self,employee_id,date, start_time=\"NULL\", end_time=\"NULL\"):\n try:\n if not self.employee_time_exists(employee_id, date):\n if not self.check_for_db(): # if DB doesn't exist create it\n self.create_db()\n connection = sqlite3.connect(self.name)\n crsr = connection.cursor()\n start_time=\"NULL\"\n query = \"\"\"INSERT INTO Employee_Times VALUES ({},{},{},{})\"\"\".format(employee_id, date, start_time, end_time)\n\n crsr.execute(query)\n connection.commit()\n connection.close()\n return True\n return False\n\n except IOError:\n print(\" DBError\")",
"def insertNewEmployee(self):\n try:\n self.takeUserInput()\n self.insertNewEmployeeinDB(self.empId,self.empName,self.jobName,self.managerId,self.hireDate,self.salary,self.commission,self.deptId)\n except Exception as e:\n print(\"Error inserting New Employee,\", e)",
"def add_employee(self):\n try:\n self.log.info(\"Check wether the insite 360 is enabled or not\")\n insite360_enabled = FromInsite360.Check_I360_Connected()\n self.log.info(f\"Insite 360 is enabled [{insite360_enabled}]\")\n\n if not(insite360_enabled):\n FromInsite360.insite360_configure()\n self.proceed_to_test = FromInsite360.Register_UnRegister_Insite360(insite360_enabled)\n insite360_enabled = FromInsite360.Check_I360_Connected()\n self.log.info(f\"After Register_UnRegister_Insite360, latest Insite 360 is enabled [{insite360_enabled}]\")\n self.log.info(\"Calling refresh_site_after_connect method\")\n try:\n if (insite360web.refresh_site_after_connect()):\n tc_fail(\"Failed, Site is not Online, try after some time...\")\n except:\n self.log.error(\"Issue with local function..\")\n self.log.info(\"After register validate at passport.\")\n\n startime_from_script = datetime.now()\n status = False\n\n addStatus = insite360web.is_employee_exists_by_id(self.operator_id)\n if (addStatus == 0):\n # Add new employee with id operator ID\n config = {\n \"First Name\" : \"Auto\", \"Last Name\" : \"Manager\",\n \"Operator Id\" : \"\", \"Birth Date\" : \"07-02-1982\",\n \"Address Line 1\" : \"786 Fake Street\", \"Address Line 2\" : \"Bldg 1\",\n \"Address Line 3\" : \"Suite 234\", \"City\" : \"Greensboro\",\n \"State\" : \"NORTH CAROLINA\", \"Postal Code\" : \"27410\",\n \"Phone\" : \"(897) 867-7737\", \"Security Group\" : \"Manager\",\n \"Assigned Stores\" : \"TestAutoSite1\", \"Clock InOut Required\" : \"False\",\n \"Override the Blind Balancing store option\" : \"True\",\n \"Language\" : \"US English\", \"Theme\" : \"Passport Retro\",\n \"Keypad Calculator\" : \"True\", \"Hand Preference Left\" : \"True\"\n }\n status = insite360web.add_new_employee(self.operator_id, config)\n elif (addStatus == 1):\n # Check and update the assigned stores value\n self.proceed_to_test = True\n self.log.info(\"Employee already exist(s) in insite360.\")\n self.log.info(\"Check and updated the 'Assigned stores' value\")\n status = insite360web.set_assigned_store_validate(self.operator_id, \"TestAutoSite1\")\n # Check the command received from Insite360 or not? if not modify the test auto site\n\n if (status):\n self.proceed_to_test = True\n emp_timeout_status = FromInsite360.JSONReceivedTimeOutStatus(\"employees\", startime_from_script, self.timeout_min, 60, self.operator_id)\n if (emp_timeout_status and addStatus == 0):\n tc_fail(\"Failed, .json data command not received from I360/ event status not sent to I360.\")\n else:\n tc_fail(\"Failed, Unable to add/update the employee from insite360.\")\n except:\n self.log.error(f\"add_new_employee_TC1 - [{sys.exc_info()[0]}]\")\n tc_fail(\"Failed, unable to add new employee/ update assigned stores\")",
"def populate_workers():\n ivanov = Worker(\n name='Ivanov',\n salary=6000,\n )\n petrov = Worker(\n name='Petrov',\n salary=7000,\n )\n nazarov = Worker(\n name='Nazarov',\n salary=5500,\n )\n\n with app.app_context():\n from sales.models import db\n db.session.add(ivanov)\n db.session.add(petrov)\n db.session.add(nazarov)\n\n db.session.commit()\n db.session.close()",
"def workExperience(cls):\n def _add_workTask(work_key, _id):\n \"\"\"Helper method to add work tasks.\n \"\"\"\n for task in WORKTASK_LIST[work_key]:\n entry = {\n 'description': task,\n 'workplace_id': _id\n }\n entry = models.WorkTask(**entry)\n db.session.add(entry)\n\n for _id, workplaceTuple in enumerate(WORKPLACE_LIST):\n _id += 1\n work_key, work_place = workplaceTuple\n\n work_place['id'] = _id\n workplace_entry = models.WorkPlace(**work_place)\n db.session.add(workplace_entry)\n\n # Add work tasks.\n _add_workTask(work_key, _id)",
"def addHardWorkerWithTaskMustHaveShift(self):\n\n\n for d in range(self.num_days):\n for w in range(self.num_workers):\n self.solver.Add((self.task[(w, d)] >= 1) == (self.shift[(w, d)] >= 1))",
"def populateDbWithOccupations():\n \n OCCUPATIONS = [\"agriculteurs\",\n \"artisan - commerçant - chef d\\'entreprise\",\n \"autre\",\n \"cadre\",\n \"employé\",\n \"étudiant\",\n \"ouvrier\",\n \"profession intermédiaire\",\n \"retraité\"]\n \n print \"insertion of new occupations\"\n for occupation in sorted(OCCUPATIONS):\n manager.insertOccupation(Occupation(occupation))",
"def addHardWorkersMustBeAssignedToAllowedTasks(self):\n #Example:\n #At least 2 M shifts must be set on day 0\n #exp1 = [self.shifts[(w, 0)] == 1 for w in range(self.num_workers)]\n #self.solver.Add(self.solver.Sum(exp1) >= 3)\n #numero de supervisores assignados =1 en turno manana\n #exp2 = [self.tasks[(w, 0)] == 1 for w in range(self.num_workers)]\n #self.solver.Add(self.solver.Sum(exp2) == 1)\n #for day in range(self.num_days):\n # self.solver.Add(self.taskworkers[(t, day)] == 1)\n\n exp1 = [(self.tasks[(w, 0)] == 0) * (self.shifts[(w, 0)] == 1) for w in range(self.num_workers)]\n exp2 = [(self.tasks[(w, 0)] == 1) * (self.shifts[(w, 0)] == 1) for w in range(self.num_workers)]\n #print (exp3)\n self.solver.Add(self.solver.Sum(exp1) >=3)\n self.solver.Add(self.solver.Sum(exp2) >= 2)",
"def _create_schedule_lines(self, employees, date_from, date_to, shift_id, weekend_days):\n date_from = fields.Date.from_string(date_from)\n date_to = fields.Date.from_string(date_to)\n dates = (date_from.month == date_to.month or (date_to.month != date_from.month and \\\n (date_to - date_from).days == 1 and \\\n shift_id.time_to <= shift_id.time_from)) and [[date_from, date_to]] or []\n if not dates:\n while date_to.month != date_from.month:\n last_day = calendar.monthrange(date_from.year, date_from.month)[1]\n last_day_date = datetime(date_from.year, date_from.month, last_day).date()\n dates.append([date_from, last_day_date])\n date_from = last_day_date + relativedelta(days=1)\n if date_from.month == date_to.month:\n dates.append([date_from, date_to])\n for emp in employees:\n for date_interval in dates:\n start_date = date_interval[0]\n end_date = date_interval[1]\n shift = emp.employee_shift_schedule_ids. \\\n filtered(lambda record: fields.Date.from_string(record.date_from) <= start_date <=\n fields.Date.from_string(record.date_to)\n # and int(fields.Date.from_string(record.date_from).strftime(\"%m\")) == start_date.month\n )\n res_calendar = shift and shift.resource_calendar_id or \\\n self._create_month_calendar(emp, start_date)\n if self.assign_flexible_hour:\n res_calendar.flexible_hours = shift_id.flexible_hours\n diff = (end_date - start_date).days\n if diff > 6:\n for i in range(0, 7):\n if i not in weekend_days:\n week_day = [list(day)[1] for day in DAY_OF_WEEK\n if i == int(list(day)[0])][0]\n res_calendar.write({\n 'attendance_ids': [(0, 0, {\n 'name': week_day,\n 'dayofweek': str(i),\n 'date_from': start_date,\n 'date_to': end_date,\n 'hour_from': shift_id.time_from,\n 'hour_to': shift_id.time_to,\n 'single_assign_id': self.id,\n })]\n })\n else:\n if shift_id.time_to <= shift_id.time_from:\n end_date = end_date - relativedelta(days=1)\n while end_date >= start_date:\n day_week_nu = start_date.weekday()\n # weekend_days = self.weekend_ids.mapped('code')\n if day_week_nu not in weekend_days:\n day_week = [list(day)[1] for day in DAY_OF_WEEK\n if day_week_nu == int(list(day)[0])][0]\n shift_end_date = start_date + relativedelta(days=1) \\\n if shift_id.time_to < shift_id.time_from else start_date\n res_calendar.write({\n 'attendance_ids': [(0, 0, {\n 'name': day_week,\n 'dayofweek': str(day_week_nu),\n 'date_from': start_date,\n 'date_to': shift_end_date,\n 'hour_from': shift_id.time_from,\n 'hour_to': shift_id.time_to,\n 'single_assign_id': self.id,\n })]\n })\n start_date = start_date + relativedelta(days=1)",
"def employees(self, employees):\n\n\n self._employees = employees",
"def query_new_assignment(name, choosenExos, groups, code):\n \n try:\n assignment = MetalAssignment()\n assignment.name = name\n assignment.code = code \n assignment.created_at = datetime.datetime.now()\n assignment.updated_at = datetime.datetime.now()\n if choosenExos is not None:\n for e in choosenExos:\n q = db.session.query(MetalExercise).get(e)\n assignment.exos.append(q)\n if groups is not None:\n for g in groups:\n q = db.session.query(MetalGroup).get(g)\n assignment.group = q \n \n #the query itself \n db.session.add(assignment)\n db.session.commit()\n lg.warning('Addition done !')\n\n except exc.SQLAlchemyError as e:\n db.session.rollback()\n if e.args == ('(sqlite3.IntegrityError) UNIQUE constraint failed: metal_assignments.code',):\n return flash(\"Veuillez choisir un nouveau code !\", 'danger')\n if e.args == ('(sqlite3.IntegrityError) UNIQUE constraint failed: metal_assignments.name',):\n return flash(\"Ce nom est déjà utilisé !\", 'danger')",
"def create_employee(self, new_employee):\n # TO-DO",
"def addHardWorkersMustBeAssignedToAllowedTasks(self):\n #Example:\n #At least 2 M shifts must be set on day 0\n #exp1 = [self.shifts[(w, 0)] == 1 for w in range(self.num_workers)]\n #self.solver.Add(self.solver.Sum(exp1) >= 3)\n #numero de supervisores assignados =1 en turno manana\n #exp2 = [self.tasks[(w, 0)] == 1 for w in range(self.num_workers)]\n #self.solver.Add(self.solver.Sum(exp2) == 1)\n\n exp1 = [(self.task[(w, 0)] == 1) * (self.shift[(w, 0)] == 1) for w in range(self.num_workers)]\n exp2 = [(self.task[(w, 0)] == 2) * (self.shift[(w, 0)] == 1) for w in range(self.num_workers)]\n self.solver.Add(self.solver.Sum(exp1) >= 4)\n self.solver.Add(self.solver.Sum(exp2) >= 2)",
"def register_for_new_schedule_slots(self):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Determines the smart start state. The smart start is chosen using the UCB1 algorithm, a well-known exploration strategy for multi-armed bandit problems:

smart_start = \arg\max\limits_s \left( \alpha \max\limits_a Q(s, a) + \sqrt{\frac{\beta \log |D|}{C(s)}} \right)

where \alpha = exploitation_param, \beta = exploration_param, \max\limits_a Q(s, a) corresponds to V(s) as determined by the agent, |D| is the size of the replay buffer, and C(s) is the "visitation count" of the state, estimated through kernel density estimation (scipy.stats.gaussian_kde). KDE places a Gaussian (other kernels are possible) around each stored state to build an estimated probability density function; this density describes how likely the robot would be found at a given state if it were stopped at random. To turn the density into a "count", the probability of a state is obtained by "integrating" over a single step-size volume (the volume of a hyperellipsoid whose radii are the average step size in each direction); the integration is approximate: the PDF at the state point is multiplied by that volume. Multiplying this probability by the total number of stored states |D| then gives the count.
|
def get_smart_start_path(self):
        if len(self.replay_buffer) == 0: # if buffer is empty, nothing to evaluate
return None
possible_start_indices = self.replay_buffer.get_possible_smart_start_indices(self.n_ss)
if possible_start_indices is None: # no valid states then return None
return None
#find the smart_start state
all_states = self.replay_buffer.get_all_states() # n x d matrix where n is the number of states and d is dim
##################### KERNEL CALCULATIONS AND ELLIPSOID VOLUME ############################
        kernel = scipy.stats.gaussian_kde(all_states.T, bw_method='scott') #TODO options for what type of bandwidth calc
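        # Scott's rule picks the KDE bandwidth automatically; kernel(x) evaluates the estimated
        # probability density of the stored states at x, which is converted into an approximate
        # visitation count C(s) below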
# plot_2d_density(all_states.T[0], all_states.T[1], kernel) #TODO remove plot
if self.nnd_mb_agent.radii is not None:
one_radii_volume = volume_of_n_dimensional_hyperellipsoid(self.nnd_mb_agent.radii)
# for a n-dimensional state vector, self.nn_mb_agent.radii is a n- long vector which each
# value represents some combination of the mean and std of that index per step
# the volume is the volume of a hyperellipsoid with those radii
else:
one_radii_volume = 1 # 100% arbitrary TODO: get std of last path maybe for both of these
################### PARALLEL UCB CALC #########################################################
# t1 = time.time()
possible_ss_steps = np.array(self.replay_buffer.buffer)[possible_start_indices]
possible_ss_states = np.asarray(self.replay_buffer.steps_to_s2(possible_ss_steps).tolist()) #use tolist, because it renders as a numpy array of objects (not of floats)
ss_state_values = self.agent.get_state_value(possible_ss_states).T # 1 x n matrix (equiv n long list)
probability_densities = (kernel(possible_ss_states.T) * one_radii_volume) # 1 x n matrix (equiv n long list)
C_hats = len(self.replay_buffer) * probability_densities
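        # estimated visitation count: C(s) ~= |D| * PDF(s) * (volume of one average-step hyperellipsoid),
        # where |D| is the number of states stored in the replay buffer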
ucb_list = self.exploitation_param * ss_state_values + \
np.sqrt((self.exploration_param *
np.log(len(self.replay_buffer))) / C_hats)
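        # UCB1 score per candidate state: exploitation_param * V(s)
        # + sqrt(exploration_param * log|D| / C(s)); the smart start is the argmax over candidates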
smart_start_parallel_index = possible_start_indices[np.argmax(ucb_list)]
# ######### For loop setup #####################################################################
# t2 = time.time()
# smart_start_index = None
# max_ucb = -float('inf')
#
# for main_step_index in possible_start_indices:
# # state value
# main_step = self.replay_buffer.buffer[main_step_index]
# state = self.replay_buffer.step_to_s2(main_step)
# state_value = self.agent.get_state_value(state)[0][0]
#
# #SCIPY Kernel Density Estimation TODO document what math was used and what resources
# probability_density = (kernel(state.T) * one_radii_volume)[0]
# C_hat = len(all_states) * probability_density
#
# #ucb calculation
# ucb = self.exploitation_param * state_value + \
# np.sqrt((self.exploration_param *
# np.log(len(self.replay_buffer))) / C_hat)
# if ucb > max_ucb:
# smart_start_index = main_step_index
# max_ucb = ucb
# print("Parallel took: " + str(t2 - t1) + " |Iterative took: " + str(time.time() - t2) + " | " + str(smart_start_index == smart_start_parallel_index))
return self.replay_buffer.get_episodic_path_to_buffer_index(smart_start_parallel_index)
|
[
"def start_new_episode(self, state):\n self.smart_start_pathing = False\n self.smart_start_path = None\n\n if np.random.rand() <= self.eta: #eta is probability of using smartStart\n start_time = time.time()\n self.smart_start_path = self.get_smart_start_path() # new state to navigate to\n end_time = time.time()\n if self.smart_start_path: #ensure path exists\n if self.print_ss_stuff:\n elapsed_time = end_time - start_time\n print(\"Calculate Smart Start Path Time: \" + str(elapsed_time), end='')\n print(\"\\npath exists\")\n # let neural network dynamics model based controller load the path\n self.nnd_mb_agent.start_new_episode_plan(state, self.smart_start_path)\n if not self.nnd_mb_agent.close_enough_to_goal(state): #ensure goal hasn't already been reached\n self.smart_start_pathing = True #this start smart start navigation\n if self.print_ss_stuff:\n print(\"SMART_START START!!!\")\n\n self.agent.start_new_episode(state)\n self.replay_buffer.start_new_episode(self)",
"def build_state(self):\n\n # Collect data about the environment\n waypoint = self.planner.next_waypoint() # The next waypoint \n inputs = self.env.sense(self) # Visual input - intersection light and traffic\n for key, value in iter(inputs.items()):\n if value is None:\n inputs.update({key:'None'})\n deadline = self.env.get_deadline(self) # Remaining deadline\n\n ########### \n ## TO DO ##\n ###########\n \n # NOTE : you are not allowed to engineer features outside of the inputs available.\n # Because the aim of this project is to teach Reinforcement Learning, we have placed \n # constraints in order for you to learn how to adjust epsilon and alpha, and thus learn about the balance between exploration and exploitation.\n # With the hand-engineered features, this learning process gets entirely negated.\n \n # Set 'state' as a tuple of relevant data for the agent \n return self.build_index(inputs,waypoint)",
"def func_state_pre_test(list_float_target_state: List[float], int_shots: int) -> List[float]:\n env = QEnv()\n\n # from QCompute import Define\n # Define.hubToken = ''\n # env.backend(BackendName.CloudBaiduSim2Wind)\n env.backend(BackendName.LocalBaiduSim2)\n\n int_dim = len(list_float_target_state) # the dimension of the input vector\n num_qubit_sys = max(int(np.ceil(np.log2(int_dim))), 1) # the number of qubits we need to encode the input vector\n reg_sys = list(env.Q[idx] for idx in range(num_qubit_sys)) # create the quantum register\n\n # call the quantum circuit to prepare quantum state\n circ_state_pre(reg_sys, [], list_float_target_state, reg_borrowed=[])\n\n # measure the quantum state we have prepared\n MeasureZ(reg_sys, list(reversed(range(num_qubit_sys))))\n\n task_result = env.commit(int_shots, fetchMeasure=True)['counts'] # commit to the task\n\n list_population = [0 for _ in range(2 ** num_qubit_sys)] # register for finial populations\n for idx_key in task_result.keys():\n list_population[int(idx_key, base=2)] = task_result[idx_key]\n return list_population",
"def generate_state(start, diff, state_size, state_name):\n values = []\n increment = float(1) / state_size\n for iteration in range(int(state_size)):\n # Get a value between start + diff\n sample = start + diff * increment * iteration\n values.append(sample)\n\n return {\n \"state_name\": state_name,\n \"values\": values\n }",
"def __init__(self):\n self.action_space = tuple([(pick_up,drop) for pick_up in (1,2,3,4,5) for drop in (1,2,3,4,5) if pick_up!=drop])\n self.state_space = [(loc, time, day) for loc in np.arange(1,m+1) for time in range(t) for day in range(d)]\n self.state_init = random.choice(self.state_space)\n self.state_input = (np.arange(1,m+1) , np.arange(0,t) , np.arange(0,d))\n # Start the first round\n self.reset()",
"def transition_from(self, state):\n a, b, c = state\n tomorrow_state = [(0, 0, 0)]\n if a == 0:\n proba_state = [1.0] # exogenous state is absorbing. Done.\n else:\n proba_state = [self.lam]\n if b < self.N - 1:\n i = 1\n trans = ((1.0 - self.gamma), self.gamma)\n else:\n i = 0\n trans = (1.0, 0)\n while True:\n for cprime in self.c_states:\n if self.c_transition[c][cprime]:\n tomorrow_state.append((a, b + i, cprime))\n proba_state.append((1 - self.lam) * trans[i] *\n self.c_transition[c][cprime])\n if i == 0:\n break\n i -= 1\n return tomorrow_state, proba_state",
"def observe(self, state, action, reward, new_state, done):\n # new_state added (maybe) to buffer\n self.replay_buffer.add(self, state, action, reward, done, new_state)\n\n # agent observation\n self.agent.observe(state, action, reward, new_state, done)\n\n # check if smart_start_state has been reached\n if self.smart_start_pathing:\n self.nnd_mb_agent.observe(state, action, reward, new_state, done)\n if self.nnd_mb_agent.close_enough_to_goal(new_state):\n self.smart_start_pathing = False\n if self.print_ss_stuff:\n print(\"distance to goal: \" + str(self.nnd_mb_agent.distance_function(new_state,self.smart_start_path[-1])))\n print(\"END OF SMART START STUFFS\")",
"def measure(state):\n\t\tstate_z=state\n\t\tn_qubits=QuantumRegister.num_qubits(state)\n\t\tprobs=Probability.get_probabilities(state_z)\n\t\trand=random.random()\n\t\tfor idx,state_desc in enumerate(State.all_state_strings(n_qubits)):\n\t\t\tif rand < sum(probs[0:(idx+1)]):\n\t\t\t\treturn State.state_from_string(state_desc)",
"def _get_initial_state(initial_position, tolerance, expectation_value_function,\n learning_rate, alpha, perturb, gamma, blocking,\n allowed_increase):\n init_args = {\n \"converged\": tf.Variable(False),\n \"num_iterations\": tf.Variable(0),\n \"num_objective_evaluations\": tf.Variable(0),\n \"position\": tf.Variable(initial_position),\n \"objective_value\":\n (tf.cast(expectation_value_function(initial_position), tf.float32)),\n \"objective_value_prev\": tf.Variable(np.inf),\n \"tolerance\": tolerance,\n \"learning_rate\": tf.Variable(learning_rate),\n \"alpha\": tf.Variable(alpha),\n \"perturb\": tf.Variable(perturb),\n \"gamma\": tf.Variable(gamma),\n \"blocking\": tf.Variable(blocking),\n \"allowed_increase\": tf.Variable(allowed_increase),\n }\n return SPSAOptimizerResults(**init_args)",
"def qp_controller(current_state, desired_state, dt, dim=2):\n\n # torque PD controller values\n wheel_kp = 50.0\n wheel_kd = 10.0\n max_torque = 20.0\n\n # cost on obtaining next state and velocity\n kp = 0.0\n kd = 1.0\n\n # half state length\n hl = len(current_state) / 2\n\n mp = MathematicalProgram()\n\n x = mp.NewContinuousVariables(len(current_state), \"x\")\n u = mp.NewContinuousVariables(1, \"u\")\n force = mp.NewContinuousVariables(8, \"force\")\n\n # set the initial state\n set_initial_state(mp, x, current_state, dim)\n # enforce the dynamics with linearized theta\n state = x + get_nd_dynamics(x, u, force, dim, current_state[dim])*dt\n\n # stay on floor\n # add_floor_constraint(mp, state, dim)\n # for corner to ground\n # fix_corner_to_ground(mp, state, 0, -0.5, dim)\n # don't pull on ground\n dont_pull_on_ground(mp, force, dim)\n # bounded to not leave the ground\n # stay_on_ground(mp, state, dim)\n # only force when on ground\n complimentarity_constraint(mp, state, force, dim)\n\n # linearize theta to set this cost\n add_corner_cost(mp, state, 0, -0.5, dim, current_state[dim])\n\n # unpack the states\n x_s = state[0]\n y = state[1]\n theta = state[dim]\n alpha = state[hl-1]\n xdot = state[0+hl]\n ydot = state[1+hl]\n theta_dot = state[dim+hl]\n alpha_dot = state[-1]\n\n # unpack the desired states\n x_des = desired_state[0]\n y_des = desired_state[1]\n theta_des = desired_state[dim]\n alpha_des = desired_state[hl-1]\n xdot_des = desired_state[0+hl]\n ydot_des = desired_state[1+hl]\n theta_dot_des = desired_state[dim+hl]\n alpha_dot_des = desired_state[-1]\n\n # current_pos = np.asarray([x_s,y,theta,alpha])\n # des_pos = np.asarray([x_des,y_des,theta_des,alpha_des])\n # pos_diff = current_pos - des_pos\n current_pos = np.asarray([x_s,y,theta,0])\n des_pos = np.asarray([x_des,y_des,theta_des,0])\n pos_diff = current_pos - des_pos\n\n # current_vel = np.asarray([xdot,ydot,theta_dot,alpha_dot])\n # des_vel = np.asarray([xdot_des,ydot_des,theta_dot_des,alpha_dot_des])\n # vel_diff = current_vel - des_vel\n current_vel = np.asarray([xdot,ydot,theta_dot,0])\n des_vel = np.asarray([xdot_des,ydot_des,theta_dot_des,0])\n vel_diff = current_vel - des_vel\n\n pos = pos_diff.dot(pos_diff)\n vel = vel_diff.dot(vel_diff)\n\n mp.AddQuadraticCost(kp*pos)\n mp.AddQuadraticCost(kd*vel)\n\n # torque PD controller\n input_torque = wheel_kp*(current_state[dim] - np.pi/4.0) + wheel_kd*current_state[dim+hl]\n input_torque = np.clip(input_torque, -max_torque, max_torque)\n mp.AddConstraint(u[0] == input_torque)\n\n sol = mp.Solve()\n # print(sol)\n\n my_torque = mp.GetSolution(u)\n my_force = mp.GetSolution(force)\n my_start = mp.GetSolution(x)\n\n return my_start, my_torque, my_force",
"def estimate_next_state(self):\n return self.__transition_function(self.__state)",
"def get_start(path, n, T, W, U, Nm, L, delta_start=0.12, mustart=-0.85, dtype=np.float64, tol=0.01, mode=['', 'Nm', 'T', 'U', 'W'], deleteNm=False):\n def get_data(f, mode=''):\n \"\"\"This function returns the data corresponding to the given parameter configuration that is necessary to restart the self-consistency cycle from a HDF5 file.\n Arguments:\n n: particle density\n T: temperature\n W: disorder strength\n U: interaction strength\n Nm: number of moments in Chebyshev expansion\n f: file handle\n Keyword Arguments:\n mode: determines how n, T, W, U and Nm are interpreted to get the configuration. 'n', 'T', 'W' and 'U' modes can be combined.\n '': Check for specific configuration W, U, T\n 'n', 'T', 'W', 'U': Check for closest configuration with smaller n, W, U or T\n Return Values:\n delta_i: local pairing amplitude\n n_i: local occupation number\n iterations: number of iterations already performed (only applicable to standard mode '')\n self_con: Tolerance up to which self-consistency has been reached. None is returned if the configuration does not exist. Inf signifies an undetermined tolerance. \n \"\"\"\n\n def closest_key(key, dtype, f, noself=True, smallereq=False):\n \"\"\"This function returns the closest key with a smaller value in a HDF5 file.\n Arguments:\n key: key of which the closest smaller-valued key is searched for (key format: 'Name_Value')\n dtype: data type of the value of the key\n f: file handle of the HDF5 file\n Return Values:\n closest_key: closest smaller key\n \"\"\"\n closest_key=''\n keyname, keyval = key.split('_')\n keyval=np.abs(dtype(keyval))\n keylist=f.keys()\n if noself==1:\n keylist=np.array([dtype(keyel.split('_')[1]) for keyel in keylist if keyname==keyel.split('_')[0] and not np.allclose(dtype(keyel.split('_')[1]), keyval)])\n else: \n keylist=np.array([dtype(keyel.split('_')[1]) for keyel in keylist if keyname==keyel.split('_')[0]])\n if smallereq==1:\n keylist=keylist[np.where((keylist-keyval)<=0)]\n if keylist.size:\n closest_key=keyname+'_'+str(keylist[np.argmin(np.abs(keylist-keyval))])\n else:\n closest_key=key\n return closest_key \n def checkkey(key, pathprefix, pathsuffix, dtype, f, tolerance=0.1):\n \"\"\"This function checks if using the smaller-valued key starting configuration is feasible. 
It is called, when \"c\" is included in the mode.\n Arguments:\n key: key of which the closest smaller-valued key is searched for (key format: 'Name_Value')\n pathprefix: prefix of the path where to compare key\n pathsuffix: suffix of the path where to compare key\n dtype: data type of the value of the key\n f: file handle of the HDF5 file\n Keyword Arguments:\n tolerance: tolerance of the smallest \n Return Values:\n True/False \n \"\"\"\n check=False\n if pathprefix not in f:\n return check\n closekey=closest_key(key, dtype, f[pathprefix], noself=True)\n delta_i_key=None\n delta_i_closekey=None\n if key in f[pathprefix]:\n if 'self_consistency' in f[pathprefix+key+pathsuffix].attrs:\n self_con=f[pathprefix+key+pathsuffix].attrs['self_consistency']\n if isinstance(self_con, float):\n tolstr='tol_'+str(round(self_con, roundto))\n else:\n if isinstance(self_con[-1], float):\n tolstr='tol_'+str(round(self_con[-1], roundto))\n else:\n tolstr='tol_'+str(round(self_con[-1][0], roundto))\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+key+pathsuffix], noself=False, smallereq=True)\n if tolstr not in f[pathprefix+key+pathsuffix]:\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+key+pathsuffix])\n if tolstr in f[pathprefix+key+pathsuffix]:\n if 'delta_i' in f[pathprefix+key+pathsuffix+'/'+tolstr]:\n delta_i_key=f[pathprefix+key+pathsuffix+'/'+tolstr]['delta_i'][:]\n if 'self_consistency' in f[pathprefix+key+pathsuffix]:\n self_con=f[pathprefix+key+pathsuffix]['self_consistency'][:,:]\n tolstr='tol_'+str(round((self_con[np.nonzero(self_con[:,0]),0])[0,-1], roundto))\n\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+key+pathsuffix], noself=False, smallereq=True)\n if tolstr not in f[pathprefix+key+pathsuffix]:\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+key+pathsuffix])\n if tolstr in f[pathprefix+key+pathsuffix]:\n if 'delta_i' in f[pathprefix+key+pathsuffix+'/'+tolstr]:\n delta_i_key=f[pathprefix+key+pathsuffix+'/'+tolstr]['delta_i'][:]\n if 'self_consistency' in f[pathprefix+closekey+pathsuffix].attrs:\n self_con=f[pathprefix+closekey+pathsuffix].attrs['self_consistency']\n if isinstance(self_con, float):\n tolstr='tol_'+str(round(self_con, roundto))\n else:\n if isinstance(self_con[-1], float):\n tolstr='tol_'+str(round(self_con[-1], roundto))\n else:\n tolstr='tol_'+str(round(self_con[-1][0], roundto))\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+closekey+pathsuffix], noself=False, smallereq=True)\n if tolstr not in f[pathprefix+closekey+pathsuffix]:\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+closekey+pathsuffix])\n if tolstr in f[pathprefix+closekey+pathsuffix]:\n if 'delta_i' in f[pathprefix+closekey+pathsuffix+'/'+tolstr]:\n delta_i_closekey=f[pathprefix+closekey+pathsuffix+'/'+tolstr]['delta_i'][:]\n if 'self_consistency' in f[pathprefix+closekey+pathsuffix]:\n self_con=f[pathprefix+closekey+pathsuffix]['self_consistency'][:,:]\n tolstr='tol_'+str(round((self_con[np.nonzero(self_con[:,0]),0])[0,-1], roundto))\n\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+closekey+pathsuffix], noself=False, smallereq=True)\n if tolstr not in f[pathprefix+closekey+pathsuffix]:\n tolstr=closest_key(tolstr, np.float64, f[pathprefix+closekey+pathsuffix])\n if tolstr in f[pathprefix+closekey+pathsuffix]:\n if 'delta_i' in f[pathprefix+closekey+pathsuffix+'/'+tolstr]:\n delta_i_closekey=f[pathprefix+closekey+pathsuffix+'/'+tolstr]['delta_i'][:]\n if delta_i_key is not None and delta_i_closekey is not None and 
np.mean(np.abs((delta_i_key-delta_i_closekey)/delta_i_closekey)) < tolerance:\n check=True\n if delta_i_key is None or delta_i_closekey is None:\n check=True\n return check\n\n roundto=6\n self_con=None\n tolstr=None\n iterations=None\n delta_i=None\n n_i=None\n## change to None as soon as all files include mu!\n mu=0.\n## change to None as soon as all files include mu!\n cycle_time=0.\n n_=round(n, roundto)\n T_=round(T,2*roundto)\n W_=round(W,roundto)\n U_=round(U,roundto)\n nstr='n_'+str(str(n_))\n Tstr='T_'+str(T_)\n Wstr='W_'+str(W_)\n Ustr='U_'+str(U_)\n Nmstr='Nm_'+str(Nm)\n# implement in a better way, when time (deleteNm)\n deleteNm=False\n# implement in a better way, when time (deleteNm)\n if 'n' in mode:\n nstr=closest_key(nstr, np.float64, f)\n if nstr in f:\n if 'T' in mode:\n# Tstr=closest_key(Tstr, np.float64, f[nstr])\n Tstr=closest_key(Tstr, np.float64, f[nstr], smallereq=True)\n if Tstr in f[nstr]:\n if 'W' in mode:\n Wstr=closest_key(Wstr, np.float64, f[nstr][Tstr])\n if Wstr in f[nstr][Tstr]:\n if 'U' in mode:\n Ustr=closest_key(Ustr, np.float64, f[nstr][Tstr][Wstr])\n if Ustr in f[nstr][Tstr][Wstr]:\n if 'Nm' in mode:\n# newTstr=closest_key(Tstr, np.float64, f[nstr])\n# newWstr=closest_key(Wstr, np.float64, f[nstr][Tstr])\n newUstr=closest_key(Ustr, np.float64, f[nstr][Tstr][Wstr])\n if ('c' not in mode):\n Nmstr=closest_key(Nmstr, np.int, f[nstr][Tstr][Wstr][Ustr])\n# elif checkkey(Nmstr, nstr+'/'+newTstr+'/'+Wstr+'/'+Ustr+'/', '', np.int, f): \n# elif checkkey(Nmstr, nstr+'/'+Tstr+'/'+newWstr+'/'+Ustr+'/', '', np.int, f): \n elif checkkey(Nmstr, nstr+'/'+Tstr+'/'+Wstr+'/'+newUstr+'/', '', np.int, f): \n# Nmstr=closest_key(Nmstr, np.int, f[nstr][newTstr][Wstr][Ustr], noself=True, smallereq=True)\n# Nmstr=closest_key(Nmstr, np.int, f[nstr][Tstr][newWstr][Ustr], noself=True, smallereq=True)\n Nmstr=closest_key(Nmstr, np.int, f[nstr][Tstr][Wstr][newUstr], noself=True, smallereq=True)\n else:\n Nmstr=''\n deleteNm=True\n if Nmstr in f[nstr][Tstr][Wstr][Ustr]:\n if 'time' in f[nstr][Tstr][Wstr][Ustr][Nmstr].attrs:\n cycle_time=f[nstr][Tstr][Wstr][Ustr][Nmstr].attrs['time']\n if not isinstance(cycle_time, float):\n cycle_time=sum(cycle_time)\n if 'self_consistency' in f[nstr][Tstr][Wstr][Ustr][Nmstr].attrs:\n self_con=f[nstr][Tstr][Wstr][Ustr][Nmstr].attrs['self_consistency']\n if isinstance(self_con, float):\n tolstr='tol_'+str(round(self_con, roundto))\n else:\n if isinstance(self_con[-1], list) or isinstance(self_con[-1], np.ndarray):\n tolstr='tol_'+str(round(self_con[-1][0], roundto))\n else:\n tolstr='tol_'+str(round(self_con[-1], roundto))\n tolstr=closest_key(tolstr, np.float64, f[nstr][Tstr][Wstr][Ustr][Nmstr], noself=False, smallereq=True)\n if tolstr not in f[nstr][Tstr][Wstr][Ustr][Nmstr]:\n tolstr=closest_key(tolstr, np.float64, f[nstr][Tstr][Wstr][Ustr][Nmstr])\n if tolstr in f[nstr][Tstr][Wstr][Ustr][Nmstr]:\n if 'iterations' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs:\n iterations=np.int(f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs['iterations'])\n if 'delta_i' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]:\n delta_i=f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]['delta_i'][:]\n if 'n_i' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]:\n n_i=f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]['n_i'][:]\n if 'mu' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs:\n mu=dtype(f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs['mu'])\n if 'self_consistency' in f[nstr][Tstr][Wstr][Ustr][Nmstr]:\n self_con=f[nstr][Tstr][Wstr][Ustr][Nmstr]['self_consistency'][:,:]\n 
tolstr='tol_'+str(round((self_con[np.nonzero(self_con[:,0]),0])[0,-1], roundto))\n \n tolstr=closest_key(tolstr, np.float64, f[nstr][Tstr][Wstr][Ustr][Nmstr], noself=False, smallereq=True)\n if tolstr not in f[nstr][Tstr][Wstr][Ustr][Nmstr]:\n tolstr=closest_key(tolstr, np.float64, f[nstr][Tstr][Wstr][Ustr][Nmstr])\n if tolstr in f[nstr][Tstr][Wstr][Ustr][Nmstr]:\n if 'iterations' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs:\n iterations=np.int(f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs['iterations'])\n if 'delta_i' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]:\n delta_i=f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]['delta_i'][:]\n if 'n_i' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]:\n n_i=f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr]['n_i'][:]\n if 'mu' in f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs:\n mu=dtype(f[nstr][Tstr][Wstr][Ustr][Nmstr][tolstr].attrs['mu'])\n if mode!='':\n iterations=0\n self_con=[[np.float64('Inf'), np.float64('Inf'), np.float64('Inf')]]\n cycle_time=0.\n# not valid for n!=0.875\n# if 'U' in mode:\n# Uold=np.float64(Ustr.split('_')[1])\n# if delta_i is not None:\n# delta_i=delta_i*np.exp(-5.*(Uold-U_)/(Uold*U_))\n# not valid for n!=0.875\n return delta_i, n_i, mu, iterations, self_con, cycle_time, deleteNm\n\n seed=None\n n_i=None\n delta_i=None\n iterstart=0\n self_con=None\n mu=mustart\n cycle_time=0.\n deleteNm_=False\n try:\n with h5py.File(path, 'r') as f:\n if 'seed' in f.attrs:\n seed=f.attrs['seed']\n for m in mode:\n datalist=get_data(f, mode=m)\n if None not in datalist:\n delta_i, n_i, mu, iterstart, self_con, cycle_time, deleteNm_=datalist\n break \n \n except IOError:\n sys.exc_clear() \n\n if not deleteNm:\n deleteNm_=False\n if n_i is None:\n n_i=np.ones(L*L, dtype=dtype)*n\n if delta_i is not None and np.mean(delta_i)<(1e-16/tol):\n delta_i=np.ones(L*L, dtype=dtype)*100*(1e-16/tol)\n if delta_i is None:\n delta_i=np.ones(L*L, dtype=dtype)*delta_start\n if self_con is None:\n self_con=np.array([[np.float64('Inf'), np.float64('Inf'), np.float64('Inf')]])\n if mu is None:\n mu=mustart\n return delta_i, n_i, mu, iterstart, self_con, cycle_time, seed, deleteNm_",
"def make_state():\r\n return State(name=\"\", cntBins=0, cntSimulation=0\r\n , intLane=0, valBucket = [], cntBucket = []\r\n , pathDirectionList = []\r\n , pathScoreList = []\r\n ) #returns a State objec\r",
"def get_starting_state(self):\n\t\treturn self._current_state # state 0",
"def determineThrottle(self, state):\n\n eps = self.epsilon\n n = 1 if state.size == 14 else state.shape[1]\n\n throttle = np.zeros(n)\n S = self.switchingFunction(state)\n S = S.reshape(n)\n\n for i, s in enumerate(S):\n if eps > 0:\n midthrottle = (eps - s) / (2 * eps)\n throttle[i] = 0 if s > eps else 1 if s < -eps else midthrottle\n else:\n throttle[i] = 0 if s > eps else 1\n\n return throttle",
"def _startup(self):\n\n end_num = 3 # For last few samples of startup the vehicle will not accelerate.\n startup_acc = self.vopt.get_average()/((self.headstart_samples - end_num)*self.dt)\n\n current_vehicle_index = self.k / self.headstart_samples # Current vehicle starting.\n\n for i, vehicle_id in enumerate(self.vehicle_ids):\n angle_pwm = self._get_angle_pwm(vehicle_id) # Steering input for path tracking.\n\n # Accelerate the current vehicle that is started. Others have acceleration zero.\n if i == current_vehicle_index:\n if self.k % self.headstart_samples == 0:\n print('Starting vehicle {}.'.format(vehicle_id))\n\n if self.k % self.headstart_samples < self.headstart_samples - 3:\n acc = startup_acc\n else:\n acc = 0 # End of startup.\n else:\n acc = 0 # Vehicle already started or not yet started.\n\n # Calculate the new speed control input from the current input and the acceleration.\n new_vel = trxmodel.throttle_input_to_linear_velocity(self.speed_pwms[vehicle_id]) + \\\n acc * self.dt\n self.speed_pwms[vehicle_id] = trxmodel.linear_velocity_to_throttle_input(new_vel)\n\n # Publish speed and steering inputs.\n self.pwm_publisher.publish(vehicle_id, self.speed_pwms[vehicle_id], angle_pwm,\n self.gear2_pwm)\n\n # Update the MPC state information.\n vel = self.positions[vehicle_id][3]\n path_pos = self.path_positions[vehicle_id].get_position()\n self.mpcs[vehicle_id].set_new_x0(numpy.array([vel, path_pos]))\n\n self.k += 1",
"def getStartState(self):\n # print self.G.neighbors(self.start_node)\n return self.start_node\n # util.raiseNotDefined()",
"def test_state_break_smaller():\n sim = Sim()\n sys = VanDerPol()\n sys.add_break_smaller(\"x\",-1.0)\n sim.add_system(sys)\n sim.simulate(20,0.01)\n\n #If correct the simulation should break at time 2.52\n assert sys.res.time[-1] == 2.52",
"def prim_next(self, dt):\n if self.extract_primitive() == False: # if there is no primitive to use\n self.next((0, 0), dt)\n else:\n prim_id, prim_progress = self.extract_primitive()\n # TODO: make this portion of the code more automated\n if prim_id > -1:\n # load primitive data \n list_of_key = ['x0', 'x_ref', 'u_ref', 'alpha', 'K']\n x0, x_ref, u_ref, alpha, K = get_bunch_prim_data(prim_id, list_of_key)\n \n if prim_progress == 0: # compute initial extended state\n x_real = self.state.reshape((-1, 1)) \n x1 = x_real - x0\n x2 = np.matmul(np.linalg.inv(diag([4, 0.02, 4, 4])), x1)\n # initial state, consisting of actual and virtual states for the controller\n self.extended_state = (np.vstack((x_real, x0, x1, x2)))[:, 0] \n \n num_of_inputs = 2 \n G_u = np.diag([175, 1.29]) # this diagonal matrix encodes the size of input set (a constraint)\n dist = get_disturbance()\n k = int(prim_progress * params.num_subprims) # calculate primitive waypoint\n q1 = K[k, 0].reshape((-1, 1), order='F')\n q2 = 0.5 * (x_ref[:, k+1] + x_ref[:, k]).reshape(-1, 1)\n q3 = u_ref[:, k].reshape(-1, 1)\n q4 = u_ref[:, k].reshape(-1, 1)\n q5 = np.matmul(G_u, alpha[k*num_of_inputs: (k+1)*num_of_inputs]).reshape((-1, 1), order='F')\n # parameters for the controller\n q = np.vstack((q1, q2, q3, q4, q5))\n \n self.extended_state = odeint(func=prim_state_dot, y0=self.extended_state, t=[0, dt], args=(dist, q))[-1, :]\n self.state = self.extended_state[0: len(self.state)]\n self.alive_time += dt\n prim_progress = prim_progress + dt / get_prim_data(prim_id, 't_end')[0]\n self.prim_queue.replace_top((prim_id, prim_progress))\n else: # if is stopping primitive\n self.next((0, 0), dt)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Record the observation to the replay buffer, allow the base agent to observe the transition, and, if smart start pathing is on, check whether the goal has been reached.
|
def observe(self, state, action, reward, new_state, done):
# new_state added (maybe) to buffer
self.replay_buffer.add(self, state, action, reward, done, new_state)
# agent observation
self.agent.observe(state, action, reward, new_state, done)
# check if smart_start_state has been reached
if self.smart_start_pathing:
self.nnd_mb_agent.observe(state, action, reward, new_state, done)
if self.nnd_mb_agent.close_enough_to_goal(new_state):
self.smart_start_pathing = False
if self.print_ss_stuff:
print("distance to goal: " + str(self.nnd_mb_agent.distance_function(new_state,self.smart_start_path[-1])))
print("END OF SMART START STUFFS")
|
[
"def observe(self, pre_observation, action, reward, post_observation, done):",
"def observe(self, observation):\n if observation.get('reward') is not None:\n reward = observation['reward']\n for log_prob in self.log_probs:\n loss = - log_prob * reward\n self.backward(loss)\n \n self.log_probs = []\n\n for parameter in self.model.parameters(): # pylint: disable=access-member-before-definition\n if parameter.grad is not None:\n parameter.grad.data.clamp_(min=-5, max=5)\n \n if observation.get('model') is not None:\n # Deepcopied and frozen clone of the model\n self.model = observation['model']\n\n return super(RLTorchGeneratorAgent, self).observe(\n observation)",
"def controller_action(self, obs:Dict, take_action:bool=True, DEBUG:bool=False):\n grip_pos = obs['observation'][:3]\n object_pos = obs['observation'][3:6]\n object_rel_pos = obs['observation'][6:9]\n goal_pos = obs['desired_goal']\n # lift the hand little from the table vertically\n if not self.hand_higher:\n action = [0,0,1,0]\n if grip_pos[2]-object_pos[2] > 0.05:\n if take_action:\n self.hand_higher = True\n if DEBUG:\n print('Hand lifted from the table')\n # once above, move it above the puck\n if self.hand_higher and not self.hand_behind:\n goal_grip_pos = object_pos + (0.025 + self.r)*(object_pos - goal_pos)/np.linalg.norm(object_pos - goal_pos)\n # goal_object_vec = object_pos - goal_pos # vector pointing towards object from goal\n # action_pos = list(self.kp * goal_object_vec)\n action_pos = list(self.kp*(goal_grip_pos - grip_pos))\n action = action_pos[:2] + [0,0]\n if np.linalg.norm(grip_pos[:2]-goal_grip_pos[:2]) < 0.001:\n if take_action:\n self.hand_behind = True\n if DEBUG:\n print('Hand has moved behind')\n # now move the hand down\n if self.hand_behind and not self.hand_down:\n action = [0,0,-1,0]\n if grip_pos[2]-object_pos[2] <0.01:\n self.start_time = self.fetch_env.env.sim.data.time # start the time once we are ready to hit\n self.prev_time = self.start_time\n self.d1 = np.linalg.norm(goal_pos[:-1] - object_pos[:-1])/5 # Define d1 wrt the initial gripper pose rather than the object pose\n self.d2 = (np.linalg.norm(goal_pos[:-1] - object_pos[:-1]) - self.d1)\n self.f = self.d2 * self.mu * self.g / self.d1\n\n if take_action:\n self.hand_down = True\n\n v1 = np.sqrt(2*self.d2*self.mu*self.g)\n a = v1**2/(2*self.d1)\n\n if DEBUG:\n print('d2 = ' + str(self.d2))\n print('mu = ' + str(self.mu))\n print('v1 = ' +str(v1))\n print('d1 = ' + str(self.d1))\n print('a = '+str(a))\n print('Ready to HIT')\n # slide the puck\n if self.hand_down:\n v1 = np.sqrt(2*self.d2*self.mu*self.g)\n a = v1**2/(2*self.d1)\n if np.linalg.norm(goal_pos[:-1] - grip_pos[:-1]) > self.d2:\n if DEBUG:\n print('this is the distance ' + str(np.linalg.norm(goal_pos[:-1] - grip_pos[:-1])))\n cur_time = self.fetch_env.env.sim.data.time\n # delta s = sdot * dt, where sdot = f*t and s is measured along direction from puck to goal\n action_pos = list((goal_pos - grip_pos)/np.linalg.norm(goal_pos - grip_pos) * self.f * (cur_time - self.start_time)*(cur_time - self.prev_time))\n self.prev_time = cur_time\n #print('current speed = ' + str(a*(cur_time-self.start_time)))\n else:\n #print('no push')\n action_pos = [0,0]\n action = action_pos[:2] + [0,0]\n if DEBUG:\n print('commanded action = ' + str(np.linalg.norm(action[0:2])))\n # added clipping here\n #return action\n return np.clip(action, -1, 1)",
"def controller_action(self, obs:Dict, take_action:bool=True, DEBUG:bool=False):\n grip_pos = obs['observation'][:3]\n object_pos = obs['observation'][3:6]\n object_rel_pos = obs['observation'][6:9]\n goal_pos = obs['desired_goal']\n\n disp = 0.005 # extra distance to accellerate\n gripper_dim = 0.029 # maximum planar distance across the gripper\n\n # lift the hand little from the table vertically\n if not self.hand_higher:\n action = [0,0,1,0]\n if grip_pos[2]-object_pos[2] > 0.05:\n if take_action:\n self.hand_higher = True\n if DEBUG:\n print('Hand lifted from the table')\n # once above, move it above the puck\n if self.hand_higher and not self.hand_behind:\n goal_grip_pos = object_pos + (disp + gripper_dim + self.r)*(object_pos - goal_pos)/np.linalg.norm(object_pos - goal_pos)\n action_pos = list(self.kp*(goal_grip_pos - grip_pos))\n action = action_pos[:2] + [0,0]\n if np.linalg.norm(grip_pos[:2]-goal_grip_pos[:2]) < 0.001:\n if take_action:\n self.hand_behind = True\n if DEBUG:\n print('Hand has moved behind')\n # now move the hand down\n if self.hand_behind and not self.hand_down:\n action = [0,0,-1,0]\n if grip_pos[2]-object_pos[2] <0.01:\n self.start_time = self.fetch_env.env.sim.data.time # start the time once we are ready to hit\n if take_action:\n self.hand_down = True\n if DEBUG:\n print('Ready to HIT')\n\n # define helper functions\n def calc_direction(pos, goal):\n # calculates unit vector direction from position to goal\n pos = pos[:-1]; goal = goal[:-1]\n return (goal - pos)/np.linalg.norm(goal - pos)\n\n # set the goal speed\n if self.hand_down and not self.set_goal_speed:\n self.dist_to_goal = np.linalg.norm(object_pos[:-1]-goal_pos[:-1])\n self.goal_speed = np.sqrt(2*self.mu*self.g*self.dist_to_goal)\n self.a = (self.goal_speed**2)/(2*disp)\n #print('this is dist to goal ' +str(self.dist_to_goal))\n #print('this is mu ' +str(self.mu))\n #print('this is the goal speed ' + str(self.goal_speed))\n #print('this is the timestep ' + str(self.dt))\n #print('this is a ' + str(self.a))\n #print('this is 0.025+self.r ' +str(0.025+self.r))\n #print('this is the distance between gripper pos and object pos ' +str(np.linalg.norm(object_pos-grip_pos)))\n self.prev_time = self.start_time\n if take_action:\n self.set_goal_speed = True\n\n # slap the puck\n if self.hand_down and self.set_goal_speed:\n time = self.fetch_env.env.sim.data.time\n dtime = time - self.prev_time\n if np.linalg.norm(goal_pos[:-1] - grip_pos[:-1]) > self.dist_to_goal:\n if DEBUG:\n print('this is the distance ' + str(np.linalg.norm(goal_pos[:-1] - grip_pos[:-1])))\n #print(self.prev_speed)\n #print(next_speed)\n next_speed = self.prev_speed + (self.a * dtime)\n action_pos = list((dtime*(next_speed+self.prev_speed)/2)*calc_direction(object_pos,goal_pos))\n self.prev_speed = next_speed\n self.prev_time = time\n else:\n action_pos = [0,0]\n action = action_pos[:2] + [0,0]\n if DEBUG:\n print('commanded action = ' + str(np.linalg.norm(action[0:2])))\n\n # added clipping here\n return np.clip(action, -1, 1)",
"def _add_transition(self, replay):\n self.replay_buffer.append(replay)\n if len(self.replay_buffer) > self.max_buffer_size:\n self.replay_buffer.pop(0)",
"def _add_transition_replay(self, infostate_embedding, time_step):\n prev_timestep = self._last_time_step\n assert prev_timestep is not None\n legal_actions = (\n prev_timestep.observations[\"legal_actions\"][self.player_id])\n legal_actions_mask = np.zeros(self._num_actions)\n legal_actions_mask[legal_actions] = 1.0\n reward = time_step.rewards[self.player_id] if time_step.rewards else 0.0\n transition = ReplayBufferElement(\n embedding=infostate_embedding,\n info_state=(prev_timestep.observations[\"info_state\"][self.player_id]),\n action=self._last_action,\n reward=reward,\n next_info_state=time_step.observations[\"info_state\"][self.player_id],\n is_final_step=float(time_step.last()),\n legal_actions_mask=legal_actions_mask)\n self._replay_buffer.add(transition)",
"def record(self):\n rospy.loginfo(\"Waypoint Recording Started\")\n print(\"Press Navigator 'OK/Wheel' button to record a new joint \"\n \"joint position waypoint.\")\n print(\"Press Navigator 'Rethink' button when finished recording \"\n \"waypoints to begin playback\")\n # Connect Navigator I/O signals\n # Navigator scroll wheel button press\n self._navigator_io.button0_changed.connect(self._record_waypoint)\n # Navigator Rethink button press\n self._navigator_io.button2_changed.connect(self._stop_recording)\n\n # Set recording flag\n self._is_recording = True\n\n # Loop until waypoints are done being recorded ('Rethink' Button Press)\n while self._is_recording and not rospy.is_shutdown():\n rospy.sleep(1.0)\n\n # We are now done with the navigator I/O signals, disconnecting them\n self._navigator_io.button0_changed.disconnect(self._record_waypoint)\n self._navigator_io.button2_changed.disconnect(self._stop_recording)",
"def _populate_gen_replay_buffer(self) -> None:\n gen_rollouts = rollout.generate_transitions(\n self._gen_policy, self.env_train,\n n_timesteps=self._n_disc_samples_per_buffer)[:3]\n self._gen_replay_buffer.store(*gen_rollouts)",
"def _replaying(self):\n return self.__replaying",
"def step(self):\n\n self.history.append(self.output.value[:])\n self.output.value = self.input.getNetInput()",
"def store_trajectory(self, state, action, reward, next_state):\n\n trajectory = (state, action, reward, next_state)\n self.buffer.append(trajectory)",
"def run_policy(self):\n env = make_imitation_env()\n obs_dict = env.reset()\n while True:\n action = self.get_action(self.gymobs_to_inputdict(obs_dict))\n next_obs_dict, reward, done, info = env.step(action)\n if info.get(\"record\"):\n self.record = True\n entry = self.get_repbuf_entry(obs_dict, action, next_obs_dict, reward, done, info)\n self.repbuf.add(*entry)\n else:\n self.record = False\n\n if info.get(\"buttons\")[0] == 1:\n self.send = True\n else:\n self.send = False\n obs_dict = next_obs_dict\n if done:\n obs_dict = env.reset() \n self.repbuf.increase_lastn_priorities(50)",
"def track_trajectory(self, states, controls, close_loop):",
"def collect_rollouts(self):\n assert self._last_obs is not None, \"No previous observation was provided\"\n need_test = False\n timestep = 0\n done = False\n self.rollout_buffer.reset()\n self.policy.eval()\n\n while timestep < self.n_steps:\n\n with torch.no_grad():\n # Convert to pytorch tensor\n edge_loc_features, the_same_features, loc_ID = self._last_obs\n distributions, values, _ = self.policy.forward(\n edge_loc_features=edge_loc_features,\n the_same_features=the_same_features,\n actions=None,\n loc_ID=loc_ID,\n )\n\n # for distribution in distributions:\n # print(distribution.all_probs())\n\n actions, log_probs, edge_loc_features, the_same_features, reward, done, self.episode_time_cost, loc_ID = \\\n self.make_one_step_forward_for_env(\n env=self.env,\n distributions=distributions,\n episode_time_cost=self.episode_time_cost,\n random_policy=False,\n )\n\n if done:\n self.episode += 1\n got_reward = self.env.got_reward\n self.the_last_100_episodes_got_rewards.append(got_reward)\n self.the_best_100_episodes_got_rewards.append(got_reward)\n self.last_100_episodes_mean_got_reward = np.mean(self.the_last_100_episodes_got_rewards)\n if len(self.the_last_100_episodes_got_rewards) > 100:\n if self.last_100_episodes_mean_got_reward > self.the_best_last_100_episodes_mean_got_reward:\n self.the_best_last_100_episodes_mean_got_reward = self.last_100_episodes_mean_got_reward\n need_test = True\n self.the_last_100_episodes_got_rewards.pop(0)\n self.the_best_100_episodes_got_rewards.sort()\n self.the_best_100_episodes_got_rewards.pop(0)\n if self.episode % 100 == 0:\n print('''\n ******************************************************************************************************\n in {}th episode, the number of vehicle is {} \n ------------------------------------------------------------------------------------------------------\n episode_time_cost = {}, got_reward = {}, select_action_times = {}, \n random_policy_100_episodes_mean_got_reward = {}, \n the_best_100_episodes_mean_got_reward = {}, \n last_100_episodes_mean_got_reward = {}, \n the_best_last_100_episodes_mean_got_reward = {}\n ******************************************************************************************************\n '''.format(\n self.episode, self.vehicle_num, self.episode_time_cost, got_reward,\n self.select_action_time, self.random_policy_100_episodes_mean_got_reward,\n np.mean(self.the_best_100_episodes_got_rewards),\n self.last_100_episodes_mean_got_reward,\n self.the_best_last_100_episodes_mean_got_reward,\n ))\n self.episode_time_cost = 0\n self.select_action_time = 0\n edge_loc_features, the_same_features, loc_ID = self.env.reset()\n edge_loc_features, the_same_features, loc_ID = self.change_env_output_feature_shape(\n edge_loc_features, the_same_features, loc_ID)\n new_obs = [edge_loc_features, the_same_features, loc_ID]\n\n self.num_timesteps += 1\n timestep += 1\n self.rollout_buffer.add(self._last_obs, actions, reward,\n self._last_done, values, log_probs)\n self._last_obs = new_obs\n self._last_done = done\n\n with torch.no_grad():\n # Compute value for the last timestep\n edge_loc_features, the_same_features, loc_ID = self._last_obs\n _, values, _ = self.policy.forward(\n edge_loc_features=edge_loc_features,\n the_same_features=the_same_features,\n actions=None,\n loc_ID=loc_ID,\n )\n\n self.rollout_buffer.compute_returns_and_advantage(last_values=values, done=done)\n\n return need_test",
"def observe(self, s1, a, r, s2, isEnd):\n if self.modelType == \"Representational\":\n s1 = parseStateRepresentation(s1)\n s2 = parseStateRepresentation(s2)\n\n self.replayMem.put((s1, a, r, s2, isEnd))\n\n if self.isTraining():\n batch = self.replayMem.getBatch(self.batchSize)\n init_states, actions, rewards, next_states, isFinal = list(zip(*batch))\n # if self.modelType == \"\":\n init_states = np.concatenate(init_states)\n next_states = np.concatenate(next_states)\n # else:\n # init_states = np.array(init_states)\n # next_states = np.array(next_states)\n targets = self.predict(self.TargetQNetwork, init_states)\n estimated_values = self.predict(self.TargetQNetwork, next_states)\n \n targets[range(self.batchSize), actions] = rewards + (self.discountFactor*np.max(estimated_values, axis=1)*np.invert(isFinal))\n loss = self.QNetwork.train_on_batch(init_states, targets)\n \n self.log(\"\\tLoss: {}\".format(loss))\n\n self.updatesToNetwork += 1\n\n if self.updatesToNetwork % 1000 == 0:\n self.saveModel(\"RoutineSave\")\n self.updateTarget()\n print(\"Q Network updated {} times\".format(self.updatesToNetwork))",
"def joint_states_cb(self, data):\n if self.start_recording:\n #self.last_joint_states_data = data\n #self.current_rosbag.write(DEFAULT_JOINT_STATES, data)\n self.joint_states_accumulator.append(data)\n #self.time_accumulator.append(rospy.Time.now())",
"def step_graphical(self, current_view):\n\n # IMPORTANT: The logic must be changed for more than 2 agents\n if self.turn % 2 == 0:\n act = self.agent1.get_action(self.state1)\n cur_player = 1\n else:\n act = self.agent2.get_action(self.state2)\n cur_player = 2\n\n\n self.state = self.problem.next_state(self.state, act)\n n_grid, _, nt01g, nt02g, nt01p, nt02p = self.state\n\n if util.SHOW_INFO:\n print(\"********************\")\n score = self.problem.get_score()\n info = \"Player {0} choosed action: {1}, score: {2}\".format(cur_player,\n act,\n score)\n print(info)\n for line in n_grid:\n print(line)\n print(\"********************\")\n self.state1 = (n_grid, nt01g)\n self.state2 = (n_grid, nt02g)\n self.turn += 1\n self.actions.append(act)\n\n current_view.update(n_grid, [nt01p, nt02p])\n\n if self.problem.is_goal_state(self.state):\n print(\"Game Ended\")\n if util.SHOW_INFO:\n grid, _, _, _, _, _ = self.state\n for line in grid:\n print(line)\n\n score = self.problem.get_score()\n final_info = self.problem.get_game_info(self.state)\n if util.SHOW_INFO:\n self.print_friendly_summary(final_info)\n else:\n print(\"Final score: {0}\".format(score))\n current_view.print_results(final_info, util.GAME_MONEY)\n return score\n return None",
"def step(self):\n\n self.timestep += 1\n self.historyLayer.step()",
"def remember(self, experience):\n self.replay_buffer.add(experience)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
First, reset the smart start pathing state. Then check whether smart start will randomly happen this episode; if so, set it up (but do not return early). Finally, tell the base_agent that a new episode is starting.
|
def start_new_episode(self, state):
self.smart_start_pathing = False
self.smart_start_path = None
if np.random.rand() <= self.eta: #eta is probability of using smartStart
start_time = time.time()
self.smart_start_path = self.get_smart_start_path() # new state to navigate to
end_time = time.time()
if self.smart_start_path: #ensure path exists
if self.print_ss_stuff:
elapsed_time = end_time - start_time
print("Calculate Smart Start Path Time: " + str(elapsed_time), end='')
print("\npath exists")
# let neural network dynamics model based controller load the path
self.nnd_mb_agent.start_new_episode_plan(state, self.smart_start_path)
if not self.nnd_mb_agent.close_enough_to_goal(state): #ensure goal hasn't already been reached
self.smart_start_pathing = True #this start smart start navigation
if self.print_ss_stuff:
print("SMART_START START!!!")
self.agent.start_new_episode(state)
self.replay_buffer.start_new_episode(self)
|
[
"def episode_start(self, parameters=None):\n\n # reset internal initial state\n self.goal_count = 0\n self.value = randint(self.min, self.max)\n\n # print out a message for our first episode\n if not self.started:\n self.started = True\n print('started.')\n\n # return initial external state\n return {\"value\": self.value}",
"def run_agent_on_episode(self, single_episode_env: Episode):\n # An implementation of a random agent\n # YOUR CODE GOES HERE\n _ = single_episode_env.reset()\n done = False\n while not done:\n random_act = single_episode_env.action_space.sample()\n obs, reward, done, info = single_episode_env.step(random_act)",
"def start_episode(self) -> State:\n pass",
"def step(self):\n self.happy = 0 # Reset counter of happy agents\n self.segregation = 0 # Reset counter of segregated agents\n self.schedule.step()\n # collect data\n self.datacollector.collect(self)\n\n # 여기서 terminate 하는거 manage\n if self.happy == self.schedule.get_agent_count():\n self.running = False",
"def train_agent(self,\n episode: int) -> None:\n if episode % 10 == 0:\n self.train_critic(episode)\n self.critic_updates += 1\n\n if self.critic_updates == 3:\n self.train_actor()\n self.critic_updates = 0\n\n return",
"def _did_reset(self):\n # use this method to access the RAM of the emulator \n # and perform setup for each episode. \n # the method returns None\n self._screen_x_last = self._screen_x\n self._lives_last = self._lives\n self._time_last = self._time\n self._hp_last = self._hp\n pass",
"def begin_episode(self, observation):",
"def test_autosample_recover(self):\n # First verify the happy path. We start sampling, stop then reset\n # On reinit state should still be command mode\n self.assert_initialize()\n self.assert_stop_sampling()\n\n log.debug(\"stop data set agent\")\n self.stop_dataset_agent_client()\n\n log.debug(\"restart data set agent\")\n self.init_dataset_agent_client(bootmode='restart')\n self.assert_state_change(ResourceAgentState.COMMAND, 10)\n\n # Now start sampling and then just reset the instrument agent.\n # When we reinitialize go_active should put us in streaming mode.\n self.assert_start_sampling()\n self.stop_dataset_agent_client()\n\n self.init_dataset_agent_client(bootmode='restart')\n self.assert_state_change(ResourceAgentState.STREAMING, 10)",
"def gym_episode_start(self, config: Dict[str, Any]):\n observation = self._env.reset()\n\n return observation",
"def testDisarmedStartShot(self):\n whichShots = [shots.APP_SHOT_ORBIT, shots.APP_SHOT_CABLECAM]\n\n self.mgr.client = 5\n self.mgr.vehicle.armed = False\n self.mgr.last_ekf_ok = True\n\n for i in whichShots:\n self.mgr.buttonManager.getFreeButtonMapping = Mock(return_value = (i, -1))\n self.mgr.currentShot = shots.APP_SHOT_NONE\n self.mgr.buttonManager.handleButtons((btn_msg.ButtonA, btn_msg.Press))\n self.assertEqual(self.mgr.currentShot, shots.APP_SHOT_NONE)\n packetDisallow = struct.pack('<III', app_packet.SOLO_SHOT_ERROR, 4, app_packet.SHOT_ERROR_UNARMED)\n self.mgr.appMgr.sendPacket.assert_any_call(packetDisallow)",
"def reset_agent(self, mode=None):\n if self.start_ind is not None:\n # Spawn the agent at the start state\n self.x = self.get_coords(self.start_ind)\n else:\n # Spawn the agent not too close to the goal\n self.x = self.get_random_pos(self.grid_free_index)\n while np.sum(np.square(self.x - self.g[0,:])) < 0.5:\n self.x = self.get_random_pos(self.grid_free_index)",
"def _warmup(self, env, warmup_steps):\n \n print(f'Warmup: random actions for {warmup_steps} steps to pre-fill experience')\n done = False\n for step in range(warmup_steps):\n if (step == 0) | done: state = env.reset()\n\n # Select random action\n action = np.random.randint(env.action_space.n)\n \n # Execute action, see result (new state, reward and if episode is done)\n state_next, reward, done, _ = env.step(action)\n\n # Save transition\n self._store_transition(state, action, reward, state_next, done)\n state = state_next\n\n self.warmup_finished = True",
"def onNewEpisode(self):\n self.consoleMsg('---> Starting Episode {}/{} <---'.format(self.currentEpisode,self.nEpisodes),topBorder=True)\n if self.resetEnvOnNewEpisode: self.envReset()\n self.done = False\n self.episodeStartTime = time.time()\n self.onNewEpisode_user()",
"def initialize_episode(self, physics):\n # Random joint angles:\n randomizers.randomize_limited_and_rotational_joints(physics, self.random)\n # Random target position.\n close_target = self.random.rand() < .2 # Probability of a close target.\n target_box = .3 if close_target else 2\n xpos, ypos = self.random.uniform(-target_box, target_box, size=2)\n physics.named.model.geom_pos['target', 'x'] = xpos\n physics.named.model.geom_pos['target', 'y'] = ypos\n physics.named.model.light_pos['target_light', 'x'] = xpos\n physics.named.model.light_pos['target_light', 'y'] = ypos\n\n super(Swimmer, self).initialize_episode(physics)",
"def _reset_sim(self):\n\n # pre-reset tasks\n self.gazebo.unpause_sim()\n self._check_all_systems_are_ready()\n self._set_init_pose()\n self.gazebo.pause_sim()\n\n # reset the gazebo\n #self.gazebo.reset_sim()\n\n self.gazebo.clear_all_spawned_models()\n # TODO: sdf_model should randomly change\n sdf_model = 'sample'\n self.gazebo.spawn_sdf_model(sdf_model, Pose())\n\n # set environment variables each time we reset\n self._init_env_variables()\n\n # check if everything working fine after reset\n self.gazebo.unpause_sim()\n self._check_all_systems_are_ready()\n self.gazebo.pause_sim()",
"def episode_start(self, config: Dict[str, Any] = None) -> None:\n log.info(\"- - - - - - - - - - - - - - - - - - -- - - - - -- \")\n log.info(\"-- EPISODE {} START-- \".format(self.episode_count))\n\n if config is not None:\n self._iteration_limit = config.get(\n \"episode_iteration_limit\", self._iteration_limit)\n\n if config is not None:\n self._skip_frame = config.get(\n \"skip_frame\", self._skip_frame) \n\n self.finished = False\n self.iteration_count = 0\n self.episode_reward = 0\n self.last_reward = 0\n\n # reset the environment and set the initial observation\n observation = self.gym_episode_start(config)\n self.gym_to_state(observation)",
"def autospawnRandom(self, dt):\n if not self.paused:\n choice = random.randint(0, 1)\n if choice:\n self.spawnMob(\"Q\", free=True)\n else:\n self.spawnMob(\"E\", free=True)",
"def initialize_episode(self):\r\n self.current_slot_id = 0\r\n self.phase = 0\r\n self.request_set = ['title', 'instructor', 'classroom', 'schedule_str']\r\n # self.request_set = ['required_elective', 'sel_method', 'designated_for',\r\n # 'schedule_str', 'classroom', 'instructor',\r\n # 'title', 'serial_no']\r",
"def start_activity(self):\r\n\t\tself.markov = False\r\n\t\tself.N = 0\r\n\t\tself.pos_to_go = self.getPlaceToGo()\r\n\t\tif self.pos != self.pos_to_go:\r\n\t\t\tself.movements = self.getWay()\r\n\t\telse:\r\n\t\t\tself.movements = [self.pos]\r\n\t\ttime_in_state = self.timeActivity[self.getPeriod()][list(self.positionByState.keys()).index(self.state)]\r\n\t\tif self.timeActivityVariation:\r\n\t\t\ttime_in_state_variation = self.timeActivityVariation[self.getPeriod()][list(self.positionByState.keys()).index(self.state)]\r\n\t\t\tmu = 0\r\n\t\t\tsigma = time_in_state_variation/3\r\n\t\t\tif sigma:\r\n\t\t\t\ttime_in_state = time_in_state + np.random.normal(mu, sigma, 1)\r\n\t\tself.time_activity = (time_in_state*60)/self.model.clock.timeByStep"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Simulate growth for a set of community models.
|
def grow(
models: CommunityModelDirectory,
medium: pd.DataFrame,
tradeoff: float = 0.5,
threads: int = 1,
strategy: str = "pFBA",
) -> mw.core.GrowthResults:
if strategy == "minimal uptake":
strategy = "minimal imports"
model_folder = str(models.model_files.path_maker(model_id="blub")).replace(
"blub.pickle", ""
)
manifest = models.manifest.view(pd.DataFrame)
results = mw.grow(
manifest, model_folder, medium, tradeoff, threads, strategy=strategy
)
return results
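A minimal, hypothetical usage sketch of the wrapper above. The `reaction`/`flux` column layout follows micom's documented medium convention, and `models` is assumed to be a `CommunityModelDirectory` built earlier by the plugin; both names are assumptions, not confirmed by this snippet.

```python
# Hypothetical usage sketch; assumes grow() and its module-level imports
# (CommunityModelDirectory, mw) are already in scope.
import pandas as pd

# Growth medium table: micom's grow workflow expects 'reaction' and 'flux' columns.
medium = pd.DataFrame({
    "reaction": ["EX_glc__D_m", "EX_o2_m"],  # exchange reaction IDs (illustrative)
    "flux": [10.0, 20.0],                    # maximum import fluxes
})

# `models` would come from the plugin's earlier model-building step.
results = grow(models, medium, tradeoff=0.5, threads=4, strategy="pFBA")
print(results.growth_rates.head())  # GrowthResults also carries per-taxon exchanges
```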
|
[
"def grow(self, rate=1, years=1):\n\n for y in range(1, years+1):\n r = rate\n\n # all trees age one year\n self.age()\n\n # aging means root growth, check for new connections\n for t in self.trees:\n t.getnewneighbors()\n\n # birthing process using rate\n while randbelow(100) < r * 100:\n self.spawn()\n r -= 1\n \n self.communicate(plot=True)\n\n # output a graph with the name of the year as identification\n self.graph(name=y)\n print('YEAR ' + str(y))",
"def _fit_growth(self):\n print('fit::adding growth model')\n\n with self.my_model:\n ts = self.data['t'].values\n cpt = np.linspace(start=0, stop=self.changepoint_range * np.max(ts), num=self.n_changepoints + 1)[1:]\n A, ts = self._set_growth(ts, cpt)\n\n # create self.k = pm.Normal('k', 0, self.growth_prior_scale)\n self.check_reserved('k')\n setattr(self, 'k', pm.Normal('k', 0, self.growth_prior_scale, shape=1))\n self.growth_components.append('k')\n\n # create self.delta = pm.Laplace('delta', 0, self.changepoints_prior_scale, shape=self.n_changepoints)\n self.check_reserved('delta')\n setattr(self, 'delta', pm.Laplace('delta', 0, self.changepoints_prior_scale, shape=self.n_changepoints))\n self.growth_components.append('delta')\n\n # create self.m\n self.check_reserved('m')\n setattr(self, 'm', pm.Normal('m', 0, self.offset_prior_scale, shape=1)) # self.m = pm.Normal('m', 0, self.offset_prior_scale)\n self.growth_components.append('m')\n\n gamma = -cpt * self.delta\n trend = pm.Deterministic('trend', (self.k + tt.dot(A, self.delta)) * ts + (self.m + tt.dot(A, gamma)))\n return trend",
"def callGAModel(region):\n year = 2000\n # while(year <= 2006):\n execGaModel(year, region)\n year += 1",
"def test_single_mutant_growth(model, experiments):\n # Set minimal media\n info = experiments[\"growth\"][\"experiments\"][\"single_mutant_growth\"]\n set_medium(model, info[\"medium\"], experiments)\n \n # Get in vivo data for mutant growth\n mutant_growth_df = get_growth_data(info[\"data\"])\n\n prediction_dict = {\n \"FP\": 0, # False positives\n \"TP\": 0, # True positives\n \"FN\": 0, # False negatives\n \"TN\": 0, # True negative\n }\n wrong_predictions = []\n for index, row in mutant_growth_df.iterrows():\n with model as M:\n carbon_exchanges = row[\"Carbon exchange reactions\"].split(\",\")\n nitrogen_exchanges = row[\"Nitrogen exchange reactions\"].split(\",\")\n M = set_carbon_and_nitrogen_uptake(M, carbon_exchanges+nitrogen_exchanges)\n M = _knock_out_genes(row[\"Genes\"], M)\n try:\n s = M.optimize()\n except:\n s = None\n print(index, end = \"\\t\")\n correct_prediction = classify_prediction(s, row[\"In vivo growth\"], prediction_dict, NO_GROWTH_THRESHOLD)\n if not correct_prediction:\n _print_wrong_prediction(row, s, M)\n wrong_predictions.append(\"{0}: {1}\".format(index, row[\"Genes\"]))\n\n FN_and_TN = (prediction_dict[\"TN\"] >= 9) and (prediction_dict[\"FN\"] <= 1)\n FP_and_TP = (prediction_dict[\"FP\"] == 0) and (prediction_dict[\"TP\"] >= 6)\n ann = test_single_mutant_growth.annotation\n ann[\"message\"] = wrapper.fill(\"\"\"Growth predictions for mutants from the litterature \n \\n(expected in parenthesis):\n \\nTP: {0}(6)\\t FN: {1}(1) \n \\nFP: {2}(0)\\t TN: {3}(9)\n \\nThe prediction is wrong for the following mutants: \\n{4}\"\"\".format(\n *[prediction_dict[x] for x in [\"TP\", \"FN\", \"FP\", \"TN\"]], \", \".join(wrong_predictions)))\n ann[\"data\"] = (prediction_dict[\"TN\"]+prediction_dict[\"TP\"])/sum([prediction_dict[x] for x in [\"TP\", \"FN\", \"FP\", \"TN\"]])\n ann[\"metric\"] = (prediction_dict[\"TN\"]+prediction_dict[\"TP\"])/sum([prediction_dict[x] for x in [\"TP\", \"FN\", \"FP\", \"TN\"]])\n assert FN_and_TN and FP_and_TP, ann[\"message\"]",
"def computeEpistasis (model,objective_func = \"default\",heatmap = False, labels = False, export_matrix = False):\n #set model objective function\n if objective_func != \"default\":\n model.objective = [objective_func]\n \n #compute wild type growth rate\n solution = model.optimize()\n wt_grow = solution.objective_value\n \n ## knock-outs loops ##\n # initialize empty matricies\n rxns = len(model.reactions)\n single_ko = np.zeros((rxns))\n v1v2_grow = np.zeros((rxns,rxns))\n\n ## Single knockouts ##\n for i in range(rxns):\n #buffer\n upper_i = model.reactions[i].upper_bound\n lower_i = model.reactions[i].lower_bound\n # set upper and lower bounds to zero\n model.reactions[i].upper_bound = 0\n model.reactions[i].lower_bound = 0\n # solve model and record growth rate\n solution = model.optimize()\n single_ko[i] = solution.objective_value\n # return bounds to their previous state\n model.reactions[i].upper_bound = upper_i\n model.reactions[i].lower_bound = lower_i\n \n ## Combo knockout ##\n for i in range(rxns):\n for j in range(rxns):\n if j > i:\n #buffer\n upper_i = model.reactions[i].upper_bound\n lower_i = model.reactions[i].lower_bound\n upper_j = model.reactions[j].upper_bound\n lower_j = model.reactions[j].lower_bound\n # Set bounds on rxns to zero, now both are zero\n model.reactions[i].upper_bound = 0\n model.reactions[i].lower_bound = 0\n model.reactions[j].upper_bound = 0\n model.reactions[j].lower_bound = 0\n # Solve\n solution = model.optimize()\n v1v2_grow[i,j] = solution.objective_value\n # return them to what they were\n model.reactions[i].upper_bound = upper_i\n model.reactions[i].lower_bound = lower_i\n model.reactions[j].upper_bound = upper_j\n model.reactions[j].lower_bound = lower_j\n\n #adjusting matricies \n v1_grow = single_ko + np.zeros((rxns,rxns))\n v2_grow = np.transpose(v1_grow)\n np.fill_diagonal(v1v2_grow,single_ko)\n \n # distribution of epistatic interactions\n epistasis = (v1v2_grow/wt_grow) - ((v1_grow/wt_grow) * (v2_grow/wt_grow))\n ep_dist = epistasis[np.triu_indices(rxns,1)]\n epistasis_full = np.triu(epistasis)+np.rot90(np.fliplr(np.triu(epistasis,1)))\n \n # heatmap\n if heatmap:\n plt.figure(figsize = (12,9))\n if labels:\n reactions = []\n for x in model.reactions:\n reactions.append(x.id)\n sns.heatmap(epistasis_full,xticklabels = reactions, yticklabels = reactions, cmap='mako', linecolor = 'dimgrey', linewidth = 0.005)\n else:\n sns.heatmap(epistasis_full, cmap='mako', linecolor = 'dimgrey', linewidth = 0.005)\n plt.show\n \n #export distribution and matrix\n if export_matrix:\n return(ep_dist,epistasis_full)\n \n return(ep_dist)",
"def generate(generations, population, nn_param_choices):\n\n optimizer = Optimizer(nn_param_choices)\n networks = optimizer.create_population(population)\n\n # Prepare an array to record average and best losses.\n loss_t = np.empty((generations,))\n loss_bt = np.empty((generations,))\n\n data = get_stock_data()\n\n # Evolve the generation.\n for i in range(generations):\n logging.info(\"***Doing generation %d of %d***\" %\n (i + 1, generations))\n\n # Train and get loss for networks.\n train_networks(networks, data)\n\n # Get and record the average loss for this generation.\n average_loss = get_average_loss(networks)\n loss_t[i] = average_loss\n\n # Get and record the best loss for this generation.\n best_loss = get_best_loss(networks)\n loss_bt[i] = best_loss\n\n # Print out the average and best loss of each generation.\n logging.info(\"Average Generation loss: %.3f\" % (average_loss))\n logging.info('-'*80)\n logging.info(\"Best Generation loss: %.3f\" % (best_loss))\n logging.info('-'*80)\n\n # Evolve, except on the last iteration.\n if i != (generations - 1):\n # Do the evolution.\n networks = optimizer.evolve(networks)\n else:\n pass\n\n # Record elapsed time\n end_time = time.time()\n time_elapsed = end_time - start_time\n minutes, seconds = divmod(time_elapsed, 60)\n hours, minutes = divmod(minutes, 60)\n print(\" Total running time was that of %d h : %d m : %d s\" % (hours, minutes, seconds))\n\n # Sort our final population.\n networks = sorted(networks, key=lambda x: x.loss, reverse=False)\n\n # Print best network\n print \"Best Performing Network:\"\n print network_arch(networks[0].network)\n print \"Network Loss:\"\n print networks[0].loss\n\n # Save best network to hdf5 and JSON\n compile_model(networks[0].network).save(\"bestGeneticModel.hdf5\")\n print(\"Saved best model to disk as HDF5\")\n model_json = compile_model(networks[0].network).to_json()\n with open(\"model.json\", \"w\") as json_file:\n json_file.write(model_json)\n print(\"Saved best model to disk as JSON\")\n\n # Print out the top 5 networks.\n print \"Top 5 Best Performing Networks:\"\n print_networks(networks[:5])\n\n # Make and print plot with average loss history\n plt.figure()\n plt.plot(np.arange(1, generations + 1, 1), loss_t)\n plt.xlabel('Generation')\n plt.ylabel('Average loss')\n plt.grid(True)\n\n plt.figure()\n plt.plot(np.arange(1, generations + 1, 1), loss_bt)\n plt.xlabel('Generation')\n plt.ylabel('Best loss')\n plt.grid(True)\n\n plt.show()",
"def update_all_kmer_models(self):\n for k in range(1024):\n self.update_kmer(k, self.samples[k])",
"def simulate(self):\n self.hours += 1\n for person in self.people:\n person.update()\n self.update_infections_fast()\n for method in self.on_update_methods:\n method(self)",
"def run_simulations():\r\n plot_type = LOGLOG # we choose the logarithmic plot\r\n days = 70\r\n inc_0 = greedy_boss(days, 0, plot_type)\r\n inc_500 = greedy_boss(days, 500, plot_type)\r\n inc_1000 = greedy_boss(days, 1000, plot_type)\r\n inc_2000 = greedy_boss(days, 2000, plot_type)\r\n simpleplot.plot_lines(\"Greedy boss\", 600, 600, \"days\", \"total earnings\", [inc_0, inc_500, inc_1000, inc_2000], False, [\"Bribe increment = 0\", \"Bribe increment = 500\", \"Bribe increment = 1000\", \"Bribe increment = 2000\"])\r\n simpleplot._block()",
"def evolve( population, retain =0.2 ,random_select =0.05 , mutate =0.1):\n graded = [( x.get_fitness(), x) for x in population ]\n #print(graded)\n graded = [ x[1] for x in sorted( graded, reverse=True ) ]\n retain_length = int(len(graded) * retain )\n parents = graded [:retain_length]\n\n # randomly add other individuals to promote genetic\n # diversity\n for individual in graded [retain_length:]:\n if random_select > random.random():\n parents.append( individual )\n \n # crossover parents to create offspring\n desired_length = len( population ) - len( parents )\n children = []\n while len( children ) < desired_length :\n male = random.randint(0, len( parents) - 1)\n female = random.randint(0, len( parents) - 1)\n if male != female :\n male = parents[male]\n female = parents[female]\n half = int(len(male.ABCD) / 2)\n child = wing(male.ABCD[:half] + female.ABCD[half:])\n children.append( child )\n\n for individual in children :\n if mutate > random.random():\n pos_to_mutate = random.randint(0, len(individual.ABCD) - 1)\n # this mutation is not ideal , because it\n # restricts the range of possible values ,\n # but the function is unaware of the min/max\n # values used to create the individuals\n individual.ABCD[ pos_to_mutate ] = random.uniform(min( individual.ABCD ), max( individual.ABCD ))\n \n parents.extend( children )\n return parents",
"def run(self):\n for mod in self.models:\n pYields = mod.getYields(self.validation_freq) #estimated percent yields\n for a in self.alphas:\n n_days = self.stock.n_days_test #number of days we're investing\n dailyCap = self.principal/n_days \n principal, acctStock, cash = self.principal, 0, 0\n acctValue = principal\n snapshots, investments, cashStock = [], [], []\n for i in range(n_days):\n cash = dailyCap #get dailyCap in cash to spend for the day\n principal -= dailyCap \n stockPrice = self.stock.getDayPriceOpen(i)\n stockPriceClose = self.stock.getDayPriceClose(i)\n cash, acctStock, moneySpent = self.buyOrSell(pYields[i], cash, acctStock, stockPrice, alpha=a, beta=a)\n principal += (dailyCap-moneySpent) #return money not spent to principal\n acctValue = principal+acctStock*stockPriceClose #compute account value\n snapshots.append(acctValue)\n investments.append(moneySpent)\n cashStock.append((cash, acctStock*stockPriceClose))\n if self.debug:\n print(\"[debug] percent yield: %f\\n[debug] spent %f on %s at stock price %f\\n[debug] account value is now %f\" %(py, moneySpent, str(stock.testData.index[i]), stockPriceClose, acctValue))\n print(\"[debug] principal: %f dailyCap: %f cash available: %f stock owned: %f\" % (principal, dailyCap, cash, acctStock)) \n mod.addPerformance(a, snapshots)\n mod.addInvestments(a, investments)\n mod.addYield(a, 100*(acctValue-self.principal)/self.principal)\n mod.addCashStock(a, cashStock)\n print(\"[info] Investing $\" + str(self.principal) + \" in \" + self.stock.name + \" using \" + mod.name + ' with alpha=' +str(a)+ ' from ' + str(self.stock.startDate) + ' to ' + str(self.stock.endDate) + ' yielded %' + str(100*(acctValue-self.principal)/self.principal))\n print(\"[info] Total mean error: \" + str(mod.meanError))\n print(\"[info] Stock yield over timeframe: \" + str(self.stockYield))",
"def _update_G(self):\n for module in self.modules:\n g = self.computeG(self.m_g[module], module, self.batch_averaged)\n if self.steps == 0:\n self._init_G(g, module)\n update_running_avg(g, self.m_G[module], self.factor_decay)",
"def mutate(self):\n # chances[0] = chance to mutate property\n chances = [random.randint(1, 100), random.randint(1, 100)]\n for x in range(0, len(self._modules)):\n # chances[2-N] = chance to mutate each module for N modules\n chances.append(random.randint(1, 100))\n\n # chances[N+1] = chance to merge two properties of same type\n chances.append(random.randint(1, 100))\n\n # Mutate Solver properties with set chance\n if chances[0] <= self.property_mutation_chance:\n self.mutateProperty()\n\n # Mutate each module with a set chance to mutate\n for x in range(0, len(self._modules)):\n if chances[x + 1] <= self.module_mutation_chance:\n if random.randint(1, 100) <= self.swap_module_chance:\n if not self.unique:\n self._modules[x] = createUniqueModule(\n \"Fitness\", [self._modules[x].subtype])\n else:\n present_subtypes = [y.subtype\n for y in self._modules]\n self._modules[x] = createUniqueModule(\n \"Fitness\", present_subtypes)\n else:\n self._modules[x].mutate()\n\n if chances[-1] <= self._merge_module_chance:\n self.mergeModules()\n \n # Currently, all solvers have a 100% chance of calling their fitness_calculator mutate\n # Most fitness calculator mutates do nothing, those that did should have their own internal probabilities.\n self._fitness_calculator.mutate()",
"def train(self):\n for ens_mem in self.ensemble_members:\n ens_mem.train()",
"def test_set_Mie_exponents_trivial(tmpdir):\r\n\r\n # Load in cgmodel\r\n cgmodel = pickle.load(open(f\"{data_path}/stored_cgmodel.pkl\", \"rb\" )) \r\n \r\n # Set Mie exponents:\r\n n = 12\r\n m = 6\r\n \r\n cgmodel_new = CGModel(\r\n particle_type_list=cgmodel.particle_type_list,\r\n bond_lengths=cgmodel.bond_lengths,\r\n bond_force_constants=cgmodel.bond_force_constants,\r\n bond_angle_force_constants=cgmodel.bond_angle_force_constants,\r\n torsion_force_constants=cgmodel.torsion_force_constants,\r\n equil_bond_angles=cgmodel.equil_bond_angles,\r\n torsion_periodicities=cgmodel.torsion_periodicities,\r\n torsion_phase_angles=cgmodel.torsion_phase_angles,\r\n nonbond_repulsive_exp=n,\r\n nonbond_attractive_exp=m,\r\n include_nonbonded_forces=cgmodel.include_nonbonded_forces,\r\n include_bond_forces=cgmodel.include_bond_forces,\r\n include_bond_angle_forces=cgmodel.include_bond_angle_forces,\r\n include_torsion_forces=cgmodel.include_torsion_forces,\r\n constrain_bonds=cgmodel.constrain_bonds,\r\n sequence=cgmodel.sequence,\r\n positions=cgmodel.positions,\r\n monomer_types=cgmodel.monomer_types,\r\n )\r\n \r\n native_structure_file=f\"{structures_path}/medoid_0.dcd\"\r\n\r\n native_traj = md.load(native_structure_file,top=md.Topology.from_openmm(cgmodel.topology)) \r\n \r\n positions = native_traj.xyz[0] * unit.nanometer\r\n \r\n output_directory = tmpdir.mkdir(\"output\")\r\n \r\n # Minimize energy of native structure\r\n positions, PE_start, PE_end, simulation = minimize_structure(\r\n cgmodel_new,\r\n positions,\r\n output_file=f\"{output_directory}/medoid_min.dcd\",\r\n )\r\n \r\n # Check that the energy matches the standard LJ 12-6 potential:\r\n # These should be equal to ~4 decimal places (1 Joule/mol)\r\n PE_start_LJ_12_6 = -382.19839163767057\r\n PE_end_LJ_12_6 = -500.99943208890255\r\n \r\n PE_start_Mie_12_6 = PE_start.value_in_unit(unit.kilojoule_per_mole)\r\n PE_end_Mie_12_6 = PE_end.value_in_unit(unit.kilojoule_per_mole)\r\n \r\n assert_almost_equal(PE_start_LJ_12_6,PE_start_Mie_12_6,decimal=3)\r\n assert_almost_equal(PE_end_LJ_12_6,PE_end_Mie_12_6,decimal=3)",
"def main():\n generation = 0\n best_accuracy = 0.0\n networks = [NeuralNetwork() for _ in range(population)]\n best_weights = []\n best_biases = []\n\n \"\"\"Main genetic loop\"\"\"\n while best_accuracy < 0.9 and generation < 100:\n generation += 1\n print(\"========== Generation number \", generation, \" ==========\")\n\n \"\"\"Fitness in genetic - nn accuracy and choice of best one\"\"\"\n for nn in networks:\n current_accuracy = nn.calculate_accuracy(x_train.T, y_train)\n if current_accuracy > best_accuracy:\n best_accuracy = current_accuracy\n print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ Best Accuracy: ', best_accuracy)\n best_weights.clear()\n best_biases.clear()\n for layer in nn.layers:\n best_weights.append(layer.weights)\n best_biases.append(layer.biases)\n\n \"\"\"Sort networks by fitness function value\"\"\"\n networks = sorted(networks, key=lambda z: z.accuracy, reverse=True)\n print(networks[0].layers[0].weights)\n\n \"\"\"Pick top individuals and make crossovers also with some chance to mutation\"\"\"\n new_generation = []\n for i in range(top_pick):\n for j in range(population//top_pick):\n nn1 = copy.deepcopy(networks[i])\n nn2 = copy.deepcopy(networks[random.randint(0, top_pick)])\n locus = random.randint(0, 1)\n for idx, layer in enumerate(nn1.layers):\n for index, neuron in enumerate(layer.weights):\n tmp = neuron[locus]\n neuron[locus] = nn2.layers[idx].weights[index][locus]\n nn2.layers[idx].weights[index][locus] = tmp\n if random.randint(0, 100) < mutation_chance:\n # print(\"MUTATION!\")\n # layer.weights[locus] = np.negative(layer.weights[locus])\n # layer.weights[locus] = np.random.randn(np.size(layer.weights[locus]))\n neuron[locus] = np.random.randn()\n new_generation.append(nn1)\n new_generation.append(nn2)\n networks.clear()\n networks = copy.deepcopy(new_generation)\n\n print(\"Selection accuracy: \")\n print(best_accuracy)\n\n \"\"\"Create new network and set start weights and biases to the best from genetic\"\"\"\n genetic_nn = NeuralNetwork()\n for idx, layer in enumerate(genetic_nn.layers):\n layer.weights = best_weights[idx]\n layer.biases = best_biases[idx]\n genetic_nn.train(x_train, y_train, 10, 10)\n genetic_nn.calculate_accuracy(x_train.T, y_train)\n\n print(\"Prediction accuracy: \")\n print(genetic_nn.accuracy)",
"def evaluate_features():\n # training set is from Stanford Sentiment Training Set\n training_set = parse_stanford(\"data/stanfordSentimentTreebank/stanfordSentimentTreebank/dictionary.txt\", \n \"data/stanfordSentimentTreebank/stanfordSentimentTreebank/sentiment_labels.txt\")\n # train weights for maxent model\n weights = train_maxent(training_set)\n # sort weights in descending order\n sorted_weights = { sentiment: sorted(weights[sentiment].iteritems(), \n key=lambda x:x[1], \n reverse=True) \n for sentiment in weights}\n\n # evaluate model for the top i weights, in this range (There should be # ~130000 weights total)\n for i in range(10000, 130000, 10000):\n # get the top i weights\n new_weights = {\"positive\": {}, \"negative\": {}, \"neutral\": {}}\n for sentiment in sorted_weights:\n new_weights[sentiment] = {w[0]:weights[sentiment][w[0]] \n for w in sorted_weights[sentiment][:i-1]}\n\n # load the episode that has gold standard features already assigned\n episode = parse_goldstandard(\"data/s1e9_gold.txt\", 1, 9)\n # calculate bag of words sentiments\n word_sentiments = parse_NRC(\"data/NRC-Emotion-Lexicon-v0.92/NRC-Emotion-Lexicon-v0.92/NRC-emotion-lexicon-wordlevel-alphabetized-v0.92.txt\")\n bag_of_words(episode, word_sentiments)\n # calculate maxent sentiments\n run_maxent(episode, new_weights)\n\n # evaulate maxent and bag_of_words sentiments against baseline\n print \"%s max_ent vs gold: %s\" % (i, compare_scores(episode, \n score1=\"maxent_score\", \n score2=\"gold_score\"))\n print \"%s bow vs gold: %s\" % (i, compare_scores(episode, \n \"bow_score\", \n score2=\"gold_score\"))",
"def evolve(self, n):\n assert n > 0, \"Cannot evolve 0 or less generations!\"\n\n # Prepare the genomes\n self.reset()\n \n # Whether the performance details should be printed\n verbose = False\n\n for i in range(n):\n\n # Print information every 50th time \n if i % 50 == 0:\n print(\"Generation {0}\".format(i))\n verbose = True\n else:\n verbose = False\n \n # Evolve!\n self.evolve_step(verbose = verbose)\n\n # Termination condition that depends on \n # the maximum performance of a Gym environment\n if i % 50 == 0 and self.get_average() > 180.0:\n break\n\n self.selection()\n self.mutation()\n \n # Reset the score every generation for better performance metric\n self.reset()",
"def evolve(self):\n self.update_fitness()\n global_fitness_sum = float(sum(self.__fitness_sums))\n\n if global_fitness_sum == 0:\n # No progress, mutate everybody\n for species in self.__species:\n for genome in species:\n genome.mutate()\n\n else:\n # 1. Eliminate lowest performing genomes per species\n # 2. Repopulate\n self.cull_genomes(False)\n\n children = []\n for idx, species in enumerate(self.__species):\n ratio = self.__fitness_sums[idx] / global_fitness_sum\n offspring = floor(ratio * (self.__population - self.population))\n\n for j in range(int(offspring)):\n children.append(breed(species))\n\n for genome in children:\n self.classify_genome(genome)\n\n self.__generation += 1"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GetConsumerAction. Get details about a specific consumer action.
|
def get_consumer_action(self, consumer_id, consumer_action_id, publisher_id=None):
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
if consumer_action_id is not None:
route_values['consumerActionId'] = self._serialize.url('consumer_action_id', consumer_action_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ConsumerAction', response)
|
[
"def list_consumer_actions(self, consumer_id, publisher_id=None):\n route_values = {}\n if consumer_id is not None:\n route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('[ConsumerAction]', self._unwrap_collection(response))",
"def get(cls, context, action_id):\n if utils.is_int_like(action_id):\n db_action = cls.dbapi.get_action_description_by_id(\n context, action_id)\n action = ActionDescription._from_db_object(cls(context), db_action)\n return action\n else:\n raise exception.InvalidIdentity(identity=action_id)",
"def choose_action(self):\n if(len(self._action_list) == 0):\n self._logger.write(\"Error! The action_list is empty\")\n else:\n try:\n return self._action_list[0]\n except Exception as e:\n self._logger.write(\"Error! could not fetch an action:\\n %s\" % e)",
"def getAction(self) -> \"SoHandleEventAction *\":\n return _coin.SoEventCallback_getAction(self)",
"def get_pbp_action(self, event_id, action_id):\n\n path = self._db_keywords[\"root\"] + \\\n str(int(event_id)) + \\\n self._db_keywords[\"actions\"] + \"/\" + str(int(action_id))\n\n return self._rtdb.reference(path).get()",
"def getAction(self, nameOrAction):\r\n\t\tif isinstance(nameOrAction, Action):\r\n\t\t\treturn nameOrAction\r\n\t\telse:\r\n\t\t\treturn self.actions[nameOrAction]",
"def get_action(self, action_id):\n return Action.get_object(api_token=self.token, action_id=action_id,\n mocked=self.mocked)",
"def getActionInfo(self, action_chain, object=None, check_visibility=0,\n check_condition=0):\n action_infos = self.listActionInfos(action_chain, object,\n check_visibility=check_visibility,\n check_permissions=False,\n check_condition=check_condition)\n if not action_infos:\n if object is None:\n provider = self\n else:\n provider = object\n msg = 'Action \"{}\" not available for {}'.format(\n action_chain, '/'.join(provider.getPhysicalPath()))\n raise ValueError(msg)\n for ai in action_infos:\n if ai['allowed']:\n return ai\n raise AccessControl_Unauthorized('You are not allowed to access any '\n 'of the specified Actions.')",
"def get_action(self, action):\n if self.parsed_workflow['action'].get(action, None):\n return self.parsed_workflow['action'][action]\n else:\n log.fail(\"Action '{}' doesn\\'t exist.\".format(action))",
"def __get_action(self, index):\n return self.actions[index]",
"def getActionObject(self, action):\n # separate cataegory and id from action\n sep = action.rfind('/')\n if sep == -1:\n raise ValueError('Actions must have the format <category>/<id>.')\n category, id = action[:sep], action[sep+1:]\n\n # search for action and return first one found\n for ai in self.listActions():\n try:\n if id == ai.getId() and category == ai.getCategory():\n return ai\n except AttributeError:\n continue\n\n # no action found\n return None",
"def get_action(self, key):\n saved_actions = self.get('saved_actions', {})\n return saved_actions.get(\"actions\", {}).get(key)",
"def get_consumer(self, consumer_id, publisher_id=None):\n route_values = {}\n if consumer_id is not None:\n route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('Consumer', response)",
"def get_action(self, stateaction):\n return self.get_component(stateaction, self.action_space)",
"def get_action(self, action):\n # If the action is a callable, just use it.\n if callable(action):\n func = action\n action = action.__name__\n\n # Next, look for a method. Grab it off self.__class__ to get an unbound\n # method instead of a bound one; this ensures that the calling\n # conventions are the same for functions and methods.\n elif hasattr(self.__class__, action):\n func = getattr(self.__class__, action)\n\n # Here was some code for global admin site actions\n\n if hasattr(func, 'short_description'):\n description = func.short_description\n else:\n description = capfirst(action.replace('_', ' '))\n return func, action, description",
"def _get_action(self, action, conn):\n if not hasattr(conn, action):\n raise AttributeError\n return getattr(conn, action)",
"def getAction(credentials, name, **opts):\n\n tier = opts.get(\"tier\") or opts.get(\"host\") or None\n session = _Control.get_session(credentials, tier)\n if isinstance(session, Session):\n return session.get_action(name)\n command = etree.Element(\"CdrGetAction\")\n etree.SubElement(command, \"Name\").text = name\n for response in _Control.send_command(session, command, tier):\n if response.node.tag == command.tag + \"Resp\":\n name = get_text(response.node.find(\"Name\"))\n flag = get_text(response.node.find(\"DoctypeSpecific\"))\n comment = get_text(response.node.find(\"Comment\"))\n return Action(name, flag, comment)\n raise Exception(\";\".join(response.errors) or \"missing response\")\n raise Exception(\"missing response\")",
"def get_by_type(cls, context, action_type):\n\n db_action = cls.dbapi.get_action_description_by_type(\n context, action_type)\n action = cls._from_db_object(cls(context), db_action)\n return action",
"def getAction(self) -> \"SoAction *\":\n return _coin.SoState_getAction(self)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
ListConsumerActions. Get a list of consumer actions for a specific consumer.
|
def list_consumer_actions(self, consumer_id, publisher_id=None):
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ConsumerAction]', self._unwrap_collection(response))
|
[
"def list_consumers(self):\n endpoint = self.build_url(\"/consumers\")\n return self.request('get', endpoint)",
"def get_consumer_action(self, consumer_id, consumer_action_id, publisher_id=None):\n route_values = {}\n if consumer_id is not None:\n route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')\n if consumer_action_id is not None:\n route_values['consumerActionId'] = self._serialize.url('consumer_action_id', consumer_action_id, 'str')\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('ConsumerAction', response)",
"def get_board_actions(board_id, data=dict()):\n actions = api(\"GET\", \"boards/%s/actions\" % board_id, data)\n return actions",
"def list_consumers(self, publisher_id=None):\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',\n version='5.1',\n query_parameters=query_parameters)\n return self._deserialize('[Consumer]', self._unwrap_collection(response))",
"def available_actions(self):\n\n #private list holding the available actions\n self.__available_actions = []\n return self.__get_available_actions()",
"def get_card_actions(card_id, data=dict()):\n actions = api(\"GET\", \"cards/%s/actions\" % card_id, data)\n return actions",
"def get_all_consumers(self):\n return self.consumers",
"def test_consumers_list_secret_with_consumers(self):\n secret_ref = self._create_test_secret()\n\n consumers = [{\n 'service': 'service1',\n 'resource_type': 'type1',\n 'resource_id': 'id1'}, {\n 'service': 'service2',\n 'resource_type': 'type2',\n 'resource_id': 'id2'}]\n\n for consumer in consumers:\n _ = self.barbicanclient.secrets.register_consumer(\n secret_ref,\n service=consumer['service'],\n resource_type=consumer['resource_type'],\n resource_id=consumer['resource_id']\n )\n\n consumers_list = self.barbicanclient.secrets.list_consumers(\n secret_ref)\n\n for elem in range(len(consumers)):\n self.assertTrue(\n consumers_list[elem].service ==\n consumers[elem]['service'])\n self.assertTrue(\n consumers_list[elem].resource_type ==\n consumers[elem]['resource_type'])\n self.assertTrue(\n consumers_list[elem].resource_id ==\n consumers[elem]['resource_id'])\n\n self.cleanup.delete_entity(secret_ref)\n self.barbicanclient.secrets.delete(secret_ref, True)",
"def order_actions(context):\n types_tool = api.portal.get_tool('portal_types')\n inbox_fti = types_tool['opengever.inbox.inbox']\n\n actions = inbox_fti._actions\n\n ordered_actions = []\n for action_id in ACTIONS_ORDER:\n action = [a for a in actions if a.id == action_id][0]\n ordered_actions.append(action)\n\n remaining_actions = [a for a in actions if a.id not in ACTIONS_ORDER]\n\n all_actions = ordered_actions + remaining_actions\n inbox_fti._actions = all_actions",
"def get_actions(self) -> List[GameAction]:\n pass",
"def legal_actions(self, state: State, role: str) -> List[Action]:\n pass",
"def getActions(self) -> List[docking.action.DockingAction]:\n ...",
"def test_actions_list_creation(self) -> None:\n\n route = DeliveryRoute()\n action_1 = route._get_actions_list(0, 5, 'E')\n action_2 = route._get_actions_list(4, 1, 'W')\n action_3 = route._get_actions_list(4, 11, 'W')\n\n self.assertEqual(action_1, ['E' for _ in range(abs(0-5))])\n self.assertEqual(action_2, ['W' for _ in range(abs(4-1))])\n self.assertEqual(action_3, ['W' for _ in range(abs(4-11))])",
"def list_actions(self, actions):\n i = 1\n print()\n for action in actions:\n print(f\"{i}: {action}\")\n i += 1",
"def test_consumers_list_secret_without_consumers(self):\n secret_ref = self._create_test_secret()\n\n consumers_list = self.barbicanclient.secrets.list_consumers(\n secret_ref)\n self.assertTrue(len(consumers_list) == 0)\n\n self.cleanup.delete_entity(secret_ref)\n self.barbicanclient.secrets.delete(secret_ref, True)",
"def get_consumers(self):\n pass",
"def authorized_actions(self, actor, resource, allow_wildcard=False) -> List[Any]:\n results = self.policy.query_rule(\"allow\", actor, Variable(\"action\"), resource)\n actions = set()\n for result in results:\n action = result.get(\"bindings\").get(\"action\")\n if isinstance(action, Variable):\n if not allow_wildcard:\n raise exceptions.OsoError(\n \"\"\"The result of authorized_actions() contained an\n \"unconstrained\" action that could represent any\n action, but allow_wildcard was set to False. To fix,\n set allow_wildcard to True and compare with the \"*\"\n string.\"\"\"\n )\n else:\n return [\"*\"]\n actions.add(action)\n\n return list(actions)",
"def enemy_actions(self, history):\n if history is None:\n return []\n return history['%sAction' % self.enemy].tolist()",
"def supported_actions(self):\n actions = []\n for section in self.actions:\n actions += self.actions[section].keys()\n actions += self.deprecated_actions\n return actions"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GetConsumer. Get a specific consumer service. Optionally filter out consumer actions that do not support any event types for the specified publisher.
|
def get_consumer(self, consumer_id, publisher_id=None):
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Consumer', response)
|
[
"def get_consumer_action(self, consumer_id, consumer_action_id, publisher_id=None):\n route_values = {}\n if consumer_id is not None:\n route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')\n if consumer_action_id is not None:\n route_values['consumerActionId'] = self._serialize.url('consumer_action_id', consumer_action_id, 'str')\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('ConsumerAction', response)",
"def test_get_consumer():\n\n consumer = get_consumer(\n cert_folder=CERT_FOLDER, service_uri=SERVICE_URI, topic_name=TOPIC_NAME\n )\n assert isinstance(consumer, KafkaConsumer)",
"def list_consumers(self, publisher_id=None):\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',\n version='5.1',\n query_parameters=query_parameters)\n return self._deserialize('[Consumer]', self._unwrap_collection(response))",
"def consumer(self) -> AIOKafkaConsumer:\n if self._consumer is None:\n raise ClientStoppedError()\n\n return self._consumer",
"def get_consumers(self):\n pass",
"def list_consumer_actions(self, consumer_id, publisher_id=None):\n route_values = {}\n if consumer_id is not None:\n route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('[ConsumerAction]', self._unwrap_collection(response))",
"def get_consumer():\n return KafkaConsumer(\n 'movielog1',\n bootstrap_servers=[\"fall2020-comp598.cs.mcgill.ca:9092\"],\n enable_auto_commit=False,\n auto_offset_reset=\"earliest\",\n value_deserializer=lambda x: x.decode('utf-8'))",
"def create_consumer(self, **kwargs):\n consumer = self._create(**kwargs)\n return consumer",
"def list_consumers(self):\n endpoint = self.build_url(\"/consumers\")\n return self.request('get', endpoint)",
"def get_all_consumers(self):\n return self.consumers",
"def create_consumer(host, port, topic):\n consumer = KafkaConsumer(\n topic,\n bootstrap_servers=f\"{host}:{port}\",\n client_id=\"system-metrics1\",\n security_protocol=\"SSL\",\n ssl_cafile=\"ca.pem\",\n ssl_certfile=\"service.cert\",\n ssl_keyfile=\"service.key\",\n value_deserializer=lambda m: json.loads(m.decode('utf-8')))\n consumer.subscribe([topic])\n return consumer",
"def _CreateConsumer(self, name):\n return Consumer(\n name, self, self.consumer_path_format % name, self.logger.name)",
"def get_task_consumer(self, connection, queues=None, **kwargs):\n return self.ConsumerSet(connection, from_dict=queues or self.queues,\n **kwargs)",
"def oauth_consumer(self):\n if type(self) == OAuthEngagement:\n return self.oauth_consumer\n try:\n return self.oauthengagement.oauth_consumer\n except OAuthConsumer.DoesNotExist:\n return None\n except OAuthEngagement.DoesNotExist:\n return None",
"def create(self):\n\n dependencies = self.create_dependencies()\n\n # Create the consumer.\n consumer = messaging.consuming.consumers.Simple(\n receiver=dependencies['receiver'],\n handler=dependencies['handler'],\n filters=dependencies['filters'])\n\n # Include blocking.\n consumer = messaging.consuming.consumers.Blocking(\n consumer=consumer,\n interval=self._properties['consumer']['interval'])\n\n # Include orchestration.\n logger_factory = Logger(properties=self._properties)\n logger = logger_factory.create()\n consumer = consuming.consumers.Orchestrating(consumer=consumer,\n logger=logger)\n\n return consumer",
"def create_consumer(\n self,\n group_id=None,\n server=\"127.0.0.1\",\n port=\"9092\",\n enable_auto_commit=True,\n auto_offset_reset=\"latest\",\n schema_registry_url=None,\n auto_create_topics=True,\n key_deserializer=None,\n value_deserializer=None,\n legacy=True,\n **kwargs\n ):\n if group_id is None:\n group_id = str(uuid.uuid4())\n\n if schema_registry_url and legacy:\n consumer = AvroConsumer({\n 'bootstrap.servers': '{}:{}'.format(server, port),\n 'group.id': group_id,\n 'enable.auto.commit': enable_auto_commit,\n 'allow.auto.create.topics': auto_create_topics,\n 'auto.offset.reset': auto_offset_reset,\n 'schema.registry.url': schema_registry_url,\n **kwargs})\n elif not legacy:\n consumer = DeserializingConsumer({\n 'bootstrap.servers': '{}:{}'.format(server, port),\n 'group.id': group_id,\n 'enable.auto.commit': enable_auto_commit,\n 'auto.offset.reset': auto_offset_reset,\n 'key.deserializer': key_deserializer,\n 'value.deserializer': value_deserializer,\n **kwargs})\n else:\n consumer = Consumer({\n 'bootstrap.servers': '{}:{}'.format(server, port),\n 'group.id': group_id,\n 'enable.auto.commit': enable_auto_commit,\n 'allow.auto.create.topics': auto_create_topics,\n 'auto.offset.reset': auto_offset_reset,\n **kwargs})\n\n self.consumers[group_id] = consumer\n return group_id",
"def _get_service(self):\n\n service = self._selector.get_service(0) # Don't wait\n if service is None:\n raise err.OctpServiceAllFault('Not one service is available!')\n\n return service",
"def __get_communication_service(args):\n print(\"\\nGet...\")\n\n acs_client = __get_communication_management_client()\n\n try:\n resource = acs_client.communication_service.get(args.resource_group_name, args.resource_name)\n __print_resource(resource)\n except HttpResponseError:\n print(\"Resource was not found.\")",
"def get(self,\n host,\n provider,\n ):\n return self._invoke('get',\n {\n 'host': host,\n 'provider': provider,\n })"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
ListConsumers. Get a list of available service hook consumer services. Optionally filter by consumers that support at least one event type from the specified publisher.
|
def list_consumers(self, publisher_id=None):
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',
version='5.1',
query_parameters=query_parameters)
return self._deserialize('[Consumer]', self._unwrap_collection(response))
|
[
"def list_consumers(self):\n endpoint = self.build_url(\"/consumers\")\n return self.request('get', endpoint)",
"def get_all_consumers(self):\n return self.consumers",
"def get_consumers(self):\n pass",
"def list_vhost_consumers(self, *, vhost: str = None):\n vhost = vhost if vhost is not None else self.vhost\n endpoint = self.build_url(\"/consumers/{vhost}\", vhost=vhost)\n return self.request('get', endpoint)",
"def list_consumer_actions(self, consumer_id, publisher_id=None):\n route_values = {}\n if consumer_id is not None:\n route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')\n query_parameters = {}\n if publisher_id is not None:\n query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('[ConsumerAction]', self._unwrap_collection(response))",
"def test_consumers_list_secret_with_consumers(self):\n secret_ref = self._create_test_secret()\n\n consumers = [{\n 'service': 'service1',\n 'resource_type': 'type1',\n 'resource_id': 'id1'}, {\n 'service': 'service2',\n 'resource_type': 'type2',\n 'resource_id': 'id2'}]\n\n for consumer in consumers:\n _ = self.barbicanclient.secrets.register_consumer(\n secret_ref,\n service=consumer['service'],\n resource_type=consumer['resource_type'],\n resource_id=consumer['resource_id']\n )\n\n consumers_list = self.barbicanclient.secrets.list_consumers(\n secret_ref)\n\n for elem in range(len(consumers)):\n self.assertTrue(\n consumers_list[elem].service ==\n consumers[elem]['service'])\n self.assertTrue(\n consumers_list[elem].resource_type ==\n consumers[elem]['resource_type'])\n self.assertTrue(\n consumers_list[elem].resource_id ==\n consumers[elem]['resource_id'])\n\n self.cleanup.delete_entity(secret_ref)\n self.barbicanclient.secrets.delete(secret_ref, True)",
"def consumers(self):\n ops_set = frozenset(self._ops)\n res = []\n for output in self._output_ts:\n consumers = [op for op in output.consumers() if op not in ops_set]\n util.concatenate_unique(res, consumers)\n return res",
"async def list_listeners(self, ctx):\n await ctx.send(\"\".join([l + '\\n' for l in mlist.LISTENER_LIST]))",
"def create_consumers(self, create_count=1, **kwargs):\n consumer_list = []\n current_create_count = 0\n while current_create_count < create_count:\n consumer = self._create(**kwargs)\n current_create_count += 1\n consumer_list.append(consumer)\n return consumer_list",
"def list_event_types(self, publisher_id):\n route_values = {}\n if publisher_id is not None:\n route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')\n response = self._send(http_method='GET',\n location_id='db4777cd-8e08-4a84-8ba3-c974ea033718',\n version='5.1',\n route_values=route_values)\n return self._deserialize('[EventTypeDescriptor]', self._unwrap_collection(response))",
"def cmd_listener_list(context):\n listeners = get_listeners()\n context.spinner_stop()\n if not listeners:\n click.echo(\"No running listeners\")\n else:\n display_list_listeners(listeners, table_format=context.output_format)",
"def _RestoreConsumers(self):\n data = TryLoadJSON(self.consumers_list_path, self.logger.name)\n if data:\n for name in data:\n self.consumers[name] = self._CreateConsumer(name)",
"def get_oembed_providers():\n global _provider_list, _provider_lock\n if _provider_list is not None:\n return _provider_list\n\n # Allow only one thread to build the list, or make request to embed.ly.\n _provider_lock.acquire()\n try:\n # And check whether that already succeeded when the lock is granted.\n if _provider_list is None:\n _provider_list = _build_provider_list()\n finally:\n # Always release if there are errors\n _provider_lock.release()\n\n return _provider_list",
"def list_subscribers(self):\n return self._persistent_store.list_subscribers()",
"def list_publishers(self):\n response = self._send(http_method='GET',\n location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',\n version='5.1')\n return self._deserialize('[Publisher]', self._unwrap_collection(response))",
"def findConsumersRecursive(self, target=None, out=set()):\n if target is None:\n target = self\n for service in target.consumers:\n out.add(service)\n self.findConsumersRecursive(service, out)\n return out",
"def get_watchlists(self) -> list:\n try:\n result = self.api.get_watchlists()\n except BrokerException as err:\n print('[!] Unable to get watchlists.')\n raise err\n else:\n return result",
"def get_services(self):\n services = self.docker.services.list(filters=self.settings['filter_services'])\n for blacklist_service in self.settings['blacklist_services']:\n for service in services:\n if service.name == blacklist_service:\n log.debug(f'Blacklisted {blacklist_service}')\n services.remove(service)\n return services",
"def test_consumers_list_secret_without_consumers(self):\n secret_ref = self._create_test_secret()\n\n consumers_list = self.barbicanclient.secrets.list_consumers(\n secret_ref)\n self.assertTrue(len(consumers_list) == 0)\n\n self.cleanup.delete_entity(secret_ref)\n self.barbicanclient.secrets.delete(secret_ref, True)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
ListEventTypes. Get the event types for a specific publisher.
|
def list_event_types(self, publisher_id):
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='db4777cd-8e08-4a84-8ba3-c974ea033718',
version='5.1',
route_values=route_values)
return self._deserialize('[EventTypeDescriptor]', self._unwrap_collection(response))
|
[
"def list_event_types():\n print('\\nValid event types:')\n for etype in EVENT_TYPES:\n print(' {0}'.format(etype))",
"def get_notification_event_types(self):\n return []",
"def get_events_by_genus_type(self, event_genus_type):\n return # osid.calendaring.EventList",
"def eventrecorder_eventtypes(self) -> ConfigNodePropertyDropDown:\n return self._eventrecorder_eventtypes",
"def all_event_types():\n\n event_types = Event_Type.query.all()\n\n return jsonify([event_type.serialize() for event_type in event_types])",
"def getEventTypes(self, eventMacro):\n return eventMacro.evtType",
"def list_publishers(self):\n response = self._send(http_method='GET',\n location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',\n version='5.1')\n return self._deserialize('[Publisher]', self._unwrap_collection(response))",
"def get_all_events_by_event_code(code, max_events=MAX_EVENTS):\n query = \"type={0}\".format(code)\n logger.info(\"Get all events with code %s\", code)\n return ll_hosts.EVENT_API.query(constraint=query, max=max_events)",
"def get_calendars_by_genus_type(self, calendar_genus_type):\n return # osid.calendaring.CalendarList",
"def __filter_event_type__(trace_events, event_type):\n filtered = []\n for line in trace_events:\n if line[0] == event_type:\n filtered.append(line)\n return filtered",
"def get_offset_events_by_genus_type(self, offset_event_genus_type):\n return # osid.calendaring.OffsetEventList",
"def subscribe_events(self, event_type_list):\n for event_type in event_type_list:\n self._dispatcher.connect(self._on_event,\n signal=event_type)",
"def get_events_by_parent_genus_type(self, event_genus_type):\n return # osid.calendaring.EventList",
"def eventrecorder_eventtypes(self, eventrecorder_eventtypes: ConfigNodePropertyDropDown):\n\n self._eventrecorder_eventtypes = eventrecorder_eventtypes",
"def events(self):\n return list(self.__events.keys())",
"def get_all_metadata_names_for_publisher(publisher):\n try:\n metadata = MetaDataDB.query.join(Publisher).\\\n with_entities(MetaDataDB.name).\\\n filter(Publisher.name == publisher).all()\n if len(metadata) is 0:\n return handle_error('DATA_NOT_FOUND',\n 'No metadata found for the package',\n 404)\n keys = []\n for d in metadata:\n keys.append(d[0])\n return jsonify({'data': metadata}), 200\n except Exception as e:\n app.logger.error(e)\n return handle_error('GENERIC_ERROR', e.message, 500)",
"def get_publisher_names():\n\n # publisher_names = [str(p) for p in Publisher.query.all()]\n publisher_names = [p.publisher_name for p in Publisher.query.all()]\n return jsonify(publisher_names=publisher_names)",
"def list_events(eventSet):\n number = ffi.new(\"int*\", 0)\n\n rcode = lib.PAPI_list_events(eventSet, ffi.NULL, number)\n\n if rcode < 0:\n return rcode, None\n\n eventCount = ffi.unpack(number, 1)[0]\n events = ffi.new(\"int[]\", eventCount)\n\n rcode = lib.PAPI_list_events(eventSet, events, number)\n\n return rcode, ffi.unpack(events, eventCount)",
"def get_event_sources(cls, event):\n try:\n prefix, _ = event.split('@', 1)\n except ValueError:\n return [event]\n\n try:\n return sorted(cls._META_EVENT_SOURCE[prefix].keys())\n except KeyError:\n return [event]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GetNotification. Get a specific notification for a subscription.
|
def get_notification(self, subscription_id, notification_id):
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
if notification_id is not None:
route_values['notificationId'] = self._serialize.url('notification_id', notification_id, 'int')
response = self._send(http_method='GET',
location_id='0c62d343-21b0-4732-997b-017fde84dc28',
version='5.1',
route_values=route_values)
return self._deserialize('Notification', response)
|
[
"def get_notification(self, notification_id):\r\n return self._notification_manager.get(notification_id)",
"def get_notification(self, notification_id):\n\n response = self._query_api(\"/rest/notifications/\" + str(notification_id))\n if response is None:\n return None\n elif 'error' in response:\n raise FigoException.from_dict(response)\n else:\n return Notification.from_dict(self, response)",
"def get_notification(cls, account, notification_id):\n try:\n accid = account.id\n except:\n accid = None\n\n magic = uuid.uuid4().hex\n\n # notifications may either be in the unrouted or routed indices.\n # start at the routed notification (as they may appear in both)\n rn = models.RoutedNotification.pull(notification_id)\n if rn is not None:\n if accid == rn.provider_id:\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y}; returns the provider's version of the routed notification\".format(z=magic, x=accid, y=notification_id))\n return rn.make_outgoing(provider=True)\n else:\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y}; returns the public version of the routed notification\".format(z=magic, x=accid, y=notification_id))\n return rn.make_outgoing()\n if accid is not None and (account.has_role('publisher') or current_user.is_super):\n urn = models.UnroutedNotification.pull(notification_id)\n if urn is not None:\n if accid == urn.provider_id:\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y}; returns the provider's version of the unrouted notification\".format(z=magic, x=accid, y=notification_id))\n return urn.make_outgoing(provider=True)\n else:\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y}; returns the public version of the unrouted notification\".format(z=magic, x=accid, y=notification_id))\n return urn.make_outgoing()\n\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y}; no distributable notification of that id found\".format(z=magic, x=accid, y=notification_id))\n return None",
"def get_notifications(self, subscription_id, max_results=None, status=None, result=None):\n route_values = {}\n if subscription_id is not None:\n route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')\n query_parameters = {}\n if max_results is not None:\n query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')\n if status is not None:\n query_parameters['status'] = self._serialize.query('status', status, 'str')\n if result is not None:\n query_parameters['result'] = self._serialize.query('result', result, 'str')\n response = self._send(http_method='GET',\n location_id='0c62d343-21b0-4732-997b-017fde84dc28',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('[Notification]', self._unwrap_collection(response))",
"def get_subscription(self, sid):\n with self.subscriptions_lock:\n return self.subscriptions.get(sid)",
"def get_content(cls, account, notification_id, filename=None):\n magic = uuid.uuid4().hex\n urn = models.UnroutedNotification.pull(notification_id)\n if urn is not None and (account.has_role('publisher') or current_user.is_super):\n if filename is not None:\n store_filename = filename\n else:\n pm = packages.PackageFactory.incoming(urn.packaging_format)\n store_filename = pm.zip_name()\n sm = store.StoreFactory.get()\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y} Content:{a}; returns unrouted notification stored file {b}\".format(z=magic, x=account.id, y=notification_id, a=filename, b=store_filename))\n return sm.get(urn.id, store_filename) # returns None if not found\n else:\n rn = models.RoutedNotification.pull(notification_id)\n if rn is not None:\n if ((account.has_role(\"publisher\") and rn.provider_id == account.id) or\n (account.has_role(\"repository\") and account.id in rn.repositories) or\n current_user.is_super):\n if filename is not None:\n store_filename = filename\n else:\n pm = packages.PackageFactory.incoming(rn.packaging_format)\n store_filename = pm.zip_name()\n sm = store.StoreFactory.get()\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y} Content:{a}; returns routed notification stored file {b}\".format(z=magic, x=account.id, y=notification_id, a=filename, b=store_filename))\n return sm.get(rn.id, store_filename)\n else:\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y} Content:{a}; not authorised to receive this content\".format(z=magic, x=account.id, y=notification_id, a=filename))\n raise UnauthorisedException()\n else:\n app.logger.debug(\"Request:{z} - Retrieve request from Account:{x} on Notification:{y} Content:{a}; no suitable content found to return\".format(z=magic, x=account.id, y=notification_id, a=filename))\n return None",
"def find(subscription_id):\n\n try:\n response = Http().get(\"/subscriptions/\" + subscription_id)\n return Subscription(response[\"subscription\"])\n except NotFoundError:\n raise NotFoundError(\"subscription with id \" + subscription_id + \" not found\")",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n instance_id: Optional[pulumi.Input[int]] = None,\n name: Optional[pulumi.Input[str]] = None,\n options: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value: Optional[pulumi.Input[str]] = None) -> 'Notification':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _NotificationState.__new__(_NotificationState)\n\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"options\"] = options\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value\"] = value\n return Notification(resource_name, opts=opts, __props__=__props__)",
"def get_one(self, subscription_id):\n\n subscription = subscription_api.subscription_get(subscription_id)\n current_user = user_api.user_get(request.current_user_id)\n\n if subscription.user_id != request.current_user_id \\\n and not current_user.is_superuser:\n abort(403, _(\"You do not have access to this record.\"))\n\n return Subscription.from_db_model(subscription)",
"def get_content(self, notification_id):\n url = \"%s/%s/content\" % (self.uri, notification_id)\n\n resp = self.session.get(url)\n\n if resp.status_code >= 400:\n raise CourierAPIException(resp)\n\n return resp.json()",
"def getSubscription(subscriber):",
"def get_notifications(self, context):\n module_context.init()\n LOG.info(\"Received RPC GET NOTIFICATIONS \")\n events = self.sc.get_stashed_events()\n notifications = []\n for event in events:\n notification = event.data\n msg = (\"Notification Data: %r\" % notification)\n notifications.append(notification)\n LOG.info(msg)\n return notifications",
"def extract_notification_from_request(request):\n encoded = request.GET.get('notification') or request.POST.get('notification')\n\n if not encoded:\n return ('', '')\n\n try:\n notification = json.loads(base64.b64decode(encoded))\n except (json.JSONDecodeError, binascii.Error):\n notification = ('', '')\n\n return notification",
"def _get_notif_data(self):\n return self._replace_id(self.request.get_data(), 'notification_id')",
"def get_note(self, note_id):\n return self.__get_object('notes', None, note_id)",
"def get_subscription(self, id: UUID) -> Optional[Subscription]:\n subscription = select([subscriptions]).where(subscriptions.c.id == id).execute().first()\n return subscription",
"def get_notification_channel(\n self,\n name,\n retry=google.api_core.gapic_v1.method.DEFAULT,\n timeout=google.api_core.gapic_v1.method.DEFAULT,\n metadata=None,\n ):\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n # Wrap the transport method to add retry and timeout logic.\n if \"get_notification_channel\" not in self._inner_api_calls:\n self._inner_api_calls[\n \"get_notification_channel\"\n ] = google.api_core.gapic_v1.method.wrap_method(\n self.transport.get_notification_channel,\n default_retry=self._method_configs[\"GetNotificationChannel\"].retry,\n default_timeout=self._method_configs[\"GetNotificationChannel\"].timeout,\n client_info=self._client_info,\n )\n\n request = notification_service_pb2.GetNotificationChannelRequest(name=name,)\n if metadata is None:\n metadata = []\n metadata = list(metadata)\n try:\n routing_header = [(\"name\", name)]\n except AttributeError:\n pass\n else:\n routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(\n routing_header\n )\n metadata.append(routing_metadata)\n\n return self._inner_api_calls[\"get_notification_channel\"](\n request, retry=retry, timeout=timeout, metadata=metadata\n )",
"def test_notification_get(self):\n pass",
"def get_bucket_notification(self, bucket_name):\n check_bucket_name(bucket_name)\n\n response = self._url_open(\n \"GET\",\n bucket_name=bucket_name,\n query={\"notification\": \"\"},\n )\n data = response.data.decode('utf-8')\n return parse_get_bucket_notification(data)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GetNotifications. Get a list of notifications for a specific subscription. A notification includes details about the event, the request to and the response from the consumer service.
|
def get_notifications(self, subscription_id, max_results=None, status=None, result=None):
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
query_parameters = {}
if max_results is not None:
query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')
if status is not None:
query_parameters['status'] = self._serialize.query('status', status, 'str')
if result is not None:
query_parameters['result'] = self._serialize.query('result', result, 'str')
response = self._send(http_method='GET',
location_id='0c62d343-21b0-4732-997b-017fde84dc28',
version='5.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Notification]', self._unwrap_collection(response))
|
[
"def get_notifications(self, context):\n module_context.init()\n LOG.info(\"Received RPC GET NOTIFICATIONS \")\n events = self.sc.get_stashed_events()\n notifications = []\n for event in events:\n notification = event.data\n msg = (\"Notification Data: %r\" % notification)\n notifications.append(notification)\n LOG.info(msg)\n return notifications",
"def list_notifications(self):\r\n return self._notification_manager.list()",
"def get_notification(self, subscription_id, notification_id):\n route_values = {}\n if subscription_id is not None:\n route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')\n if notification_id is not None:\n route_values['notificationId'] = self._serialize.url('notification_id', notification_id, 'int')\n response = self._send(http_method='GET',\n location_id='0c62d343-21b0-4732-997b-017fde84dc28',\n version='5.1',\n route_values=route_values)\n return self._deserialize('Notification', response)",
"def notifications(self):\r\n from .._impl.notification import Notification\r\n result = []\r\n url = \"%s/community/users/%s/notifications\" % (self._portal.resturl, self._user_id)\r\n params = {\"f\" : \"json\"}\r\n ns = self._portal.con.get(url, params)\r\n if \"notifications\" in ns:\r\n for n in ns[\"notifications\"]:\r\n result.append(Notification(url=\"%s/%s\" % (url, n['id']),\r\n user=self,\r\n data=n,\r\n initialize=False)\r\n )\r\n del n\r\n return result\r\n return result",
"def list_notifications(request):\n notifications = Notification.objects.filter(\n receiving_user=request.user)\n data = NotificationModelSerializer(notifications, many=True).data\n return Response(data, status=status.HTTP_200_OK)",
"def getNotifications():\n # gets the data from the notifications db\n try:\n conn = sqlite3.connect('notifications.db')\n c = conn.cursor()\n\n # get all the data from the db except id (ie. timestamp, message, division)\n c.execute(\"SELECT division, timestamp, notification FROM notifications\")\n result = c.fetchall()\n logging.debug(\"The database returned {} rows\".format((len(result))))\n c.close()\n except sqlite3.OperationalError as e:\n errorMessage = json.dumps({\"error\": str(e)})\n return bottle.HTTPResponse(body=errorMessage, status=400, headers=getHeaders())\n except Exception as e:\n errorMessage = json.dumps({\"error\": str(e)})\n return bottle.HTTPResponse(body=errorMessage, status=400,\n headers=getHeaders())\n\n # format the data so the front end can consume it easily\n # we know the order of the data because it's the same order we passed into the select statement\n resultDict = [{'division': notification[0], 'timestamp': notification[1], 'notification': notification[2]} for\n notification in result]\n return bottle.HTTPResponse(body=json.dumps(resultDict), status=200, headers=getHeaders())",
"async def notification_list(self, context):\n if self.db == None:\n await self.start() # Initiate DB, because it's not initialized yet\n\n notifications = self.get_notifications(context.message.author.id)\n if not notifications:\n return await self.bot.send_message(context.message.author, 'You have no notifications at this time.')\n else:\n notifications_list_str = ''\n for notification in notifications.values():\n time_until = notification['notification_time'] - int(datetime.now().timestamp()) # Time until notification\n notifications_list_str += '%s %s in %s\\n' % (notification['uid'], notification['notification_message'], self.get_time_string(time_until))\n return await self.bot.send_message(context.message.author, notifications_list_str) # Full list of notifications\n return",
"def ListTopicSubscriptions(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def get_notifications(self, request):\r\n try:\r\n assert self._db_connection, {\r\n STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,\r\n MESSAGE_KEY: DB_ERROR}\r\n\r\n dict_data = {}\r\n\r\n self._psql_session.execute(CHECK_AUTHENTICATION_QUERY.format(self.loggedin_userid_details[LOGIN_ID]))\r\n user = pd.DataFrame(self._psql_session.fetchall())\r\n if not self._psql_session.rowcount:\r\n return JsonResponse({MESSAGE_KEY: \"LOGIN ID NOT REGISTER WITH US\"}, status=HTTP_400_BAD_REQUEST)\r\n self._psql_session.execute(GET_PERMISSION.format(user['user_type'].iloc[0]))\r\n permission = pd.DataFrame(self._psql_session.fetchall())\r\n if not permission.empty:\r\n permissions = list(permission[\"feature\"])\r\n else:\r\n permissions = []\r\n if 'Download Notifications for Selected Dates' in permissions:\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_PERIOD)\r\n download_period = pd.DataFrame(self._psql_session.fetchall())\r\n if not download_period.empty:\r\n download_time_period = int(download_period['value'].iloc[0])\r\n else:\r\n download_time_period = None\r\n\r\n if self.query_params:\r\n query_params = {\r\n START_DATE_REQUEST: self.query_params.GET[START_DATE_REQUEST],\r\n END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]\r\n }\r\n d0 = np.datetime64(query_params[START_DATE_REQUEST]).astype('int64')\r\n d1 = np.datetime64(query_params[END_DATE_REQUEST]).astype('int64')\r\n \"\"\"\r\n Calculating number of days between start date and end date\r\n delta = (d1 - d0) / (24 * 3600000)\r\n \"\"\"\r\n delta = (d1 - d0) / (24 * 3600000)\r\n\r\n if delta <= download_time_period:\r\n tm = t.time()\r\n LAST_MODIFIED_DATE = pd.to_datetime(tm, unit='s').strftime('%d/%b/%Y %H:%M')\r\n start_date = to_datetime(query_params[START_DATE_REQUEST], format='%Y-%m-%dT%H:%M:%S.%fZ')\r\n converted_start_date = pd.to_datetime(start_date).strftime('%d-%b-%Y %H:%M:%S')\r\n end_date = to_datetime(query_params[END_DATE_REQUEST], format='%Y-%m-%dT%H:%M:%S.%fZ')\r\n converted_end_date = pd.to_datetime(end_date).strftime('%d-%b-%Y %H:%M:%S')\r\n notifications_duration = str(converted_start_date) + \" to \" + str(converted_end_date)\r\n dict_data[\"current_time\"] = LAST_MODIFIED_DATE\r\n dict_data[\"duration\"] = notifications_duration\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST.format(OVER_HEAD_PDI_TABLE, OVER_HEAD_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n overhead_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n\r\n if not overhead_notifications.empty:\r\n overhead_notifications = overhead_notifications[['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(OVER_HEAD_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n overhead_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n\r\n if not overhead_alerts.empty:\r\n overhead_alerts['Date Time'] = overhead_alerts['Date Time'].dt.tz_convert(None)\r\n overhead_alerts['Date Time'] = overhead_alerts['Date Time'].dt.strftime('%d/%b/%Y %H:%M')\r\n alert = overhead_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n overhead_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n pdi_df = 
[overhead_notifications, overhead_alerts]\r\n pdi_dataFrame = pd.concat(pdi_df)\r\n pdi_dataFrame = pdi_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not overhead_notifications.empty or not overhead_alerts.empty:\r\n dict_data[\"overhead_pdi\"] = pdi_dataFrame.render\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATIONS_LIST.format(OUTGAE_TABLE, OUTAGE_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n outage_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not outage_notifications.empty:\r\n outage_notifications = outage_notifications[['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(OUTAGE_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n outage_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not outage_alerts.empty:\r\n outage_alerts['Date Time'] = outage_alerts['Date Time'].dt.tz_convert(None)\r\n outage_alerts['Date Time'] = outage_alerts['Date Time'].dt.strftime('%d/%b/%Y %H:%M')\r\n alert = outage_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n outage_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n outage_df = [outage_notifications, outage_alerts]\r\n outage_dataFrame = pd.concat(outage_df)\r\n outage_dataFrame = outage_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not outage_notifications.empty or not outage_alerts.empty:\r\n dict_data[\"outage\"] = outage_dataFrame.render\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATIONS_LIST.format(HGI_TABLE, HGI_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n hgi_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not hgi_notifications.empty:\r\n hgi_notifications = hgi_notifications[['Date Time', 'Category', 'Notification']]\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(HGI_MODULE,\r\n query_params[\r\n START_DATE_REQUEST],\r\n query_params[\r\n END_DATE_REQUEST]))\r\n hgi_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not hgi_alerts.empty:\r\n hgi_alerts['Date Time'] = hgi_alerts['Date Time'].dt.tz_convert(None)\r\n hgi_alerts['Date Time'] = hgi_alerts['Date Time'].dt.strftime('%d/%b/%Y %H:%M')\r\n alert = hgi_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n hgi_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n hgi_df = [hgi_notifications, hgi_alerts]\r\n hgi_dataFrame = pd.concat(hgi_df)\r\n hgi_dataFrame = hgi_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not hgi_notifications.empty or not hgi_alerts.empty:\r\n dict_data[\"hgi\"] = hgi_dataFrame.render\r\n\r\n \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_RESULT_TABLE, TMT_FURNACE_A_MODULE,\r\n 
query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_A_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_A_notifications.empty:\r\n tmt_furnace_A_notifications = tmt_furnace_A_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_SPALL_RESULT, TMT_FURNACE_A_SPALL_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_spall_A_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_spall_A_notifications.empty:\r\n tmt_furnace_spall_A_notifications = tmt_furnace_spall_A_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_ERROR_DETAILS_TMT.format(ERROR_TMT_A,\r\n query_params[\r\n START_DATE_REQUEST],\r\n query_params[\r\n END_DATE_REQUEST]))\r\n tmt_furnace_A_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_A_alerts.empty:\r\n tmt_furnace_A_alerts['Date Time'] = tmt_furnace_A_alerts['Date Time'].dt.tz_convert(None)\r\n tmt_furnace_A_alerts['Date Time'] = tmt_furnace_A_alerts['Date Time'].dt.strftime(\r\n '%d/%b/%Y %H:%M')\r\n alert = tmt_furnace_A_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n tmt_furnace_A_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n tmt_A_df = [tmt_furnace_A_notifications, tmt_furnace_spall_A_notifications,\r\n tmt_furnace_A_alerts]\r\n tmt_A_dataFrame = pd.concat(tmt_A_df)\r\n tmt_A_dataFrame = tmt_A_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not tmt_furnace_A_notifications.empty or not tmt_furnace_spall_A_notifications.empty or not tmt_furnace_A_alerts.empty:\r\n dict_data[\"furnace_tmt_A\"] = tmt_A_dataFrame.render\r\n\r\n \"\"\" ''''''''''''' \"\"\"\r\n\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_RESULT_TABLE, TMT_FURNACE_B_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_B_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_B_notifications.empty:\r\n tmt_furnace_B_notifications = tmt_furnace_B_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_SPALL_RESULT, TMT_FURNACE_B_SPALL_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_spall_B_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_spall_B_notifications.empty:\r\n tmt_furnace_spall_B_notifications = tmt_furnace_spall_B_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_ERROR_DETAILS_TMT.format(ERROR_TMT_B,\r\n query_params[\r\n START_DATE_REQUEST],\r\n query_params[\r\n END_DATE_REQUEST]))\r\n tmt_furnace_B_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_B_alerts.empty:\r\n tmt_furnace_B_alerts['Date Time'] = tmt_furnace_B_alerts['Date Time'].dt.tz_convert(None)\r\n tmt_furnace_B_alerts['Date Time'] = tmt_furnace_B_alerts['Date Time'].dt.strftime(\r\n '%d/%b/%Y %H:%M')\r\n alert = 
tmt_furnace_B_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n tmt_furnace_B_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n tmt_B_df = [tmt_furnace_B_notifications, tmt_furnace_spall_B_notifications,\r\n tmt_furnace_B_alerts]\r\n tmt_B_dataFrame = pd.concat(tmt_B_df)\r\n tmt_B_dataFrame = tmt_B_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not tmt_furnace_B_notifications.empty or not tmt_furnace_spall_B_notifications.empty or not tmt_furnace_B_alerts.empty:\r\n dict_data[\"furnace_tmt_B\"] = tmt_B_dataFrame.render\r\n\r\n \"\"\" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\" \"\" \"\"\"\r\n self._psql_session.execute(\r\n DOWNLOAD_BENCH_MARK_ERROR.format(query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n benchmark_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n\r\n if not benchmark_alerts.empty:\r\n benchmark_alerts['Date Time'] = benchmark_alerts['Date Time'].dt.tz_convert(None)\r\n benchmark_alerts['Date Time'] = benchmark_alerts['Date Time'].dt.strftime(\r\n '%d/%b/%Y %H:%M')\r\n alert = benchmark_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n benchmark_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n benchmark_dataFrame = benchmark_alerts\r\n benchmark_dataFrame = benchmark_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n dict_data[\"benchmarking\"] = benchmark_dataFrame.render\r\n SITE_ROOT = os.path.dirname(os.path.realpath(__file__))\r\n # image_1 = \"\\..\\..\\\\templates\\\\p66logo.png\"\r\n image_1 = \"/../..//templates//p66logo.png\"\r\n image_1_path = SITE_ROOT + image_1\r\n # image_2 = \"\\..\\..\\\\templates\\\\ingenero_logo.png\"\r\n image_2 = \"/../..//templates//ingenero_logo.png\"\r\n image_2_path = SITE_ROOT + image_2\r\n dict_data[\"image_1\"] = image_1_path\r\n dict_data[\"image_2\"] = image_2_path\r\n pdf = render_to_pdf('invoice.html', dict_data)\r\n if pdf:\r\n response = HttpResponse(pdf, content_type='application/pdf')\r\n filename = \"Notifications.pdf\"\r\n content = \"inline; filename=%s\" % filename\r\n download = request.GET.get(\"download\")\r\n if download:\r\n content = \"attachment; filename=%s\" % filename\r\n response['Content-Disposition'] = content\r\n return response\r\n return HttpResponse(\"Not found\")\r\n else:\r\n return JsonResponse(\r\n {MESSAGE_KEY: \"The days to download exceeds the default download time period\"}, safe=False)\r\n else:\r\n return JsonResponse({MESSAGE_KEY: \"FORBIDDEN ERROR\"}, status=HTTP_403_FORBIDDEN)\r\n except AssertionError as e:\r\n log_error(\"Exception occurred due to\" + str(e))\r\n return asert_res(e)\r\n\r\n except Exception as e:\r\n log_error(\"Exception occurred due to\" + str(e))\r\n return json_InternalServerError",
"def get_live_notifications(self) -> TodoistLiveNotificationsResponse:\n api = self._get_api()\n return TodoistLiveNotificationsResponse(api.state['live_notifications'])",
"def get_notifications(self, limit=10):\n return self.notifications.sort(key=lambda notify: notify.created)[:limit]",
"def getNotifications(nodeIdentifier, items):",
"def notifications(self) -> pulumi.Output[Optional[Sequence['outputs.BudgetNotification']]]:\n return pulumi.get(self, \"notifications\")",
"def query_notifications(self, query):\n content = self._serialize.body(query, 'NotificationsQuery')\n response = self._send(http_method='POST',\n location_id='1a57562f-160a-4b5c-9185-905e95b39d36',\n version='5.1',\n content=content)\n return self._deserialize('NotificationsQuery', response)",
"def getSubscriptions(state=None):",
"def notifications(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BudgetNotificationArgs']]]]:\n return pulumi.get(self, \"notifications\")",
"def get_for_user(cls, user):\n notifications = cls.objects.filter(user = user)\n notifications = notifications.order_by('-created')\n notifications = notifications.prefetch_related('notification')\n\n return notifications",
"def list_subscriptions(self):\n models = self.get_db_model(self.engine)\n subscriptions = models.classes.subscriptions\n session = self.Session()\n return session.query(subscriptions).all()",
"def get_notification_list(user):\n\tnow = datetime.datetime.now()\n\tdays = 3 \n\n\ttransactions = db.get_transaction_notifications(user, days)\n\tresolutions = db.get_recent_resolutions(user, days)\n\tnews_posts = models.NewsPost.objects.filter(\n\t\ttime_created__gte=now - datetime.timedelta(days),\n\t\tsite=config.SITE_ID\n\t)\n\n\tdef build_transaction_message(trans):\n\t\tif trans.status == 'pending':\n\t\t\taction = 'created a'\n\t\telif trans.status == 'rejected':\n\t\t\taction = 'rejected your'\n\t\telse:\n\t\t\taction = 'confirmed your'\n\t\treturn \"%s %s transaction for a %s of %s.\" % (\n\t\t\ttrans.creator_person,\n\t\t\taction,\n\t\t\ttrans.targets_transaction_type,\n\t\t\ttrans.value_repr\n\t\t), trans.time_created\n\n\tdef build_resolution_message(res):\n\t\treturn \"Your balance with %s was resolved for %s.\" % (\n\t\t\tres.other_person, res.relative_value_repr\n\t\t), res.resolution.time_confirmed\n\n\tdef build_news_post_message(post):\n\t\treturn \"%s posted '%s'.\" % (\n\t\t\tpost.author, post.title\n\t\t), post.time_created\n\n\treturn [i[0] for i in sorted(\n\t\titertools.chain(\n\t\t\t(build_transaction_message(t) for t in transactions),\n\t\t\t(build_resolution_message(r) for r in resolutions),\n\t\t\t(build_news_post_message(n) for n in news_posts)\n\t\t),\n\t\tkey=itemgetter(1),\n\t\treverse=True\n\t)]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
QueryNotifications. Query for notifications. A notification includes details about the event, the request to and the response from the consumer service.
|
def query_notifications(self, query):
content = self._serialize.body(query, 'NotificationsQuery')
response = self._send(http_method='POST',
location_id='1a57562f-160a-4b5c-9185-905e95b39d36',
version='5.1',
content=content)
return self._deserialize('NotificationsQuery', response)
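A minimal usage sketch for the method above, not part of the original record: it assumes `client` is an already-authenticated instance of the same service-hooks client, and that the SDK ships a `NotificationsQuery` model whose `subscription_ids` and `results` fields behave as shown; every identifier and id value below is illustrative.

# Hypothetical usage; 'client', 'NotificationsQuery' and the field names are
# assumptions taken from the surrounding snippets, not verified API facts.
query = NotificationsQuery(subscription_ids=['00000000-0000-0000-0000-000000000000'])
result = client.query_notifications(query)
for notification in result.results or []:
    # Each notification is expected to describe the event plus the request to
    # and the response from the consumer service, per the description above.
    print(notification.id, notification.status)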
|
[
"def get_notifications(self, context):\n module_context.init()\n LOG.info(\"Received RPC GET NOTIFICATIONS \")\n events = self.sc.get_stashed_events()\n notifications = []\n for event in events:\n notification = event.data\n msg = (\"Notification Data: %r\" % notification)\n notifications.append(notification)\n LOG.info(msg)\n return notifications",
"def list_notifications(self):\r\n return self._notification_manager.list()",
"def get_notifications(self, request):\r\n try:\r\n assert self._db_connection, {\r\n STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,\r\n MESSAGE_KEY: DB_ERROR}\r\n\r\n dict_data = {}\r\n\r\n self._psql_session.execute(CHECK_AUTHENTICATION_QUERY.format(self.loggedin_userid_details[LOGIN_ID]))\r\n user = pd.DataFrame(self._psql_session.fetchall())\r\n if not self._psql_session.rowcount:\r\n return JsonResponse({MESSAGE_KEY: \"LOGIN ID NOT REGISTER WITH US\"}, status=HTTP_400_BAD_REQUEST)\r\n self._psql_session.execute(GET_PERMISSION.format(user['user_type'].iloc[0]))\r\n permission = pd.DataFrame(self._psql_session.fetchall())\r\n if not permission.empty:\r\n permissions = list(permission[\"feature\"])\r\n else:\r\n permissions = []\r\n if 'Download Notifications for Selected Dates' in permissions:\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_PERIOD)\r\n download_period = pd.DataFrame(self._psql_session.fetchall())\r\n if not download_period.empty:\r\n download_time_period = int(download_period['value'].iloc[0])\r\n else:\r\n download_time_period = None\r\n\r\n if self.query_params:\r\n query_params = {\r\n START_DATE_REQUEST: self.query_params.GET[START_DATE_REQUEST],\r\n END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]\r\n }\r\n d0 = np.datetime64(query_params[START_DATE_REQUEST]).astype('int64')\r\n d1 = np.datetime64(query_params[END_DATE_REQUEST]).astype('int64')\r\n \"\"\"\r\n Calculating number of days between start date and end date\r\n delta = (d1 - d0) / (24 * 3600000)\r\n \"\"\"\r\n delta = (d1 - d0) / (24 * 3600000)\r\n\r\n if delta <= download_time_period:\r\n tm = t.time()\r\n LAST_MODIFIED_DATE = pd.to_datetime(tm, unit='s').strftime('%d/%b/%Y %H:%M')\r\n start_date = to_datetime(query_params[START_DATE_REQUEST], format='%Y-%m-%dT%H:%M:%S.%fZ')\r\n converted_start_date = pd.to_datetime(start_date).strftime('%d-%b-%Y %H:%M:%S')\r\n end_date = to_datetime(query_params[END_DATE_REQUEST], format='%Y-%m-%dT%H:%M:%S.%fZ')\r\n converted_end_date = pd.to_datetime(end_date).strftime('%d-%b-%Y %H:%M:%S')\r\n notifications_duration = str(converted_start_date) + \" to \" + str(converted_end_date)\r\n dict_data[\"current_time\"] = LAST_MODIFIED_DATE\r\n dict_data[\"duration\"] = notifications_duration\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST.format(OVER_HEAD_PDI_TABLE, OVER_HEAD_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n overhead_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n\r\n if not overhead_notifications.empty:\r\n overhead_notifications = overhead_notifications[['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(OVER_HEAD_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n overhead_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n\r\n if not overhead_alerts.empty:\r\n overhead_alerts['Date Time'] = overhead_alerts['Date Time'].dt.tz_convert(None)\r\n overhead_alerts['Date Time'] = overhead_alerts['Date Time'].dt.strftime('%d/%b/%Y %H:%M')\r\n alert = overhead_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n overhead_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n pdi_df = 
[overhead_notifications, overhead_alerts]\r\n pdi_dataFrame = pd.concat(pdi_df)\r\n pdi_dataFrame = pdi_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not overhead_notifications.empty or not overhead_alerts.empty:\r\n dict_data[\"overhead_pdi\"] = pdi_dataFrame.render\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATIONS_LIST.format(OUTGAE_TABLE, OUTAGE_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n outage_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not outage_notifications.empty:\r\n outage_notifications = outage_notifications[['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(OUTAGE_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n outage_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not outage_alerts.empty:\r\n outage_alerts['Date Time'] = outage_alerts['Date Time'].dt.tz_convert(None)\r\n outage_alerts['Date Time'] = outage_alerts['Date Time'].dt.strftime('%d/%b/%Y %H:%M')\r\n alert = outage_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n outage_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n outage_df = [outage_notifications, outage_alerts]\r\n outage_dataFrame = pd.concat(outage_df)\r\n outage_dataFrame = outage_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not outage_notifications.empty or not outage_alerts.empty:\r\n dict_data[\"outage\"] = outage_dataFrame.render\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATIONS_LIST.format(HGI_TABLE, HGI_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n hgi_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not hgi_notifications.empty:\r\n hgi_notifications = hgi_notifications[['Date Time', 'Category', 'Notification']]\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(HGI_MODULE,\r\n query_params[\r\n START_DATE_REQUEST],\r\n query_params[\r\n END_DATE_REQUEST]))\r\n hgi_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not hgi_alerts.empty:\r\n hgi_alerts['Date Time'] = hgi_alerts['Date Time'].dt.tz_convert(None)\r\n hgi_alerts['Date Time'] = hgi_alerts['Date Time'].dt.strftime('%d/%b/%Y %H:%M')\r\n alert = hgi_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n hgi_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n hgi_df = [hgi_notifications, hgi_alerts]\r\n hgi_dataFrame = pd.concat(hgi_df)\r\n hgi_dataFrame = hgi_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not hgi_notifications.empty or not hgi_alerts.empty:\r\n dict_data[\"hgi\"] = hgi_dataFrame.render\r\n\r\n \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_RESULT_TABLE, TMT_FURNACE_A_MODULE,\r\n 
query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_A_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_A_notifications.empty:\r\n tmt_furnace_A_notifications = tmt_furnace_A_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_SPALL_RESULT, TMT_FURNACE_A_SPALL_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_spall_A_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_spall_A_notifications.empty:\r\n tmt_furnace_spall_A_notifications = tmt_furnace_spall_A_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_ERROR_DETAILS_TMT.format(ERROR_TMT_A,\r\n query_params[\r\n START_DATE_REQUEST],\r\n query_params[\r\n END_DATE_REQUEST]))\r\n tmt_furnace_A_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_A_alerts.empty:\r\n tmt_furnace_A_alerts['Date Time'] = tmt_furnace_A_alerts['Date Time'].dt.tz_convert(None)\r\n tmt_furnace_A_alerts['Date Time'] = tmt_furnace_A_alerts['Date Time'].dt.strftime(\r\n '%d/%b/%Y %H:%M')\r\n alert = tmt_furnace_A_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n tmt_furnace_A_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n tmt_A_df = [tmt_furnace_A_notifications, tmt_furnace_spall_A_notifications,\r\n tmt_furnace_A_alerts]\r\n tmt_A_dataFrame = pd.concat(tmt_A_df)\r\n tmt_A_dataFrame = tmt_A_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not tmt_furnace_A_notifications.empty or not tmt_furnace_spall_A_notifications.empty or not tmt_furnace_A_alerts.empty:\r\n dict_data[\"furnace_tmt_A\"] = tmt_A_dataFrame.render\r\n\r\n \"\"\" ''''''''''''' \"\"\"\r\n\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_RESULT_TABLE, TMT_FURNACE_B_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_B_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_B_notifications.empty:\r\n tmt_furnace_B_notifications = tmt_furnace_B_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n self._psql_session.execute(\r\n DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(TMT_SPALL_RESULT, TMT_FURNACE_B_SPALL_MODULE,\r\n query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n tmt_furnace_spall_B_notifications = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_spall_B_notifications.empty:\r\n tmt_furnace_spall_B_notifications = tmt_furnace_spall_B_notifications[\r\n ['Date Time', 'Category', 'Notification']]\r\n\r\n self._psql_session.execute(DOWNLOAD_NOTIFICATION_ERROR_DETAILS_TMT.format(ERROR_TMT_B,\r\n query_params[\r\n START_DATE_REQUEST],\r\n query_params[\r\n END_DATE_REQUEST]))\r\n tmt_furnace_B_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n if not tmt_furnace_B_alerts.empty:\r\n tmt_furnace_B_alerts['Date Time'] = tmt_furnace_B_alerts['Date Time'].dt.tz_convert(None)\r\n tmt_furnace_B_alerts['Date Time'] = tmt_furnace_B_alerts['Date Time'].dt.strftime(\r\n '%d/%b/%Y %H:%M')\r\n alert = 
tmt_furnace_B_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n tmt_furnace_B_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n\r\n tmt_B_df = [tmt_furnace_B_notifications, tmt_furnace_spall_B_notifications,\r\n tmt_furnace_B_alerts]\r\n tmt_B_dataFrame = pd.concat(tmt_B_df)\r\n tmt_B_dataFrame = tmt_B_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n if not tmt_furnace_B_notifications.empty or not tmt_furnace_spall_B_notifications.empty or not tmt_furnace_B_alerts.empty:\r\n dict_data[\"furnace_tmt_B\"] = tmt_B_dataFrame.render\r\n\r\n \"\"\" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\" \"\" \"\"\"\r\n self._psql_session.execute(\r\n DOWNLOAD_BENCH_MARK_ERROR.format(query_params[START_DATE_REQUEST],\r\n query_params[END_DATE_REQUEST]))\r\n benchmark_alerts = pd.DataFrame(self._psql_session.fetchall())\r\n\r\n if not benchmark_alerts.empty:\r\n benchmark_alerts['Date Time'] = benchmark_alerts['Date Time'].dt.tz_convert(None)\r\n benchmark_alerts['Date Time'] = benchmark_alerts['Date Time'].dt.strftime(\r\n '%d/%b/%Y %H:%M')\r\n alert = benchmark_alerts[['tag_name', 'Date Time', 'Notification']]\r\n alert_group = alert.groupby(['Date Time', 'Notification'])['tag_name'].apply(\r\n ', '.join).reset_index()\r\n alert_group['Notification'] = alert_group['Notification'].str.cat(alert_group['tag_name'],\r\n sep=\" - \")\r\n alert_group['Category'] = 'Alert'\r\n benchmark_alerts = alert_group[['Date Time', 'Category', 'Notification']]\r\n benchmark_dataFrame = benchmark_alerts\r\n benchmark_dataFrame = benchmark_dataFrame.style.set_properties(subset=['Notification'],\r\n **{'width': '400px'})\r\n\r\n dict_data[\"benchmarking\"] = benchmark_dataFrame.render\r\n SITE_ROOT = os.path.dirname(os.path.realpath(__file__))\r\n # image_1 = \"\\..\\..\\\\templates\\\\p66logo.png\"\r\n image_1 = \"/../..//templates//p66logo.png\"\r\n image_1_path = SITE_ROOT + image_1\r\n # image_2 = \"\\..\\..\\\\templates\\\\ingenero_logo.png\"\r\n image_2 = \"/../..//templates//ingenero_logo.png\"\r\n image_2_path = SITE_ROOT + image_2\r\n dict_data[\"image_1\"] = image_1_path\r\n dict_data[\"image_2\"] = image_2_path\r\n pdf = render_to_pdf('invoice.html', dict_data)\r\n if pdf:\r\n response = HttpResponse(pdf, content_type='application/pdf')\r\n filename = \"Notifications.pdf\"\r\n content = \"inline; filename=%s\" % filename\r\n download = request.GET.get(\"download\")\r\n if download:\r\n content = \"attachment; filename=%s\" % filename\r\n response['Content-Disposition'] = content\r\n return response\r\n return HttpResponse(\"Not found\")\r\n else:\r\n return JsonResponse(\r\n {MESSAGE_KEY: \"The days to download exceeds the default download time period\"}, safe=False)\r\n else:\r\n return JsonResponse({MESSAGE_KEY: \"FORBIDDEN ERROR\"}, status=HTTP_403_FORBIDDEN)\r\n except AssertionError as e:\r\n log_error(\"Exception occurred due to\" + str(e))\r\n return asert_res(e)\r\n\r\n except Exception as e:\r\n log_error(\"Exception occurred due to\" + str(e))\r\n return json_InternalServerError",
"def query_payment_notifications(\n end_date: Optional[str] = None,\n external_id: Optional[str] = None,\n limit: Optional[int] = None,\n notification_source: Optional[\n Union[str, QueryPaymentNotificationsNotificationSourceEnum]\n ] = None,\n notification_type: Optional[str] = None,\n offset: Optional[int] = None,\n payment_order_no: Optional[str] = None,\n start_date: Optional[str] = None,\n status: Optional[Union[str, QueryPaymentNotificationsStatusEnum]] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = QueryPaymentNotifications.create(\n end_date=end_date,\n external_id=external_id,\n limit=limit,\n notification_source=notification_source,\n notification_type=notification_type,\n offset=offset,\n payment_order_no=payment_order_no,\n start_date=start_date,\n status=status,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"async def notification_list(self, context):\n if self.db == None:\n await self.start() # Initiate DB, because it's not initialized yet\n\n notifications = self.get_notifications(context.message.author.id)\n if not notifications:\n return await self.bot.send_message(context.message.author, 'You have no notifications at this time.')\n else:\n notifications_list_str = ''\n for notification in notifications.values():\n time_until = notification['notification_time'] - int(datetime.now().timestamp()) # Time until notification\n notifications_list_str += '%s %s in %s\\n' % (notification['uid'], notification['notification_message'], self.get_time_string(time_until))\n return await self.bot.send_message(context.message.author, notifications_list_str) # Full list of notifications\n return",
"def list_notifications(request):\n notifications = Notification.objects.filter(\n receiving_user=request.user)\n data = NotificationModelSerializer(notifications, many=True).data\n return Response(data, status=status.HTTP_200_OK)",
"def notifications(self):\r\n from .._impl.notification import Notification\r\n result = []\r\n url = \"%s/community/users/%s/notifications\" % (self._portal.resturl, self._user_id)\r\n params = {\"f\" : \"json\"}\r\n ns = self._portal.con.get(url, params)\r\n if \"notifications\" in ns:\r\n for n in ns[\"notifications\"]:\r\n result.append(Notification(url=\"%s/%s\" % (url, n['id']),\r\n user=self,\r\n data=n,\r\n initialize=False)\r\n )\r\n del n\r\n return result\r\n return result",
"def notifications(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BudgetNotificationArgs']]]]:\n return pulumi.get(self, \"notifications\")",
"def getNotifications():\n # gets the data from the notifications db\n try:\n conn = sqlite3.connect('notifications.db')\n c = conn.cursor()\n\n # get all the data from the db except id (ie. timestamp, message, division)\n c.execute(\"SELECT division, timestamp, notification FROM notifications\")\n result = c.fetchall()\n logging.debug(\"The database returned {} rows\".format((len(result))))\n c.close()\n except sqlite3.OperationalError as e:\n errorMessage = json.dumps({\"error\": str(e)})\n return bottle.HTTPResponse(body=errorMessage, status=400, headers=getHeaders())\n except Exception as e:\n errorMessage = json.dumps({\"error\": str(e)})\n return bottle.HTTPResponse(body=errorMessage, status=400,\n headers=getHeaders())\n\n # format the data so the front end can consume it easily\n # we know the order of the data because it's the same order we passed into the select statement\n resultDict = [{'division': notification[0], 'timestamp': notification[1], 'notification': notification[2]} for\n notification in result]\n return bottle.HTTPResponse(body=json.dumps(resultDict), status=200, headers=getHeaders())",
"def test_notifications_all(self):\n i = self.instance.notifications(all=True)\n self.get_next(i)\n\n self.session.get.assert_called_once_with(\n url_for(\"notifications\"),\n params={\"per_page\": 100, \"all\": \"true\"},\n headers={},\n )",
"def processNotifications(self, notifications):\n aggregator = service.IService(self.store).getServiceNamed('aggregator')\n aggregator.processNotifications(self.handle, notifications)",
"def get_for_user(cls, user):\n notifications = cls.objects.filter(user = user)\n notifications = notifications.order_by('-created')\n notifications = notifications.prefetch_related('notification')\n\n return notifications",
"def get_notifications(self, subscription_id, max_results=None, status=None, result=None):\n route_values = {}\n if subscription_id is not None:\n route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')\n query_parameters = {}\n if max_results is not None:\n query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')\n if status is not None:\n query_parameters['status'] = self._serialize.query('status', status, 'str')\n if result is not None:\n query_parameters['result'] = self._serialize.query('result', result, 'str')\n response = self._send(http_method='GET',\n location_id='0c62d343-21b0-4732-997b-017fde84dc28',\n version='5.1',\n route_values=route_values,\n query_parameters=query_parameters)\n return self._deserialize('[Notification]', self._unwrap_collection(response))",
"def list_configured_notifications_on_checkpoint(self, checkpoint_id):\n\n return self.query(\"\"\"\n query checkpointQuery($id: ID!) {\n checkpoint(id: $id) {\n configuredNotifications {\n edges {\n node {\n id\n notificationType\n value\n notifyOn\n }\n }\n }\n }\n }\n \"\"\", variables={'id': checkpoint_id})",
"def last(self):\n\t\tparams = {'per_page': 5, '_': int(round(time.time(), 3)*1000)}\n\t\theaders = {'x-csrf-token': repr(self._connection)}\n\n\t\trequest = self._connection.get('notifications.json', headers=headers, params=params)\n\n\t\tif request.status_code != 200:\n\t\t\traise Exception('status code: {0}: cannot retrieve notifications'.format(request.status_code))\n\t\treturn self._finalise(request.json())",
"def statuses(self, request):\n data = request.data\n user = request.user\n\n if 'ids' not in data:\n return Response(None, status=status.HTTP_400_BAD_REQUEST)\n\n ids = data.get('ids')\n\n if isinstance(ids, str) and ids == 'all':\n notifications = NotificationMessage.objects.filter(\n is_archived=False,\n is_read=False,\n user=user\n )\n else:\n notifications = NotificationMessage.objects.filter(\n id__in=ids,\n user=user\n )\n\n if 'is_archived' in data:\n notifications.update(\n is_archived=data['is_archived']\n )\n\n if 'is_read' in data:\n notifications.update(\n is_read=data['is_read']\n )\n\n serializer = self.get_serializer(notifications, many=True)\n\n return Response(serializer.data, status=status.HTTP_200_OK)",
"def fetch_notifications_esi(self, user: User = None) -> None:\n notifications_count_all = 0\n self.notifications_last_update_ok = None\n self.notifications_last_update_at = now()\n self.save()\n token = self.fetch_token(rotate_characters=True)\n\n try:\n notifications = self._fetch_notifications_from_esi(token)\n except OSError as ex:\n message_id = (\n f\"{__title__}-fetch_notifications-{self.pk}-{type(ex).__name__}\"\n )\n title = f\"{__title__}: Failed to update notifications for {self}\"\n message = f\"{self}: Failed to update notifications from ESI due to {ex}\"\n logger.exception(message)\n notify_admins_throttled(\n message_id=message_id,\n title=title,\n message=message,\n level=\"danger\",\n timeout=STRUCTURES_NOTIFY_THROTTLED_TIMEOUT,\n )\n self.notifications_last_update_ok = False\n self.save()\n raise ex\n else:\n notifications_count_new = self._store_notifications(notifications)\n self._process_moon_notifications()\n if notifications_count_new > 0:\n logger.info(\n \"%s: Received %d new notifications from ESI\",\n self,\n notifications_count_new,\n )\n self._process_timers_for_notifications(token)\n notifications_count_all += notifications_count_new\n\n else:\n logger.info(\"%s: No new notifications received from ESI\", self)\n\n self.notifications_last_update_ok = True\n self.save()\n\n if user:\n self._send_report_to_user(\n topic=\"notifications\",\n topic_count=notifications_count_all,\n user=user,\n )",
"def notifications(self) -> pulumi.Output[Optional[Sequence['outputs.BudgetNotification']]]:\n return pulumi.get(self, \"notifications\")",
"def notifications(request):\n my_notifications = models.Notification.objects.order_by('-id').filter(\n person_notifying=request.user)\n return render(request,\n 'notifications.html',\n {'my_notifications': my_notifications})"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GetPublisher. Get a specific service hooks publisher.
|
def get_publisher(self, publisher_id):
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',
version='5.1',
route_values=route_values)
return self._deserialize('Publisher', response)
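A short usage sketch, stated as an assumption rather than verified behaviour: `client` is an authenticated instance of the same client class, and 'tfs' is only an illustrative publisher id.

# Hypothetical call to get_publisher; the id value is a placeholder.
publisher = client.get_publisher('tfs')
# The deserialized Publisher is assumed to expose simple metadata attributes.
print(publisher.id, publisher.description)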
|
[
"def publisher(self):\n if \"publisher\" in self._prop_dict:\n return self._prop_dict[\"publisher\"]\n else:\n return None",
"def get_live_publisher():\n\n live_publisher_class = get_setting_or_raise(\n setting=\"WAGTAIL_LIVE_PUBLISHER\", setting_str=\"live publisher\"\n )\n return import_string(live_publisher_class)",
"def find_one_publisherbot(self, filters: dict) -> PublisherBot: \n result = self.mongo.greenhouse_publisher_bots.find_one(filters) \n if not result:\n return None\n return self.marshall_publisherbot(result)",
"def get(self, hook_id, user=None, repo=None):\n request = self.make_request('repos.hooks.get',\n id=hook_id, user=user, repo=repo)\n return self._get(request)",
"def webhooks_get(self, full_name):\n return self.get('/repos/{}/hooks'.format(full_name))",
"def getPublishedPlugs(*args, **kwargs):\n \n pass",
"def publisher_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"publisher_id\")",
"def _get_topic():\n return local_config.ProjectConfig().get('bisect_service.pubsub_topic')",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Publisher':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = PublisherArgs.__new__(PublisherArgs)\n\n __props__.__dict__[\"accept_terms_and_conditions\"] = None\n __props__.__dict__[\"connection_arn\"] = None\n __props__.__dict__[\"identity_provider\"] = None\n __props__.__dict__[\"publisher_id\"] = None\n __props__.__dict__[\"publisher_profile\"] = None\n __props__.__dict__[\"publisher_status\"] = None\n return Publisher(resource_name, opts=opts, __props__=__props__)",
"def publisher_id(self):\n return self.get('publisher_id', decode=True, default=None)",
"def create_pubsub_publisher_client():\n return pubsub.PublisherClient()",
"def get_webhook(self, webhook):\r\n return self.manager.get_webhook(self.scaling_group, self, webhook)",
"def get_publisher(self, log_path):\n if log_path not in self.http_publishers:\n self.log.debug(\"Creating a Http Log publisher for path \\\"%s\\\"\" % log_path)\n self.http_publishers[log_path] = HttpLogPublisher(\n log_path,\n self.tenant_id,\n self.cluster_id,\n self.date_time,\n Config.member_id,\n Config.application_id,\n Config.cartridge_alias)\n\n return self.http_publishers[log_path]",
"def publisher_uri(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"publisher_uri\")",
"def get_service():\n\n service = build(\"customsearch\", \"v1\",\n developerKey=api_key)\n return service",
"def get_published_pipeline(ws,name,version):\n published_pipelines = PublishedPipeline.list(ws)\n for pipe in published_pipelines: \n p_name = pipe.name\n p_version = pipe.version\n if(p_name == name and p_version is not None and p_version==version):\n return pipe \n else:\n return None",
"def get_publisher_project(obj):\n p_p = obj.publisherproject_set.first()\n if p_p:\n return {\n 'name': p_p.name,\n 'canonical_url': p_p.get_canonical_url()\n }",
"def get_publisherinfo(self, header=None, ccancel=None):\n\n requesturl = self.__get_request_url(\"publisher/0/\")\n return self._fetch_url(requesturl, header, ccancel=ccancel)",
"def digsig_publisher(self):\n return self._attribute('digsig_publisher', \"\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
ListPublishers. Get a list of publishers.
|
def list_publishers(self):
response = self._send(http_method='GET',
location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',
version='5.1')
return self._deserialize('[Publisher]', self._unwrap_collection(response))
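A one-line usage sketch under the same assumption that `client` is an authenticated instance of this class; the attribute name on the returned Publisher objects is assumed, not verified.

# Hypothetical usage; iterate the deserialized [Publisher] collection.
for publisher in client.list_publishers():
    print(publisher.id)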
|
[
"def publishers(self):\n\n base = self.get_part(self.__BASE_PART, must_exist=True)\n if base is None:\n # Catalog contains nothing.\n return set()\n return set(p for p in base.publishers())",
"def list_distributions(self, public_repo_name):\n local_repo = self.get_local_repo(public_repo_name)\n publications = self.get_publications()\n publications_for_repo = [x for x in publications if x['Prefix'] == public_repo_name]\n return sorted(publications_for_repo)",
"def publications(self) -> List[Publication]:\n return [Publication.from_dict(pub) for pub in self.item.properties.get(PUBLICATIONS, [])]",
"def get_published_repos(self):\n\n if self.verbose:\n print('Listing repos at: %s' % self.publish_url)\n\n r = self.__do_get(self.publish_url)\n\n # Create a distinct list of publications\n if r.status_code == requests.codes.ok:\n publications = r.json()\n return sorted(set([x['Prefix'] for x in publications]))\n else:\n raise AptlyApiError(r.status_code,\n 'Aptly API Error - %s - HTTP Error: %s' % (self.publish_url, r.status_code))",
"def get_watchlists(self) -> list:\n try:\n result = self.api.get_watchlists()\n except BrokerException as err:\n print('[!] Unable to get watchlists.')\n raise err\n else:\n return result",
"def GetAllExporters(self):\n return self.native.get_all_exporters()",
"def list_subscribers(self):\n return self._persistent_store.list_subscribers()",
"def speakers(self):\n return self._request('GET', '/speakers')",
"def query_publishers(self, query):\n content = self._serialize.body(query, 'PublishersQuery')\n response = self._send(http_method='POST',\n location_id='99b44a8a-65a8-4670-8f3e-e7f7842cce64',\n version='5.1',\n content=content)\n return self._deserialize('PublishersQuery', response)",
"def publishers(self, pubs=EmptyI):\n\n self.load()\n for pub in self.__data:\n # Any entries starting with \"_\" are part of the\n # reserved catalog namespace.\n if not pub[0] == \"_\" and (not pubs or pub in pubs):\n yield pub",
"def get_publisher_names():\n\n # publisher_names = [str(p) for p in Publisher.query.all()]\n publisher_names = [p.publisher_name for p in Publisher.query.all()]\n return jsonify(publisher_names=publisher_names)",
"def pkg_list(self, public_repo_name, distribution):\n\n if self.verbose:\n print('Listing packages from repo: %s in distribution: %s' % (public_repo_name, distribution))\n\n matching_publication = self.find_publication(distribution, public_repo_name)\n\n return self.find_packages(matching_publication)",
"def publisher_count(self):\n return len(self._publishers)",
"def all_speakers():\n return [_Speaker(id=s['id']) for s in _pulse.sink_list]",
"def get_list(self):\n\n return self._providers.keys()",
"def get_multiple_publications(self, scopus_id_list, caching=True):\n publication_list = []\n for scopus_id in scopus_id_list:\n publication = self.get_publication(scopus_id, caching=caching)\n publication_list.append(publication)\n return publication_list",
"def _list_printers(self):\n\n\t\tresult = []\n\t\texpr = re.compile('printer\\s+(\\S+)\\s.*?(\\S+abled)')\n\t\t(stdout,stderr,status) = self._shell_command(['/usr/bin/lpstat','-p'],{'LANG':'C'})\n\t\tif status == 0:\n\t\t\tfor line in stdout.split(\"\\n\"):\n\t\t\t\tmobj = expr.match(line)\n\t\t\t\tif mobj:\n\t\t\t\t\tentry = { 'printer' : mobj.group(1), 'status': mobj.group(2) }\n\t\t\t\t\tresult.append(entry)\n\t\treturn result",
"def getPublishedPlugs(*args, **kwargs):\n \n pass",
"def _setup_publishers(self):\n # mavros publishers\n self._local_vel_pub = \\\n rospy.Publisher(\n 'mavros/setpoint_velocity/cmd_vel', TwistStamped, queue_size=1)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
QueryPublishers. Query for service hook publishers.
|
def query_publishers(self, query):
content = self._serialize.body(query, 'PublishersQuery')
response = self._send(http_method='POST',
location_id='99b44a8a-65a8-4670-8f3e-e7f7842cce64',
version='5.1',
content=content)
return self._deserialize('PublishersQuery', response)
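A hedged usage sketch: it assumes `client` is an authenticated instance of this class and that a `PublishersQuery` model with `publisher_ids` and `results` fields exists in the same SDK; both names are assumptions.

# Hypothetical usage of query_publishers; model and field names are assumed.
query = PublishersQuery(publisher_ids=['tfs'])
result = client.query_publishers(query)
for publisher in result.results or []:
    print(publisher.id)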
|
[
"def publishers(self):\n\n base = self.get_part(self.__BASE_PART, must_exist=True)\n if base is None:\n # Catalog contains nothing.\n return set()\n return set(p for p in base.publishers())",
"def list_publishers(self):\n response = self._send(http_method='GET',\n location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',\n version='5.1')\n return self._deserialize('[Publisher]', self._unwrap_collection(response))",
"def list_printers(self,request):\n\n\t\t# ----------- DEBUG -----------------\n\t\tMODULE.info(\"printers/query invoked with:\")\n\t\tpp = pprint.PrettyPrinter(indent=4)\n\t\tst = pp.pformat(request.options).split(\"\\n\")\n\t\tfor s in st:\n\t\t\tMODULE.info(\" << %s\" % s)\n\t\t# -----------------------------------\n\n\t\tkey = request.options.get('key','printer')\n\t\tpattern = request.options.get('pattern','*')\n\n\t\tquota = self._quota_enabled()\t\t# we need it later\n\n\t\tresult = []\n\t\tplist = self._list_printers()\n\t\tfor element in plist:\n\t\t\ttry:\n\t\t\t\tprinter = element['printer']\n\t\t\t\tdata = self._printer_details(printer)\n\t\t\t\tfor field in data:\n\t\t\t\t\telement[field] = data[field]\n\t\t\t\t# filter according to query\n\t\t\t\tif fnmatch(element[key],pattern):\n\t\t\t\t\tif printer in quota:\n\t\t\t\t\t\telement['quota'] = quota[printer]\n\t\t\t\t\telse:\n\t\t\t\t\t\telement['quota'] = False\n\t\t\t\t\tresult.append(element)\n\t\t\texcept:\n\t\t\t\tpass\n\n\t\t# ---------- DEBUG --------------\n\t\tMODULE.info(\"printers/query returns:\")\n\t\tpp = pprint.PrettyPrinter(indent=4)\n\t\tst = ''\n\t\tif len(result) > 5:\n\t\t\ttmp = result[0:5]\n\t\t\tMODULE.info(\" >> %d entries, first 5 are:\" % len(result))\n\t\t\tst = pp.pformat(tmp).split(\"\\n\")\n\t\telse:\n\t\t\tst = pp.pformat(result).split(\"\\n\")\n\t\tfor s in st:\n\t\t\tMODULE.info(\" >> %s\" % s)\n\t\t# --------------------------------\n\n\t\tself.finished(request.id,result)",
"def _check_all_publishers_ready(self):\n self._check_publisher_ready(\n self._local_vel_pub.name, self._local_vel_pub)",
"def getPublishedPlugs(*args, **kwargs):\n \n pass",
"def get_queryset(self):\n self.author = get_object_or_404(\n Author, username=self.kwargs['username'])\n return self.author.entries_published()",
"def all_by_publisher(cls, publisher_key):\n return Collection.query(ancestor=publisher_key).fetch()",
"def filterPublisher(self):\n\n dd_pub = self.filterDatasetPublisherCB.currentText()\n\n if dd_pub != '':\n selection = self.check_results()\n filtered = []\n\n for dataset in selection:\n dataset_pub = get_publisher(dataset)\n if dataset_pub == dd_pub:\n filtered.append(dataset)\n\n self.results = filtered\n\n return",
"def query_all_objects( self ):\n return self._k8s.query_daemonsets( filter=self._filter )",
"def get_queryset(self):\n\t\treturn PortalWidget.objects.filter(show_delegate=True\n\t\t\t).order_by('-pinned', '-pk')",
"def getQPackages(self):\n return q.qp.find(domain=self.domainname)",
"def speakers(self):\n return self._request('GET', '/speakers')",
"def run_publishers(type_name='', publisher_type=PRE_PUBLISHER_TYPE):\n if type_name != '':\n for f in publishers[publisher_type].get('', []): # run generic\n f() # publishers first\n\n for f in publishers[publisher_type].get(type_name.lower(), []):\n f()",
"def find_professors():\n daemo.publish(\n project_key=PROJECT_KEY,\n tasks=[\n {\n \"stream\": \"Computer Science\",\n \"institute\": \"Stanford University\"\n },\n {\n \"stream\": \"Bioengineering\",\n \"institute\": \"Stanford University\"\n },\n ],\n approve=approve_correct_response,\n completed=rate_workers\n )",
"def get_targets(self, query: str) -> typing.List[PDataStore]:\n return Query(query).filter(list(self.pdata_store.clients(active=self.__peers.keys())))",
"def publishers(self, pubs=EmptyI):\n\n self.load()\n for pub in self.__data:\n # Any entries starting with \"_\" are part of the\n # reserved catalog namespace.\n if not pub[0] == \"_\" and (not pubs or pub in pubs):\n yield pub",
"def publish_last(self):\n\n for topic in self.publishers:\n data = self.publishers[topic]['data']\n self.publishers[topic]['publisher'].publish(data)",
"def query(self) -> typing.Iterable[typing.Any]: # pragma: no cover\n pass",
"def _setup_publishers(self):\n # mavros publishers\n self._local_vel_pub = \\\n rospy.Publisher(\n 'mavros/setpoint_velocity/cmd_vel', TwistStamped, queue_size=1)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
CreateSubscription. Create a subscription.
|
def create_subscription(self, subscription):
content = self._serialize.body(subscription, 'Subscription')
response = self._send(http_method='POST',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='5.1',
content=content)
return self._deserialize('Subscription', response)
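A hedged sketch of how the method above might be called, assuming `client` is an authenticated instance of this class and that the SDK's `Subscription` model accepts the keyword fields shown; all ids, event and action names, and URLs are placeholders, not real values.

# Hypothetical usage; 'Subscription' and its fields are assumptions, and the
# consumer URL is deliberately an unreachable placeholder.
subscription = Subscription(
    publisher_id='tfs',
    event_type='git.push',
    consumer_id='webHooks',
    consumer_action_id='httpRequest',
    consumer_inputs={'url': 'https://example.invalid/hook'},
)
created = client.create_subscription(subscription)
print(created.id)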
|
[
"def create_subscription(self, subscription_info, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.create_subscription_with_http_info(subscription_info, **kwargs)\n else:\n (data) = self.create_subscription_with_http_info(subscription_info, **kwargs)\n return data",
"def create(subdomain, record_id, email, language):\n key_name = '%s:%s:%s' % (subdomain, record_id, email)\n return Subscription(key_name=key_name, subdomain=subdomain,\n person_record_id=record_id,\n email=email, language=language)",
"def create(self, validated_data):\n subscription = Subscription.objects.create(**validated_data)\n request = self._get_request()\n subscription.user = request.user\n subscription.save()\n return subscription",
"def create_subscription(\n self,\n offer_type: str,\n subscription_name: str,\n billing_enrollment_name: str = None,\n deployment_user_id: str = None):\n \n #if self.get_subscription(subscription_name) is None:\n if billing_enrollment_name is None:\n # No billing enrollment passed, get the first one by default\n\n all_billing_enrollments = \\\n self._billing_integration_service\\\n .get_all_billing_enrollments()\n\n if all_billing_enrollments is None or \\\n len(all_billing_enrollments) == 0:\n raise ValueError('No billing enrollments found')\n else:\n billing_enrollment_name = \\\n all_billing_enrollments[0]\n else:\n\n # Validate that billing enrollment exists\n billing_enrollment = \\\n self._billing_integration_service\\\n .get_billing_enrollment_name(\n billing_enrollment_name)\n\n if billing_enrollment == '':\n raise \\\n ValueError('No billing enrollments found, searched: {}'\\\n .format(billing_enrollment_name))\n\n return self._subscription_integration_service\\\n .create_subscription(\n offer_type=offer_type,\n subscription_name=subscription_name,\n billing_enrollment_name=billing_enrollment_name,\n deployment_user_id=deployment_user_id)",
"def subscription_created(sender, **kwargs):\n print('sub created')\n # 'sender' argument sent with the signal. In this case, is an instance of\n # the instant payment notification.\n ipn_obj = sender # ipn - instant payment notification\n # custom was defined in the PayPayPaymentForm in magazines/templatetags/magazine_extras.py\n magazine_id = ipn_obj.custom.split('-')[0]\n user_id = ipn_obj.custom.split('-')[1]\n # Create record in the db\n Purchase.objects.create(magazine_id=magazine_id,\n user_id=user_id,\n subscription_end=arrow.now().replace(weeks=+4).datetime)",
"def post(self, *args, **kwargs):\n\n sub_type = args[0]\n sub_id = uuid.UUID(args[1]) if len(args) > 1 else uuid.uuid4()\n\n sub = self.service.add_subscription(sub_type=sub_type, sub_id=sub_id,\n **kwargs)\n\n self.set_header(\"Location\", \"/rni/v1/subscriptions/%s/%s\" %\n (sub.SUB_TYPE, sub.service_id))",
"def create(self, request, *args, **kwargs):\n try:\n plan_id = request.data.get(\"plan\")\n app_id = request.data.get(\"app\")\n active = request.data.get(\"active\")\n user = request.user\n subscription = Subscription.objects.create(\n plan_id=plan_id, app_id=app_id, user_id=user.id, active=active\n )\n serializer = self.get_serializer(subscription)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n except Exception as err:\n return Response({\"message\": str(err)}, status=status.HTTP_400_BAD_REQUEST)",
"def create(customer, plan, quantity=None, trial_days=None, token=None, coupon=None, tax_percent=None):\n quantity = hooks.hookset.adjust_subscription_quantity(customer=customer, plan=plan, quantity=quantity)\n cu = customer.stripe_customer\n\n subscription_params = {}\n if trial_days:\n subscription_params[\"trial_end\"] = datetime.datetime.utcnow() + datetime.timedelta(days=trial_days)\n if token:\n subscription_params[\"source\"] = token\n\n subscription_params[\"plan\"] = plan\n subscription_params[\"quantity\"] = quantity\n subscription_params[\"coupon\"] = coupon\n subscription_params[\"tax_percent\"] = tax_percent\n resp = cu.subscriptions.create(**subscription_params)\n\n return sync_subscription_from_stripe_data(customer, resp)",
"def cmd_subscription_add_subscription(context, destination_identity,\n filter_identity, options):\n csm = get_CmdSubscriptionManager(context, options)\n\n owned_flag_opt = options['owned']\n select_opt = options['select']\n\n # Search the existing filters and destinations to find instances\n # that match the destination_identity and filter_identity\n sub_dest_inst, sub_filter_inst = get_insts_for_subscription_identities(\n csm, destination_identity, filter_identity, 'add-subscription',\n select_opt)\n\n # Duplicates test in SubscriptionManager but with message for parameters of\n # the command rather than the pywbem API.\n if (csm.is_owned_filter(sub_filter_inst) or\n csm.is_owned_destination(sub_dest_inst)) and not owned_flag_opt:\n raise click.ClickException(\n \"Permanent subscriptions cannot be created with owned filters \"\n \"or destinations. Create an owned subscription or use a \"\n \"permanent filter and destination. Destination Name={0}, \"\n \"Filter Name={1}\".format(sub_dest_inst['Name'],\n sub_filter_inst['Name']))\n\n rslt = csm.add_subscriptions(sub_filter_inst.path,\n sub_dest_inst.path, owned_flag_opt)\n\n context.spinner_stop()\n click.echo(\"Added {0} subscription: DestinationName={1}, FilterName={2}\".\n format(owned_flag_str(owned_flag_opt),\n sub_dest_inst.path['Name'],\n sub_filter_inst.path[\"Name\"]))\n if context.verbose:\n click.echo(\"\\n\\n{0}\".format(rslt[0].tomof()))",
"def create_subscription(self, topic_arn, protocol, endpoint):\n\n if not all([topic_arn, protocol, endpoint]):\n raise RuntimeError(\"You must send valid topic ARN, Protocol and Endpoint to add a subscription\")\n\n self.client.subscribe(\n TopicArn=topic_arn,\n Protocol=protocol,\n Endpoint=endpoint\n )",
"def tryCreateSubscription(self, show, subscription):\n if not subscription[\"enabled\"].isChecked():\n return\n\n try:\n show.createSubscription(\n subscription[\"allocation\"],\n float(subscription[\"size\"].value()),\n float(subscription[\"burst\"].value())\n )\n except opencue.exception.CueException as e:\n QtWidgets.QMessageBox.critical(\n self,\n \"Failed To Create Subscription\",\n str(e),\n QtWidgets.QMessageBox.Ok\n )",
"def create_subscription(self, contact_id, list_id, status='normal', account_id=None, client_folder_id=None):\n account_id, client_folder_id = self._required_values(account_id, client_folder_id)\n data = dict(subscription=dict(contactId=contact_id,listId=list_id, status=status))\n result = self._do_request('a/%s/c/%s/subscriptions/' % (account_id, client_folder_id),\n parameters=data,\n method='post')\n return result",
"def create_webhook_subscription(webhook_body):\r\n MSGRAPH.base_url = config.RESOURCE\r\n subscription = MSGRAPH.post(config.ISG_VERSION + '/subscriptions', data=webhook_body, headers=request_headers(), format='json').data\r\n print(\"Create subscription response\", subscription)\r\n if b'' in subscription:\r\n print(\"Please Sign-in using a on.microsoft.com account for demo data\")\r\n subscription = None\r\n elif 'error' in subscription:\r\n if subscription['error']['code'] == 'InvalidAuthenticationToken':\r\n return flask.redirect(flask.url_for('login'))\r\n if subscription['error']['message'] == 'Subscription validation request failed. Must respond with 200 OK to this request.':\r\n message = \"<strong>Error:</strong> Please run 'ngrok' to allow the webhook notification sevice to access your app, then update the config.py file to the correct ngrok url.\"\r\n flask.flash(message, category='danger')\r\n else:\r\n message = '<strong>Success</strong> Webhook subscription created. Id: ' + subscription.get('id')\r\n flask.flash(message, category='success')\r\n\r\n MSGRAPH.base_url = config.RESOURCE + config.API_VERSION + '/'\r\n return subscription",
"def add_new_subscription(telegram_id, user_data):\r\n logging.info(\"Add subscription new in DB\")\r\n new_subscription = Subscription(telegram_id=telegram_id,\r\n check_in=user_data[\"check_in\"],\r\n check_out=user_data[\"check_out\"],\r\n city=user_data[\"city\"], currency=user_data[\"currency\"],\r\n max_price=user_data[\"max_price\"],\r\n adults=user_data['adults'],\r\n room_type=user_data[\"room_type\"])\r\n session.add(new_subscription)\r\n session.commit()\r\n return new_subscription.id",
"def test_create_subscriptions(self):\n response = self.post_json(self.resource, {\n 'target_id': 1,\n 'target_type': 'project'\n })\n subscription = response.json\n\n self.assertEqual('project', subscription['target_type'])\n self.assertEqual(1, subscription['target_id'])\n self.assertEqual(2, subscription['user_id'])\n self.assertIsNotNone(subscription['id'])\n\n response2 = self.post_json(self.resource, {\n 'user_id': 2,\n 'target_id': 2,\n 'target_type': 'project'\n })\n subscription2 = response2.json\n\n self.assertEqual('project', subscription2['target_type'])\n self.assertEqual(2, subscription2['target_id'])\n self.assertEqual(2, subscription2['user_id'])\n self.assertIsNotNone(subscription2['id'])",
"def insert_subscription(self, subscription: Subscription):\n if not subscription.filter:\n raise ValueError(\"Subscription filter cannot be falsy.\")\n self.insert_table_data(\n \"subscriptions\",\n dict(\n guild_id=subscription.guild_id,\n channel_id=subscription.channel_id,\n filter=subscription.filter))",
"def create_subscription(self, account, subscription, current_period, next_period):\n\t\t# determine amount of charge, normaly it is just price *-1\n\t\t# but in case of first time use, there may be prorated charges\n\t\tstart_date = subscription.start_date \n\t\t# datetime.datetime.strptime(subscription.start.date,\"%Y-%m-%d\")\n\t\tstart_period = get_next_billing_period_for_date(start_date)\n\n\t\tif (start_period == current_period and start_date.day > 1):\n\t\t# we need to prorate\n\t\t\tnum_of_day_in_month = monthrange(start_date.year, start_date.month)[1]\n\t\t\tprice_per_day = subscription.price / num_of_day_in_month\n\n\t\t\tnum_days_to_charge = (num_of_day_in_month + 1) - start_date.day\n\n\t\t\tamount = ((price_per_day * num_days_to_charge) + subscription.price) * -1\n\t\t\tcharge_memo = _(\"%(description)s charge %(current_period)s - %(next_period)s \"\n\t\t\t\t\t\t\"plus first %(num_days)s days\") % \\\n\t\t\t\t\t\t{'description': subscription.product.description,\n\t\t\t\t\t\t\t'current_period': current_period, 'next_period': next_period, \n\t\t\t\t\t\t\t'num_days': num_days_to_charge}\n\t\telse:\n\t\t\tamount = subscription.price * -1\n\t\t\tcharge_memo = _(\"%(description)s charge %(current_period)s - %(next_period)s\") % \\\n\t\t\t\t\t\t\t{'description': subscription.product.description, \n\t\t\t\t\t\t\t'current_period': current_period, 'next_period': next_period}\n\t\t# append location to memo if needed\n\t\tif (subscription.practice_location != None):\n\t\t\tlocation = \" (%s)\" % (subscription.practice_location.practice_name)\n\t\t\tcharge_memo = charge_memo + location\n\n\t\tsubscription_transaction = AccountTransaction(account=account,\n\t\t\t\t\t\t\t\ttx_type='2', # monthly product charge\n\t\t\t\t\t\t\t\tamount=amount,\n\t\t\t\t\t\t\t\tperiod_start=current_period,\n\t\t\t\t\t\t\t\tperiod_end=next_period,\n\t\t\t\t\t\t\t\tmemo=charge_memo)\n\t\tsubscription_transaction.save()\n\n\t\treturn subscription_transaction",
"def addSubscription(subscriber, state, config):",
"def _add_subscription(self, node, subscription_type, subscription_obj, subscription_handle):\n self.subscriptions[node] = {\"subscription\": subscription_obj, \"handle\": subscription_handle, \"type\": subscription_type}"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
ReplaceSubscription. Update a subscription, identified by the ID of the subscription you wish to update.
|
def replace_subscription(self, subscription, subscription_id=None):
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
content = self._serialize.body(subscription, 'Subscription')
response = self._send(http_method='PUT',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='5.1',
route_values=route_values,
content=content)
return self._deserialize('Subscription', response)
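A hedged fetch-modify-replace sketch: it assumes `client` is an authenticated instance of this class and additionally that it exposes a `get_subscription` method, which is not part of this record; ids, field names and URLs are placeholders.

# Hypothetical usage; 'get_subscription', the field names and all id/URL
# values are assumptions made only for illustration.
existing = client.get_subscription('11111111-1111-1111-1111-111111111111')
existing.consumer_inputs['url'] = 'https://example.invalid/new-hook'
updated = client.replace_subscription(existing, subscription_id=existing.id)
print(updated.modified_date)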
|
[
"def update_subscription(self, id: UUID, data: Dict):\n subscriptions.update().where(subscriptions.c.id == id).values(data).execute()\n return data",
"async def update_subscription(\n self,\n topic_name: str,\n subscription: Union[SubscriptionProperties, Mapping[str, Any]],\n **kwargs: Any\n ) -> None:\n\n _validate_entity_name_type(topic_name, display_name=\"topic_name\")\n # we should not mutate the input, making a copy first for update\n subscription = deepcopy(\n create_properties_from_dict_if_needed(subscription, SubscriptionProperties)\n )\n to_update = subscription._to_internal_entity(\n self.fully_qualified_namespace, kwargs\n )\n\n create_entity_body = CreateSubscriptionBody(\n content=CreateSubscriptionBodyContent(\n subscription_description=to_update,\n )\n )\n request_body = create_entity_body.serialize(is_xml=True)\n await self._create_forward_to_header_tokens(to_update, kwargs)\n with _handle_response_error():\n await self._impl.subscription.put(\n topic_name, subscription.name, request_body, if_match=\"*\", **kwargs\n )",
"def update_subscription(self, token, subscribe):\n customer = Customer.get_by_id(token.customer_id)\n if not customer:\n raise errors.CustomerNotFound()\n customer.subscriptions_update(subscribe, None)\n return {'subscribe': customer.subscription_info()}",
"def update_subscriptions(self, case_id, subscriptions):\n self.subscriptions.update_subscriptions(case_id, subscriptions)",
"def change_subscription(plan: Plan) -> bool:\n if not current_user.stripe:\n return False\n sub_id = current_user.stripe.subscription_id\n if not sub_id or current_user.plan == plan:\n return False\n sub = stripe.Subscription.retrieve(sub_id)\n sub.modify(\n sub_id,\n cancel_at_period_end=False,\n items=[{\"id\": sub[\"items\"][\"data\"][0].id, \"plan\": plan.stripe_id}],\n )\n current_user.stripe.subscription_id = sub.id\n current_user.plan = plan\n current_user.save()\n return True",
"def modify_subscription(email):\n\n user = get_current_user()\n check_user_access(email)\n\n sub_id = request.form.get(\"subscription\") or request.args.get(\"subscription\") or abort(404)\n\n subscription = get_by_key(ndb.key.Key(urlsafe=sub_id))\n\n if not subscription:\n abort(404)\n\n if request.method == \"POST\":\n action = request.form.get(\"action\", None)\n feed_removed = False\n try:\n if action == \"save\":\n subscription.title = request.form.get(\"title\", subscription.title)\n subscription.put()\n user_subscriptions(user, force_refresh=True)\n flash(_(\"Feed updated\"))\n elif action == \"remove\":\n unsubscribe(subscription)\n flash(_(\"Feed removed.\"))\n feed_removed = True\n\n except Exception as e:\n flash(_(\"Could not save feed\"), \"error\")\n app.logger.exception(e)\n\n if feed_removed:\n return redirect(url_for(\"user.page_profile\", email=user.email))\n\n return render_template(\"modify-subscription.html.j2\", subscription=subscription)",
"def update_webhook_subscription(subscription_id, webhook_body):\r\n MSGRAPH.base_url = config.RESOURCE \r\n subscription = MSGRAPH.patch(config.ISG_VERSION + '/subscriptions/' + subscription_id , data=webhook_body, headers=request_headers(), format='json').data\r\n print(\"Update subscription response\", subscription)\r\n if b'' in subscription:\r\n print(\"Please Sign-in using a on.microsoft.com account for demo data\")\r\n subscription = None\r\n elif 'error' in subscription:\r\n if subscription['error']['code'] == 'InvalidAuthenticationToken':\r\n return flask.redirect(flask.url_for('login'))\r\n else:\r\n message = '<strong>Success</strong> Webhook subscription updated. Id: ' + subscription.get('id')\r\n flask.flash(message, category='success')\r\n\r\n MSGRAPH.base_url = config.RESOURCE + config.API_VERSION + '/'\r\n return subscription",
"def update_subscription_audit(self, id: UUID, data: Dict):\n subscription_audit.update().where(subscription_audit.c.id == id).values(data).execute()",
"def change_renewal_date(auth, subscription_id, date,\n base_url='https://api.cratejoy.com/v1/'):\n\n payload = json.dumps({u'end_date': date})\n\n subscriptions_endpoint = '{}subscriptions/{}/'.format(\n base_url, subscription_id)\n\n resp = requests.put(\n subscriptions_endpoint,\n data=payload,\n auth=auth\n )\n\n print('PUT request to {} responded with status '\n 'code: {}'.format(subscriptions_endpoint,\n resp.status_code))",
"def update_subscription_by_user(self, token, customer, subscribe):\n customer.subscriptions_update(subscribe, token.user_id)\n return {'subscribe': customer.subscription_info()}",
"def update(self, subscribed_sku):\n return self.request().update(subscribed_sku)",
"def subscription_patching(self, subscription):\n\n if not subscription:\n raise ValidationFailed(_(u'No subscription to create.'))\n\n if not isinstance(subscription, dict):\n msg = _('Subscriptions must be a dict.')\n raise ValidationFailed(msg)\n\n subscriber = subscription.get('subscriber')\n subscriber_type = None\n\n if subscriber:\n parsed_uri = urllib_parse.urlparse(subscriber)\n subscriber_type = parsed_uri.scheme\n\n if subscriber_type not in self._limits_conf.subscriber_types:\n msg = _(u'The subscriber type of subscription must be '\n u'supported in the list {0}.')\n raise ValidationFailed(msg, self._limits_conf.subscriber_types)\n\n options = subscription.get('options')\n if options and not isinstance(options, dict):\n msg = _(u'Options must be a dict.')\n raise ValidationFailed(msg)\n\n self._validate_retry_policy(options)\n\n ttl = subscription.get('ttl')\n if ttl:\n if not isinstance(ttl, int):\n msg = _(u'TTL must be an integer.')\n raise ValidationFailed(msg)\n\n if ttl < MIN_SUBSCRIPTION_TTL:\n msg = _(u'The TTL for a subscription '\n 'must be at least {0} seconds long.')\n raise ValidationFailed(msg, MIN_SUBSCRIPTION_TTL)\n\n # NOTE(flwang): By this change, technically, user can set a very\n # big TTL so as to get a very long subscription.\n now = timeutils.utcnow_ts()\n now_dt = datetime.datetime.utcfromtimestamp(now)\n msg = _(u'The TTL seconds for a subscription plus current time'\n ' must be less than {0}.')\n try:\n # NOTE(flwang): If below expression works, then we believe the\n # ttl is acceptable otherwise it exceeds the max time of\n # python.\n now_dt + datetime.timedelta(seconds=ttl)\n except OverflowError:\n raise ValidationFailed(msg, datetime.datetime.max)",
"def create_subscription(self, subscription):\n content = self._serialize.body(subscription, 'Subscription')\n response = self._send(http_method='POST',\n location_id='fc50d02a-849f-41fb-8af1-0a5216103269',\n version='5.1',\n content=content)\n return self._deserialize('Subscription', response)",
"def insert_subscription(self, subscription: Subscription):\n if not subscription.filter:\n raise ValueError(\"Subscription filter cannot be falsy.\")\n self.insert_table_data(\n \"subscriptions\",\n dict(\n guild_id=subscription.guild_id,\n channel_id=subscription.channel_id,\n filter=subscription.filter))",
"def reallocate_subscription( recurring_subscription_id, reallocate_to ):\n\n # Configure Braintree.\n init_braintree_credentials( current_app )\n\n merchant_account_id = {\n 'NERF': current_app.config[ 'NUMBERSUSA' ],\n 'ACTION': current_app.config[ 'NUMBERSUSA_ACTION' ]\n }\n\n # This is an administrative function and we allow them to grab a default payment method.\n subscription = braintree.Subscription.find( recurring_subscription_id )\n\n # Getting this far, we can now update the subscription to the new plan and merchant account ID as required.\n # The original Braintree transaction maintains the same merchant account ID for historical significance.\n # The original Braintree transaction that is reallocated will have the new subscription plan ID.\n # New Braintree transactions from the subscription will have new merchant account ID/subscription plan ID.\n braintree_subscription = braintree.Subscription.update(\n recurring_subscription_id,\n {\n 'id': recurring_subscription_id,\n 'payment_method_token': subscription.payment_method_token,\n 'plan_id': merchant_account_id[ reallocate_to ],\n 'merchant_account_id': merchant_account_id[ reallocate_to ]\n }\n )\n if not braintree_subscription.is_success:\n errors = handle_braintree_errors( braintree_subscription )\n logging.exception( AdminUpdateSubscriptionPathError( errors=errors ).message )\n\n return braintree_subscription",
"def removeSubscription(subscriber):",
"def unregister(self, subscription):\n with self.subscriptions_lock:\n try:\n del self.subscriptions[subscription.sid]\n except KeyError:\n pass",
"def configure_subscription_instance(\n self,\n request: dts_20200101_models.ConfigureSubscriptionInstanceRequest,\n ) -> dts_20200101_models.ConfigureSubscriptionInstanceResponse:\n runtime = util_models.RuntimeOptions()\n return self.configure_subscription_instance_with_options(request, runtime)",
"def ex_toggle_subscription_auto_renew(self, subscription):\r\n path = '/subscriptions/%s/action/' % (subscription.id)\r\n response = self._perform_action(path=path, action='auto_renew',\r\n method='POST')\r\n return response.status == httplib.OK"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
CreateSubscriptionsQuery. Query for service hook subscriptions.
|
def create_subscriptions_query(self, query):
content = self._serialize.body(query, 'SubscriptionsQuery')
response = self._send(http_method='POST',
location_id='c7c3c1cf-9e05-4c0d-a425-a0f922c2c6ed',
version='5.1',
content=content)
return self._deserialize('SubscriptionsQuery', response)
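A minimal sketch of how such a query might be issued (an assumption, not from the source); the SubscriptionsQuery import path and its field names, as well as the `client` instance, are assumed rather than confirmed by the document.

# Hypothetical sketch: list subscriptions for one publisher/event type.
from azure.devops.v5_1.service_hooks.models import SubscriptionsQuery  # assumed path

query = SubscriptionsQuery(publisher_id='tfs', event_type='workitem.created')  # assumed fields
result = client.create_subscriptions_query(query)  # `client` assumed to exist
for sub in (result.results or []):
    print(sub.id, sub.event_type, sub.status)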
|
[
"def getSubscriptions(state=None):",
"def getSubscriptions(entity):",
"def getUserSubscriptions(user, request):\n subscriptions = user.get('subscribedTo', [])\n\n search_params = searchParams(request)\n tags = set(search_params.pop('tags', []))\n\n # XXX Whhen refactoring subscriptions storage to a different collection\n # Change this for a search on subscriptions collection\n if tags:\n filtered_subscriptions = []\n for subscription in subscriptions:\n if tags.intersection(set(subscription.get('tags', []))) == tags:\n filtered_subscriptions.append(subscription)\n subscriptions = filtered_subscriptions\n\n handler = JSONResourceRoot(request, subscriptions)\n return handler.buildResponse()",
"def query_all_subscriptions():\n logger.info('Attempting to fetch all the subscriptions')\n subscriptions = db.session.query(SubscriptionModel) \\\n .options(joinedload(SubscriptionModel.network_filter),\n joinedload(SubscriptionModel.measurement_groups),\n joinedload(SubscriptionModel.nfs)) \\\n .all()\n db.session.remove()\n return subscriptions",
"def list_subscriptions(self):\n models = self.get_db_model(self.engine)\n subscriptions = models.classes.subscriptions\n session = self.Session()\n return session.query(subscriptions).all()",
"def ListTopicSubscriptions(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def apnSubscriptionsByToken(token): # @NoSelf",
"def get_webhook_subscriptions():\r\n MSGRAPH.base_url = config.RESOURCE \r\n # print(\"MSGRAPH.base_url\", MSGRAPH.base_url) \r\n subscriptions = MSGRAPH.get(config.ISG_VERSION + '/subscriptions').data\r\n print(\"Active subscriptions :\", subscriptions)\r\n if b'' in subscriptions:\r\n print(\"Please Sign-in using a on.microsoft.com account for demo data\")\r\n subscriptions = None\r\n elif 'error' in subscriptions:\r\n if subscriptions['error']['code'] == 'InvalidAuthenticationToken':\r\n\r\n return flask.redirect(flask.url_for('login'))\r\n\r\n MSGRAPH.base_url = config.RESOURCE + config.API_VERSION + '/'\r\n return subscriptions",
"def get_webhook_subscriptions(self, webhook_id):\n collection = self._get_collection('subscriptions')\n subs = collection.find({'webhook_id': webhook_id})\n return subs",
"def getSubscriptions(self):\n response = self.stub.GetSubscriptions(\n show_pb2.ShowGetSubscriptionRequest(show=self.data), timeout=Cuebot.Timeout)\n subscriptionSeq = response.subscriptions\n return [opencue.wrappers.subscription.Subscription(subs)\n for subs in subscriptionSeq.subscriptions]",
"def getSubscription(subscriber):",
"def process_subscriptions():\n\n # TODO: don't process subscriptions that don't have any active\n # user subscriptions\n active_subs = Subscription.objects.all()\n notify_users = set()\n time_started = now()\n hit_count = 0\n\n for s in active_subs:\n try:\n last_hit_date = s.subscriptionhit_set.latest().created\n except SubscriptionHit.DoesNotExist:\n last_hit_date = s.created\n\n if s.search_backend == Subscription.HAYSTACK:\n results = (\n VoikkoSearchQuerySet()\n .auto_query(s.search_term)\n .filter(pub_date__gt=last_hit_date)\n .load_all()\n )\n\n hits = [\n SubscriptionHit.objects.get_or_create(\n subject=r.title,\n link=r.object.get_absolute_url(),\n defaults={\"hit\": r.object}\n )\n for r in results\n ]\n elif s.search_backend == Subscription.GEO:\n hits = []\n point = Point(*s.extra[\"point\"])\n distance = D(m=s.extra[\"distance_meters\"])\n\n points = PointIndex.objects.filter(\n point__distance_lte=(point, distance),\n content_date__gt=last_hit_date\n ).annotate(distance=Distance('point', point))\n\n for p in points:\n hit, created = hit_tuple = (\n SubscriptionHit.objects.get_or_create(\n subject=p.title,\n link=p.content_object.get_absolute_url(),\n defaults={\n \"hit\": p.content_object,\n \"extra\": {\n \"point\": list(p.point),\n }\n }\n )\n )\n if not created:\n # ensure geographic metadata\n hit.extra.update(point=list(p.point))\n hit.save()\n hits.append(hit_tuple)\n\n polygons = PolygonIndex.objects.filter(\n polygon__distance_lte=(point, distance),\n content_date__gt=last_hit_date\n )\n\n for p in polygons:\n hit, created = hit_tuple = (\n SubscriptionHit.objects.get_or_create(\n subject=p.title,\n link=p.content_object.get_absolute_url(),\n defaults={\n \"hit\": p.content_object,\n \"extra\": {\n \"point\": list(p.polygon.centroid),\n }\n }\n )\n )\n if not created:\n # ensure geographic metadata\n hit.extra.update(point=list(p.polygon.centroid))\n hit.save()\n hits.append(hit_tuple)\n else:\n logger.error(\"Unknown search backend %d\" % s.search_backend)\n\n for hit, created in hits:\n hit.subscriptions.add(s)\n\n hit_count += len(hits)\n\n if hits:\n users = s.subscribed_users.filter(\n subscriptionuser__active=True\n )\n\n for hit, created in hits:\n hit.notified_users.add(*users)\n\n\n email_users = s.subscribed_users.filter(\n subscriptionuser__active=True,\n subscriptionuser__send_mail=True,\n profile__email_confirmed__isnull=False\n )\n notify_users.update(email_users)\n\n\n for u in notify_users:\n notifications = (\n SubscriptionHit.objects\n .filter(\n created__gte=time_started,\n notified_users=u\n )\n .order_by('-created')\n )\n notify_count = notifications.count()\n notifications = notifications[:10]\n\n # TODO activate user's preferred language here\n send_mail(\n ungettext(\n \"[%(SITE_NAME)s] %(event_count)s new event\",\n \"[%(SITE_NAME)s] %(event_count)s new events\",\n notify_count\n ) % {\n \"SITE_NAME\": settings.SITE_NAME,\n \"event_count\": notify_count,\n },\n loader.get_template(\"subscriptions/emails/new_events.txt\").render({\n \"notifications\": notifications,\n \"more_notifications\": max(0, notify_count-10),\n \"user\": u,\n \"SITE_URL\": settings.SITE_URL,\n \"SITE_NAME\": settings.SITE_NAME,\n }),\n settings.DEFAULT_FROM_EMAIL,\n [u.email],\n )\n\n return hit_count",
"def __list_communication_service_by_subscription(args):\n print(\"\\nList by subscription...\")\n\n acs_client = __get_communication_management_client()\n resources = acs_client.communication_service.list_by_subscription()\n print(\"Found resources: \")\n for resource in resources:\n print(\"\")\n __print_resource(resource)",
"async def subscribe(self, query, *, variables=None, wait_confirmation=True):\n msg_id = await self.start(query, variables=variables)\n if wait_confirmation:\n await self.receive(wait_id=msg_id)\n return msg_id",
"def list_for_subscription(\n self,\n query_options=None, # type: Optional[\"_models.QueryOptions\"]\n **kwargs # type: Any\n ):\n # type: (...) -> Iterable[\"_models.RemediationListResult\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"_models.RemediationListResult\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n \n _top = None\n _filter = None\n if query_options is not None:\n _top = query_options.top\n _filter = query_options.filter\n api_version = \"2019-07-01\"\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_for_subscription.metadata['url'] # type: ignore\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self._config.subscription_id\", self._config.subscription_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if _top is not None:\n query_parameters['$top'] = self._serialize.query(\"top\", _top, 'int', minimum=0)\n if _filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"filter\", _filter, 'str')\n query_parameters['api-version'] = self._serialize.query(\"api_version\", api_version, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('RemediationListResult', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(_models.ErrorResponse, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )",
"def addSubscription(subscriber, state, config):",
"def list_subscriptions(connection: Connection, project_id: Optional[str] = None,\n project_name: Optional[str] = None, to_dictionary: bool = False,\n limit: Optional[int] = None,\n **filters) -> Union[List[\"Subscription\"], List[dict]]:\n project_id = Subscription._project_id_check(connection, project_id, project_name)\n msg = 'Error getting subscription list.'\n # NOTE DE208094 x-mstr-total-count is not working correctly for this\n # endpoint the chunk_size is thus increased to allow downloading all\n # subscriptions at once. Change to 1000 for async chunking once it is\n # working\n objects = helper.fetch_objects_async(\n connection=connection,\n api=subscriptions.list_subscriptions,\n async_api=subscriptions.list_subscriptions_async,\n limit=limit,\n chunk_size=100000,\n filters=filters,\n error_msg=msg,\n dict_unpack_value=\"subscriptions\",\n project_id=project_id,\n )\n\n if to_dictionary:\n return objects\n else:\n return [\n Subscription.from_dict(\n source=obj,\n connection=connection,\n project_id=project_id,\n ) for obj in objects\n ]",
"def retrieve_subscriptions(\n self, where: str=None, where_values: tuple=None, group_by: str=None,\n order_by: str=None, limit: int=None\n ) -> Generator[Subscription, None, None]:\n fetched_rows = self.retrieve_table_data(\n table = \"subscriptions\",\n where = where,\n where_values = where_values,\n selection = \"guild_id, channel_id, filter\",\n group_by = group_by,\n order_by = order_by,\n limit = limit\n )\n for row in (fetched_rows or []):\n guild_id = row[0]\n channel_id = row[1]\n _filter = row[2]\n yield Subscription(guild_id, channel_id, _filter)",
"def view_subscriptions(list_id):\n list_ = _get_list_or_404(list_id)\n\n totals = newsletter_service.count_subscriptions_by_state(list_.id)\n\n return {\n 'list_': list_,\n 'totals': totals,\n 'State': SubscriptionState,\n }"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
CreateTestNotification. Sends a test notification. This is useful for verifying the configuration of an updated or new service hooks subscription.
|
def create_test_notification(self, test_notification, use_real_data=None):
query_parameters = {}
if use_real_data is not None:
query_parameters['useRealData'] = self._serialize.query('use_real_data', use_real_data, 'bool')
content = self._serialize.body(test_notification, 'Notification')
response = self._send(http_method='POST',
location_id='1139462c-7e27-4524-a997-31b9b73551fe',
version='5.1',
query_parameters=query_parameters,
content=content)
return self._deserialize('Notification', response)
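A hedged sketch of exercising this call (not from the source): sending a dry-run notification for an existing subscription to confirm its configuration before real events flow through it. The Notification model field and the `client`/`existing_subscription` objects are assumptions.

# Hypothetical sketch: dry-run a notification for an existing subscription.
from azure.devops.v5_1.service_hooks.models import Notification  # assumed path

test = Notification(subscription_id=existing_subscription.id)  # assumed field name
result = client.create_test_notification(test, use_real_data=False)
print(result.status)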
|
[
"def send_test_event_notification(Notification=None, TestEventType=None):\n pass",
"def test_notification(self, notification=None, notification_type=None,\r\n details=None):\r\n if notification:\r\n # Test an existing notification\r\n uri = \"/%s/%s/test\" % (self.uri_base, utils.get_id(notification))\r\n body = None\r\n else:\r\n uri = \"/test-notification\"\r\n body = {\"type\": utils.get_id(notification_type),\r\n \"details\": details}\r\n resp, resp_body = self.api.method_post(uri, body=body)",
"def test_notification(self, notification=None, notification_type=None,\r\n details=None):\r\n return self._notification_manager.test_notification(\r\n notification=notification, notification_type=notification_type,\r\n details=details)",
"def test_badge_create_sends_notification(self):\n with patch('notifications.models.send_notification') as send:\n badge = badge_api.create_badge(*self.badge_values)\n self.assertTrue(send.called)",
"def test_update_notification(self):\n pass",
"def test_creates_in_app_notification_successfully(self):\n\n notification = self.fetch_all_notifications(token=self.user_token)\n\n self.assertEqual(notification.status_code, status.HTTP_200_OK)\n\n self.assertTrue(notification.data[\"count\"] == 1)\n\n follow = self.follow_user(self.control_username, self.user_token)\n\n self.assertEqual(follow.status_code, status.HTTP_200_OK)\n\n article = self.create_article(token=self.control_token)\n\n self.assertEqual(article.status_code, status.HTTP_201_CREATED)\n\n notification = self.fetch_all_notifications(token=self.user_token)\n\n self.assertEqual(notification.status_code, status.HTTP_200_OK)\n\n self.assertTrue(notification.data[\"count\"] == 2)",
"def send_test_email(self, config, as_json=True, timestamp_format=APITimestampFormat.NANOSECOND):\n return self._xjtrans(\"/settings/notification/send_test_mail\", \"POST\", config, as_json, timestamp_format)",
"def handle_test_email(self, msg):\n logger.debug(\"sending test email\")\n txt = \"%s test email\" % msg.fabric\n msg.wf.send_notification(\"any_email\", txt, txt)",
"def _SendNotificationEmail(old_test_key, new_test_key):\n body = _SHERIFF_ALERT_EMAIL_BODY % {\n 'old_test_path': utils.TestPath(old_test_key),\n 'new_test_path': utils.TestPath(new_test_key),\n }\n mail.send_mail(\n sender='gasper-alerts@google.com',\n to='chrome-performance-monitoring-alerts@google.com',\n subject='Sheriffed Test Migrated',\n body=body)",
"def test_notification_createproject(self):\n setup_temp_cache({}, {})\n\n url = \"/v1/actions/CreateProject\"\n data = {'project_name': \"test_project\", 'email': \"test@example.com\"}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n new_task = Task.objects.all()[0]\n\n headers = {\n 'project_name': \"test_project\",\n 'project_id': \"test_project_id\",\n 'roles': \"admin,_member_\",\n 'username': \"test@example.com\",\n 'user_id': \"test_user_id\",\n 'authenticated': True\n }\n\n url = \"/v1/notifications\"\n response = self.client.get(url, headers=headers)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n response.json()['notifications'][0]['task'],\n new_task.uuid)",
"def test_many_notifications() -> None:\n tester = Notifications()\n for _ in range(100):\n tester.add(Notification(\"test\", timeout=60))\n assert len(tester) == 100",
"def testOnlineNotificationEmail(self):\n client_id = self.SetupClient(0)\n self.email_messages = []\n\n def SendEmail(address, sender, title, message, **_):\n self.email_messages.append(\n dict(address=address, sender=sender, title=title, message=message))\n\n with mock.patch.object(email_alerts.EMAIL_ALERTER, \"SendEmail\", SendEmail):\n client_mock = action_mocks.ActionMock(admin.Echo)\n flow_test_lib.TestFlowHelper(\n administrative.OnlineNotification.__name__,\n client_mock,\n args=administrative.OnlineNotificationArgs(email=\"test@localhost\"),\n creator=self.test_username,\n client_id=client_id)\n\n self.assertLen(self.email_messages, 1)\n email_message = self.email_messages[0]\n\n # We expect the email to be sent.\n self.assertEqual(email_message.get(\"address\", \"\"), \"test@localhost\")\n self.assertEqual(email_message[\"title\"],\n \"GRR Client on Host-0.example.com became available.\")\n self.assertIn(\"This notification was created by %s\" % self.test_username,\n email_message.get(\"message\", \"\"))",
"def test_zrc_send_notif(self, state, zrc_client, ztc_client):\n zaaktype = ztc_client.retrieve('zaaktype', catalogus_uuid=CATALOGUS_UUID, uuid=ZAAKTYPE_UUID)\n state.zaaktype = zaaktype\n\n zaak = zrc_client.create('zaak', {\n 'zaaktype': zaaktype['url'],\n 'bronorganisatie': '517439943',\n 'verantwoordelijkeOrganisatie': '223122166',\n 'startdatum': '2018-06-01',\n 'registratiedatum': '2018-06-18',\n })\n\n assert 'url' in zaak\n state.zaak = zaak\n\n # TODO check if the notif message was delivered to subscriber (drc) ???",
"def test_notification_get(self):\n pass",
"def new_notification(message):\r\n message_text = \"Пожалуйста, введите описание уведомления\"\r\n chat_id = message.chat.id\r\n BOT.send_message(chat_id, message_text)\r\n BOT.current_event[str(chat_id)] = \"NOTIF_GET_TITLE\"\r\n BOT.current_data[str(chat_id)] = notif.Notification()\r\n BOT.current_data[str(chat_id)].chat_id = chat_id",
"def test_notification_list(self):\n pass",
"def test_notification_acknowledge(self):\n pass",
"def test_ready(self):\n notification = self.notification\n notification.destination[\"frequency\"] = 0\n self.assertTrue(notification.ready())",
"def test_creates_in_app_notification_if_comment(self):\n\n notification = self.fetch_all_notifications(token=self.user_token)\n\n self.assertEqual(notification.status_code, status.HTTP_200_OK)\n\n self.assertTrue(notification.data[\"count\"] == 1)\n\n article = self.create_article(token=self.control_token)\n\n self.assertEqual(article.status_code, status.HTTP_201_CREATED)\n\n slug = article.data[\"data\"].get(\"slug\", None)\n\n favorite_article = self.client.post('/api/articles/{}/favorite/'.\n format(slug),\n self.base_data.article_data,\n HTTP_AUTHORIZATION='Bearer ' +\n self.user_token,\n format='json')\n\n self.assertEqual(favorite_article.status_code, status.HTTP_200_OK)\n\n comment = self.client.post('/api/articles/{}/comments/'.format(slug),\n self.base_data.comment_data,\n HTTP_AUTHORIZATION='Bearer ' +\n self.control_token,\n format='json')\n\n self.assertEqual(comment.status_code, status.HTTP_201_CREATED)\n\n notification = self.fetch_all_notifications(token=self.user_token)\n\n self.assertEqual(notification.status_code, status.HTTP_200_OK)\n\n self.assertTrue(notification.data[\"count\"] == 2)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Task to seed the database.
|
def seed_db():
Seed().run()
|
[
"def run(self):\n self.call(ClienteTableSeeder)\n self.call(ProductoTableSeeder)\n self.call(PedidoTableSeeder)",
"def test_database_seed(self):\n\n sys.stdout.write('Testing database seed process...')\n user = User.query.filter_by(user_id=1).one()\n house = House.query.filter_by(house_id=2).one()\n assert user.email == \"kae@gmail.com\"\n assert house.address == \"410 Forney Ave Jacksonville, AL 36265\"",
"def seed_data():\n typer.echo(\"Creating initial data\")\n db = SessionLocal()\n init_db(db)\n typer.echo(\"Initial data created\")",
"def run(self):\n self.call(UserTypesTableSeeder)\n self.call(RegistrationSituationsTableSeeder)\n self.call(BudgetSituationsTableSeeder)\n self.call(ServiceTypesTableSeeder)",
"def run_seed(self, mode):\n if mode is None:\n mode = MODE_CREATE\n # Clear data from tables\n if mode != MODE_CREATE:\n clear_data()\n if mode == MODE_CLEAR:\n return\n\n # Creating OauthProviders\n create_oauthproviders()",
"def seed(self, seed):\n self._env.seed(seed)",
"def syncdb():\n _syncdb()\n migrate()",
"def __call__(self):\n self.create_database()",
"def setup_db():\n manage(\"syncdb --all\")\n manage(\"migrate --fake\")",
"async def init_db_async(self) -> None:\n pool = await self.get_pool()\n async with pool.acquire() as conn:\n await conn.execute('''\n CREATE TABLE IF NOT EXISTS dump(\n hash varchar PRIMARY KEY,\n plaintext varchar NOT NULL,\n created timestamp DEFAULT current_timestamp\n );\n CREATE TABLE IF NOT EXISTS used(\n fk_hash varchar,\n total INTEGER DEFAULT 0,\n FOREIGN KEY (fk_hash) REFERENCES dump (hash)\n );\n ''')",
"def setUp(self):\n db.create_all()\n self.db = db",
"def setupAllDB():\n createDatabase(CONFIG_DB['db_name'])\n runMigrations()\n setupJobTrackerDB()\n setupErrorDB()\n setupUserDB()\n setupJobQueueDB()\n setupValidationDB()",
"def setup():\n \n if os.path.exists(settings.DATABASE_NAME):\n os.remove(settings.DATABASE_NAME)\n \n call_command(\"syncdb\")\n\n for user in [User(username='test'), User(username='test2')]:\n user.set_password('password')\n user.save()",
"def test_travel_seeded_to_db(self):\n\n seed_travels = seed_database.seed_travels_table()\n self.assertEqual(1, seed_travels[0].travel_id)",
"def init_db():\n import models\n Base.metadata.create_all(bind=engine)",
"def setup_db():\n create_service_db()",
"def _initialize_db():\n # TODO(metzman): Most of the strings in this function should probably be\n # configurable.\n\n db_utils.initialize()\n # One time set up for any db used by FuzzBench.\n models.Base.metadata.create_all(db_utils.engine)\n\n # Now set up the experiment.\n with db_utils.session_scope() as session:\n experiment_name = 'oss-fuzz-on-demand'\n experiment_exists = session.query(models.Experiment).filter(\n models.Experiment.name == experiment_name).first()\n if experiment_exists:\n raise Exception('Experiment already exists in database.')\n\n db_utils.add_all([\n db_utils.get_or_create(models.Experiment,\n name=experiment_name,\n git_hash='none',\n private=True,\n experiment_filestore='/out/filestore',\n description='none'),\n ])\n\n # Set up the trial.\n trial = models.Trial(fuzzer=os.environ['FUZZER'],\n experiment='oss-fuzz-on-demand',\n benchmark=os.environ['BENCHMARK'],\n preemptible=False,\n time_started=scheduler.datetime_now(),\n time_ended=scheduler.datetime_now())\n db_utils.add_all([trial])",
"async def run(self) -> None:\n main_database_configuration: dict = await self.__get_main_database_configuration()\n\n MigrationManager(await self.__get_migration_configuration(\n url=main_database_configuration.get(\"url\"),\n path=main_database_configuration.get(\"path\"),\n )).run()",
"def seed(user_datastore, db):\n roleAdmin = user_datastore.create_role(\n name='admin',\n description='Manage other users on the system')\n roleStandard = user_datastore.create_role(\n name='standard',\n description='Manage the system')\n userAdmin = user_datastore.create_user(\n username='admin',\n first_name='admin',\n surname='admin',\n email='admin@aimlackies.com',\n password=hash_password('password'),\n confirmed_at=func.now()\n )\n userAdmin.roles.append(roleAdmin)\n db.session.commit()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
djangoclite by Leo Neto. A CLI to handle the creation and management of your Django projects. The CLI has some opinions about how your project should be structured in order for it to maximize the amount of automatic configuration it can provide you. Since Django itself is highly configurable, you are free to bypass the CLI's conventions if you so choose.
|
def cli(ctx, dry, force, verbose, debug):
ctx.ensure_object(dict)
ctx.obj['dry'] = dry
ctx.obj['force'] = force
ctx.obj['verbose'] = verbose
ctx.obj['debug'] = debug
ctx.obj['project_files'] = FileHandler.find_files(path=os.getcwd(), patterns=['manage.py', 'wsgi.py', 'apps.py'])
# Note for contributors:
#
# Commands are auto-discovered if they are placed under the commands directory.
# But please be sure to do the following for this to work:
# 1. Name your package and click command the same.
# 2. Place your command definition within your package's main.py module
# 3. Any sub-commands of your command should be added to the top-most command in the package's main.py module.
#
# Access your command like so:
# `django-clite my-command my-command-sub-command`
#
# If you would like to skip a plugin/command from being auto-discovered,
# simply rename the package by either prepending or appending any number of underscores (_).
# Any code contained within the package will be ignored.
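To make the auto-discovery convention described above concrete, here is a hedged sketch of what a discoverable command package's main.py might look like; the package name, command names and options are illustrative assumptions, not taken from the CLI's actual source.

# commands/generate/main.py -- hypothetical layout following the convention:
# the package ("generate") and the click command share a name, the command
# is defined in the package's main.py, and sub-commands hang off that command.
import click


@click.group()
@click.pass_context
def generate(ctx):
    """Top-most command for this package; auto-discovered by the CLI."""
    pass


@generate.command()
@click.argument('name')
@click.pass_context
def model(ctx, name):
    """Sub-command, invoked as `django-clite generate model <name>`."""
    if ctx.obj.get('verbose'):
        click.echo(f'Generating model {name}...')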
|
[
"def autoconfigure(\n repo_url: str = typer.Argument(..., help=\"url of remote git repository of your django project\"),\n domain_name: str = typer.Option(\n \"your-username.pythonanywhere.com\",\n \"-d\",\n \"--domain\",\n help=\"Domain name, eg www.mydomain.com\",\n ),\n python_version: str = typer.Option(\n \"3.8\",\n \"-p\",\n \"--python-version\",\n help=\"Python version, eg '3.9'\",\n ),\n nuke: bool = typer.Option(\n False,\n help=\"*Irrevocably* delete any existing web app config on this domain. Irrevocably.\",\n ),\n):\n domain = ensure_domain(domain_name)\n project = DjangoProject(domain, python_version)\n project.sanity_checks(nuke=nuke)\n project.download_repo(repo_url, nuke=nuke),\n project.create_virtualenv(nuke=nuke)\n project.create_webapp(nuke=nuke)\n project.add_static_file_mappings()\n project.find_django_files()\n project.update_wsgi_file()\n project.update_settings_file()\n project.run_collectstatic()\n project.run_migrate()\n project.webapp.reload()\n typer.echo(snakesay(f\"All done! Your site is now live at https://{domain_name}\\n\"))\n project.start_bash()",
"def startproject():\n usage_descr = 'django Organice setup. Start getting organiced!'\n\n if sys.version_info < (2, 7):\n from optparse import OptionParser # Deprecated since version 2.7\n\n parser = OptionParser(description=usage_descr)\n (options, args) = parser.parse_args()\n if len(args) != 1:\n parser.error('Please specify a projectname')\n projectname = args[0]\n else:\n from argparse import ArgumentParser # New since version 2.7\n\n parser = ArgumentParser(description=usage_descr)\n parser.add_argument('projectname', help='name of project to create')\n args = parser.parse_args()\n projectname = args.projectname\n\n mode0755 = S_IRUSR | S_IWUSR | S_IXUSR | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH\n profiles = ('develop', 'staging', 'production')\n filenames = ('__init__', 'common') + profiles\n\n print('Generating project %s ...' % projectname)\n code = call(['django-admin.py', 'startproject', projectname, '.'])\n if code != 0:\n return code\n os.chmod('manage.py', mode0755)\n\n print('Creating directories ...')\n os.mkdir('media')\n os.mkdir('static')\n os.mkdir('templates')\n os.mkdir(os.path.join(projectname, 'settings'))\n\n print('Converting settings to deployment profiles (%s) ...' % ', '.join(profiles))\n os.rename(os.path.join(projectname, 'settings.py'),\n os.path.join(projectname, 'settings', 'common.py'))\n\n settings = DjangoSettingsManager(projectname, *filenames)\n settings.append_lines('__init__',\n '\"\"\"',\n 'Modularized settings generated by django Organice setup. http://organice.io',\n 'This solution follows the second recommendation from',\n 'http://www.sparklewise.com/django-settings-for-production-and-development-best-practices/',\n '\"\"\"',\n 'from .develop import *')\n for prof in profiles:\n settings.append_lines(prof,\n '# Django project settings for %s environment' % prof.capitalize(),\n '',\n 'from .common import *')\n\n # out-of-the-box Django values relevant for deployment\n settings.move_var('common', profiles, 'DEBUG')\n settings.move_var('common', profiles, 'TEMPLATE_DEBUG')\n settings.move_var('common', profiles, 'ALLOWED_HOSTS')\n settings.move_var('common', profiles, 'DATABASES')\n settings.move_var('common', profiles, 'SECRET_KEY')\n settings.move_var('common', profiles, 'WSGI_APPLICATION')\n settings.insert_lines('common',\n 'import os',\n 'PROJECT_PATH = os.sep.join(__file__.split(os.sep)[:-3])')\n settings.set_value('common', 'MEDIA_URL', \"'/media/'\")\n settings.set_value('common', 'MEDIA_ROOT', \"os.path.join(PROJECT_PATH, 'media')\")\n settings.set_value('common', 'STATIC_ROOT', \"os.path.join(PROJECT_PATH, 'static')\")\n settings.set_value('common', 'USE_I18N', False)\n settings.set_value('staging', 'DEBUG', False)\n settings.set_value('production', 'DEBUG', False)\n\n print('Configuring development database ...')\n DEV_DATABASES = \"\"\"{\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.\n 'NAME': os.path.join(PROJECT_PATH, '%s.sqlite'), # path to database file if using sqlite3.\n # The following settings are not used with sqlite3:\n 'USER': '',\n 'PASSWORD': '',\n 'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.\n 'PORT': '', # Set to empty string for default.\n }\n}\"\"\" % projectname\n settings.set_value('develop', 'DATABASES', DEV_DATABASES)\n\n # configuration for included packages\n adding_settings_for = 'Adding settings for %s ...'\n\n print(adding_settings_for % 'installed apps')\n settings.delete_var('common', 
'INSTALLED_APPS')\n settings.append_lines('common',\n 'INSTALLED_APPS = (',\n \" 'django.contrib.auth',\",\n \" 'django.contrib.comments',\",\n \" 'django.contrib.contenttypes',\",\n \" 'django.contrib.sessions',\",\n \" 'django.contrib.sites',\",\n \" 'django.contrib.messages',\",\n \" 'django.contrib.staticfiles',\",\n \" 'django.contrib.admin',\",\n \" 'organice',\",\n \" 'cms',\",\n \" 'mptt',\",\n \" 'menus',\",\n \" 'south',\",\n \" 'sekizai',\",\n \" 'reversion',\",\n \" 'cms.plugins.text',\",\n \" 'cms.plugins.picture',\",\n \" 'cms.plugins.link',\",\n \" 'cms.plugins.teaser',\",\n \" 'cms.plugins.file',\",\n \" 'cms.plugins.video',\",\n \" 'cms.plugins.flash',\",\n \" 'cms.plugins.googlemap',\",\n \" 'cms.plugins.inherit',\",\n \" 'cmsplugin_contact',\",\n \" 'cmsplugin_zinnia',\",\n \" 'tagging',\",\n \" 'emencia.django.newsletter',\",\n \" 'tinymce',\",\n \" 'simple_links',\",\n \" 'zinnia',\",\n ')')\n\n print(adding_settings_for % 'django CMS')\n settings.delete_var('common', 'MIDDLEWARE_CLASSES')\n settings.append_lines('common',\n 'MIDDLEWARE_CLASSES = (',\n \" 'django.middleware.common.CommonMiddleware',\",\n \" 'django.middleware.doc.XViewMiddleware',\",\n \" 'solid_i18n.middleware.SolidLocaleMiddleware',\",\n \" 'django.middleware.csrf.CsrfViewMiddleware',\",\n \" 'django.contrib.sessions.middleware.SessionMiddleware',\",\n \" 'django.contrib.messages.middleware.MessageMiddleware',\",\n \" 'django.contrib.auth.middleware.AuthenticationMiddleware',\",\n \" 'cms.middleware.page.CurrentPageMiddleware',\",\n \" 'cms.middleware.user.CurrentUserMiddleware',\",\n \" 'cms.middleware.toolbar.ToolbarMiddleware',\",\n \" 'cms.middleware.language.LanguageCookieMiddleware',\",\n ')')\n # must be set both in order to make solid_i18n work properly\n settings.set_value('common', 'LANGUAGE_CODE', \"\"\"'en-us'\nLANGUAGES = (\n ('en-us', 'English (United States)'),\n)\"\"\")\n settings.append_lines('common',\n 'CMS_TEMPLATES = (',\n \" ('cms_article.html', 'Template for normal content pages'),\",\n \" ('cms_bookmarks.html', 'Template for the bookmarks page'),\",\n ')',\n 'CMS_USE_TINYMCE = False')\n settings.delete_var('common', 'TEMPLATE_DIRS')\n settings.append_lines('common',\n 'TEMPLATE_DIRS = (',\n \" # Don't forget to use absolute paths, not relative paths.\",\n \" os.path.join(PROJECT_PATH, 'templates'),\",\n \" os.path.join(PROJECT_PATH, 'templates', 'zinnia'),\",\n ')')\n settings.append_lines('common',\n 'TEMPLATE_CONTEXT_PROCESSORS = (',\n \" 'django.contrib.auth.context_processors.auth',\",\n \" 'django.core.context_processors.i18n',\",\n \" 'django.core.context_processors.request',\",\n \" 'django.core.context_processors.media',\",\n \" 'django.core.context_processors.static',\",\n \" 'cms.context_processors.media',\",\n \" 'sekizai.context_processors.sekizai',\",\n \" 'organice.context_processors.expose',\",\n ')')\n\n print(adding_settings_for % 'Emencia Newsletter')\n settings.append_lines('common',\n \"NEWSLETTER_DEFAULT_HEADER_SENDER = 'Your Organization <newsletter@your.domain>'\",\n \"NEWSLETTER_MEDIA_URL = '/media/' # emencia/django/newsletter/media/edn/ directory (alternative)\",\n 'NEWSLETTER_USE_TINYMCE = True',\n 'TINYMCE_DEFAULT_CONFIG = {',\n \" 'height': 450,\",\n \" 'width': 800,\",\n \" 'convert_urls': False,\",\n \" 'plugins': 'table,paste,searchreplace,template',\",\n \" 'theme': 'advanced',\",\n \" 'theme_advanced_toolbar_location': 'top',\",\n \" 'theme_advanced_buttons1': 
'bold,italic,underline,forecolor,|,justifyleft,justifycenter,justifyright,justifyfull,|,formatselect,|,template',\",\n \" 'theme_advanced_buttons3_add': 'tablecontrols',\",\n '}')\n\n print(adding_settings_for % 'Zinnia Blog')\n settings.append_lines('common',\n '# use plugin system of django-cms in blog entries',\n \"ZINNIA_ENTRY_BASE_MODEL = 'cmsplugin_zinnia.placeholder.EntryPlaceholder'\",\n \"ZINNIA_WYSIWYG = 'wymeditor'\")\n settings.append_lines('common',\n 'SOUTH_MIGRATION_MODULES = {',\n ' # integration of EntryPlaceholder (django CMS) into Zinnia',\n \" 'zinnia': 'organice.migrations.zinnia',\",\n '}')\n\n settings.save_files()\n\n print('Configuring project URLs ...')\n gen_by_comment = '# generated by django Organice'\n project = DjangoModuleManager(projectname)\n project.add_file('urls', lines=(gen_by_comment, 'from organice.urls import urlpatterns'))\n project.save_files()\n\n suggest_editing = ('ADMINS', 'TIME_ZONE', 'LANGUAGE_CODE', 'LANGUAGES')\n suggest_adding = ('SERVER_EMAIL', )\n print('Done. Enjoy your organiced day!' + os.linesep)\n\n print('Please visit file `%s` and edit or add the variables: %s' %\n (settings.get_file('common').name, ', '.join(suggest_editing + suggest_adding)))\n print('Please visit file `%s` and configure your development database in: %s' %\n (settings.get_file('develop').name, 'DATABASES'))\n print('See https://docs.djangoproject.com/en/1.5/ref/settings/ for details.' + os.linesep)\n\n print('To initialize your development database run: `python manage.py syncdb --migrate`')\n print('You can then run your development server with: `python manage.py runserver`')",
"def manage(args):\r\n\r\n from django.core import management as mgmt\r\n\r\n OldOptionParser = mgmt.LaxOptionParser\r\n class LaxOptionParser(mgmt.LaxOptionParser):\r\n def __init__(self, *args, **kwargs):\r\n kwargs['prog'] = 'djboss manage'\r\n OldOptionParser.__init__(self, *args, **kwargs)\r\n mgmt.LaxOptionParser = LaxOptionParser\r\n\r\n utility = mgmt.ManagementUtility(['djboss manage'] + args.args)\r\n utility.prog_name = 'djboss manage'\r\n utility.execute()",
"def create_project(project_name):\n\n print(\"Creating a Wagtail project called {project_name}\".format(project_name=project_name))\n\n import wagtailstartproject\n wagtailstartproject_path = os.path.dirname(wagtailstartproject.__file__)\n\n template_path = os.path.join(wagtailstartproject_path, 'project_template')\n\n # Call django-admin startproject\n utility_args = [\n 'django-admin.py',\n 'startproject',\n '--template=' + template_path,\n '--extension=py,ini,html,rst,json,cfg',\n project_name\n ]\n\n # always put the project template inside the current directory:\n utility_args.append('.')\n\n utility = ManagementUtility(utility_args)\n utility.execute()\n\n print(\"Success! {project_name} has been created\".format(project_name=project_name))",
"def python(code):\n setup = \"import os; os.environ[\\'DJANGO_SETTINGS_MODULE\\']=\\'settings\\';\"\n full_code = 'python -c \"%s%s\"' % (setup, code.replace(\"`\", \"\\\\\\`\"))\n with project():\n return run(full_code)",
"def django_cmd(cmd):\n return fig('run web {env[django_exec]} {cmd}'.format(cmd=cmd, env=env))",
"def django_project(name, root='.', template='~/templates/django_project'):\n project_template = _path(template)\n root_dir = _path(root, name)\n\n try:\n os.makedirs(root_dir)\n except OSError: # already exists\n pass\n\n print colors.blue(\"Creating Django project %s...\" % name, bold=True)\n _copy_template(project_template, root_dir, name)",
"def django_admin(*args):\n cwd = os.getcwd()\n os.chdir(os.environ['APP_DIR'])\n management.execute_from_command_line(['django-admin.py'] + list(args))\n os.chdir(cwd)",
"def PLATFORM_CREATE_PROJECT(self):\n\t\treturn \"\"\"This is how to create project\"\"\"",
"def create_django_code_directory():\n print 'Attempting to create django code directory'\n if not files.exists(env.django_code_dir):\n print 'Creating code dir ', env.django_code_dir\n run('mkdir ' + env.django_code_dir)\n\n default_webfaction_project_dir = env.django_root + '/myproject'\n if files.exists(default_webfaction_project_dir):\n print 'Deleting default webfaction project dir ', default_webfaction_project_dir\n run('rm -rf ' + default_webfaction_project_dir)",
"def makeProj():\n return render_template('maker/projects.html', title=\"Maker - Projects\", year=year)",
"def main(args):\n out = {}\n\n try:\n out['bmml_path'] = args[0]\n except IndexError:\n print(usage)\n print()\n print(\"You must specify the bmml path\")\n sys.exit(1)\n\n out['name'] = utils.prompt_user(\"What is the name of this project (which\"\n \" will become the name of the resulting Django app)?\",\n checks=[utils.checks.identifier],\n )\n\n # write out project.json\n outdir = os.getcwd()\n outfile = open(os.path.join(outdir, \"project.json\"), \"w\")\n json.dump(out, outfile)\n outfile.write(\"\\n\")\n outfile.close()\n\n print(\"Output written to project.json\")\n print(\"Done!\")",
"def management_cmd(cmd):\n\n require('hosts')\n require('code_dir')\n\n sudo(\"cd %s ;\"\n \". ./venv/bin/activate ; \"\n \"cd hemedicinal ; \"\n \"python manage.py %s\" % (env.code_dir, cmd))",
"def migrate(djangoenv='develop'):\n with shell_env(DJANGOENV=djangoenv):\n _manage('migrate --noinput')",
"def deploy_project():\n virtualenv.virtualenv_create()\n make_clone()\n\n virtualenv.pip_install(env.conf.PIP_REQUIREMENTS, restart=False)\n\n setup_web_server()\n update_django_config()\n\n dj_cmd.syncdb()\n dj_cmd.migrate()",
"def django_app(name, root='.', template='~/templates/django_app'):\n root_dir = _path(root, name)\n template_dir = _path(template)\n\n try:\n os.makedirs(root_dir)\n except OSError: # already exists\n pass\n\n print(colors.blue(\"Creating Django app %s...\" % name, bold=True))\n _copy_template(template_dir, root_dir, name)",
"def setDjango():\n\n # TODO(daniel): this should be removed at some point as Interactive Shell\n # does not use Django. This is currently required, because when main module\n # is loaded, it always imports gae_django module which requires Django\n # version to be set. This should be changed so that it is loaded only\n # for Prod/Dev server.\n os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'\n\n from google.appengine import dist\n dist.use_library('django', '1.2')",
"def make_dir(pn, fp, dj_an):\n path = os.path.join(fp, pn)\n os.mkdir(path)\n os.chdir(path)\n os.system(f'python -m venv {pn}_venv')\n os.system(f'{pn}_venv\\\\scripts\\\\activate.bat && pip install Django && django-admin startproject {dj_an}')\n shutil.move(f'{path}\\\\{dj_an}\\\\manage.py', f'{path}\\\\manage.py')\n shutil.move(f'{path}\\\\{dj_an}\\\\{dj_an}', f'{path}\\\\_{dj_an}')\n shutil.rmtree(f'{path}\\\\{dj_an}')\n os.rename(f'{path}\\\\_{dj_an}', f'{path}\\\\{dj_an}')",
"def run_web(context):\n init_db(context)\n print(\"Run web server\")\n cmd = ('python manage.py runserver 8080')\n\n run_local_cmd(cmd, run_web.__doc__)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Takes the algorithm, salt and password and produces the password hash. Currently only bcrypt is supported (the hash is produced with the bcrypt library rather than Python's hashlib).
|
def gen_hexdigest(raw_password, algorithm=BCRYPT, salt=None):
if raw_password is None:
raise ValueError('No empty passwords, fool')
if algorithm == BCRYPT:
# bcrypt has a special salt
if salt is None:
salt = bcrypt.gensalt()
return (algorithm, salt, bcrypt.hashpw(raw_password, salt))
raise ValueError('Unknown password algorithm')
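A brief usage sketch (an assumption, not part of the source): hash a password at signup, then verify a later login attempt by re-hashing with the stored salt; with bcrypt, hashing the same password with the same salt reproduces the same digest. Modern bcrypt releases expect bytes, hence the byte literals.

# Hypothetical usage of gen_hexdigest for signup and login verification.
algorithm, salt, digest = gen_hexdigest(b'correct horse battery staple')

# Later, on a login attempt, re-hash with the stored salt and compare.
_, _, attempt_digest = gen_hexdigest(b'correct horse battery staple', algorithm, salt)
assert attempt_digest == digest  # in production prefer a constant-time comparison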
|
[
"def get_hexdigest(algorithm, salt, raw_password):\r\n raw_password, salt = smart_str(raw_password), smart_str(salt)\r\n if algorithm == 'crypt':\r\n try:\r\n import crypt\r\n except ImportError:\r\n raise ValueError('\"crypt\" password algorithm not supported in this environment')\r\n return crypt.crypt(raw_password, salt)\r\n\r\n if algorithm == 'md5':\r\n return md5_constructor(salt + raw_password).hexdigest()\r\n elif algorithm == 'sha1':\r\n return sha_constructor(salt + raw_password).hexdigest()\r\n raise ValueError(\"Got unknown password algorithm type in password.\")",
"def get_hexdigest(algorithm, salt, raw_password):\r\n raw_password, salt = smart_str(raw_password), smart_str(salt)\r\n if algorithm == 'crypt':\r\n try:\r\n import crypt\r\n except ImportError:\r\n raise ValueError('\"crypt\" password algorithm not supported in this '\r\n 'environment')\r\n return crypt.crypt(raw_password, salt)\r\n\r\n if algorithm == 'md5':\r\n return md5_constructor(salt + raw_password).hexdigest()\r\n elif algorithm == 'sha1':\r\n return sha_constructor(salt + raw_password).hexdigest()\r\n raise ValueError(\"Got unknown password algorithm type in password.\")",
"def hashed (cls, clear_password, salt = None) :",
"def hash_password(salt, password):\n string = salt + password\n hashed_password = hashlib.sha256(string.encode()).hexdigest()\n return hashed_password",
"def _hash_password(password, salt):\n return hashlib.sha256(salt + password.strip()).hexdigest()",
"def _get_crypt_hash(self, salt, auth_key):\n if not isinstance(salt, (bytes, str)):\n salt = str(salt)\n if isinstance(salt, str):\n salt = salt.encode('utf-8')\n if not isinstance(auth_key, (bytes, str)):\n auth_key = str(auth_key)\n if isinstance(auth_key, str):\n auth_key = auth_key.encode('utf-8')\n return hmac.new(salt, auth_key, hashlib.sha1).hexdigest()",
"def create_hash(password, salt=uuid.uuid4().hex):\n\n hash_pass = hashlib.sha512((password + salt).encode('utf-8')).hexdigest()\n\n return hash_pass, salt",
"def get_hasher():\n return BCryptSHA256PasswordHasher()",
"def encrypt(password, salt=None, hash_algorithm=None):\n to_encrypt = password\n if salt is not None:\n to_encrypt += salt\n if hash_algorithm is not None:\n return hash_algorithm(to_encrypt).hexdigest()\n return current_app.auth.hash_algorithm(to_encrypt).hexdigest()",
"def hashed (cls, clear_password, salt = None) :\n if salt is None :\n salt = bcrypt.gensalt (cls.default_rounds)\n else :\n salt = pyk.encoded (salt)\n result = bcrypt.hashpw (pyk.encoded (clear_password, \"ascii\"), salt)\n return pyk.decoded (result, \"ascii\")",
"def hash_password(self, password):\n password = hashpw(password.encode('utf-8'), gensalt()).decode('utf-8')\n return password",
"def gen_hash(name, password):\n if name in __GEN_HASH_METHODS:\n return __GEN_HASH_METHODS[name](password)\n else:\n raise ValueError(\n \"Hash-method '{}' does not exists\"\n .format(name))",
"def hash_password(raw_password):\n return bcrypt.hashpw(raw_password, bcrypt.gensalt())",
"def password_hash(db, password, user):\n\n if type(user) is int:\n check = True\n elif type(user) is str:\n if len(user) > 0 and len(password) > 6:\n check = True\n else:\n check = False\n else:\n check = False\n\n if check:\n salt = return_salt(db, user)\n if salt:\n return hashlib.sha256((password + salt).encode()).hexdigest()\n else:\n return False",
"def get_userhash(username: str, password: str) -> str:\n original_word = \"\".join([username, password]).encode('utf8').rstrip()\n return sha256(original_word).hexdigest()",
"def make_pwhash(algo, password, iterations):\n salt = binascii.hexlify(os.urandom(16))\n hsh = pbkdf2_hmac(algo, password.encode(), salt, iterations)\n hsh = binascii.hexlify(hsh)\n hsh = \"%s$%s$%s\" % (algo, salt.decode(), hsh.decode())\n return hsh",
"def hash_password(self, plain_text: str) -> str:\n return self.hasher.hash(plain_text)",
"def hash_password(password):\n password = password.encode('utf-8')\n salt = app.config['SECRET_KEY']\n return hashlib.md5(salt + password).hexdigest()",
"def encode(self, password, salt):\n scrypt = self._load_library()\n data = force_str(scrypt.hash(force_bytes(password),salt ))\n return \"%s$%d$%s$%s\" % (self.algorithm, self.maxtime, salt, data)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Takes a password line and returns the line split by PASSWD_DELIM
|
def split_passwd_line(password_line):
return password_line.split(PASSWD_DELIM)
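For illustration only (the delimiter value is an assumption, not defined in the document), with a colon delimiter this behaves like splitting an /etc/passwd-style entry:

PASSWD_DELIM = ':'  # assumed value for the sketch
fields = split_passwd_line('alice:x:1001:1001:Alice:/home/alice:/bin/bash')
# -> ['alice', 'x', '1001', '1001', 'Alice', '/home/alice', '/bin/bash']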
|
[
"def retrieve_password(username):\r\n return open('passfile').readlines()[find_password_line(username)].strip()",
"def getUsernamePassword(file):\n\n username=linecache.getline(file,1) #username on 1st line\n password=linecache.getline(file,2) #password on 2nd line\n return username.strip(),password.strip() #remove the CRLF",
"def read_passFile(passFile):\n try:\n with open(passFile, 'r') as inFile:\n passwd = inFile.read()\n except:\n return ''\n return passwd[:-1]",
"def _read_pw_file(self):\n import codecs\n\n with open(self.password_file, \"r\") as f:\n pwstring = codecs.decode(f.read(), \"rot_13\")\n (username, password) = pwstring.split(\",\", 2)\n return (username, password)",
"def read_password():\n with open(passwordfile,'r') as handle:\n read = handle.read()\n return read",
"def parse_line(line: str) -> Tuple[Tuple[int, int], str, str]:\n policy, password = line.split(':')\n password = password.strip()\n times, letter = policy.split(' ')\n at_least, at_most = map(int, times.split('-'))\n return ((at_least, at_most), letter, password)",
"def getpass(prompt='Password: '):\n\t\n\tpassword = console.secure_input(prompt)\n\treturn password",
"def readCredential(name):\n try:\n file=open(name, \"r\")\n user=file.readline().strip()\n passw=file.readline().strip()\n file.close()\n return user,passw\n except:\n print(\"Invalid credentials\\nCheck your txt file.\")\n print(\"The format of passGit.txt must be:\\n\\tusername\\npassword\")",
"def parse_passwords(PASSWORDLIST):\n print \"[INFO] Parsing password list\"\n passwords = []\n try:\n with open(PASSWORDLIST, 'r') as fh:\n data = fh.readlines()\n for item in data:\n password = item.split()[0]\n # print \"[INFO] Adding \" + password\n passwords.append(password)\n except:\n sys.exit(\"[ERROR] Problem parsing password list\")\n\n print \"[INFO] Total passwords: \" + str(len(passwords))\n\n return passwords",
"def dmenu():\n getpwd = openpipe([\"dmenu\", \"-p\", \"Master Password: \",\n \"-nb\", \"#000000\", \"-nf\", \"#000000\",\n \"-sb\", \"#000000\", \"-sf\", \"#ffffff\"])\n pw, _ = getpwd.communicate(\"\")\n if getpwd.returncode: sys.exit(getpwd.returncode)\n if pw[-1] == \"\\n\": pw = pw[:-1]\n return pw",
"def get_command_line_string():\n PASSWORD_NAME = '--password'\n\n command_line_list=sys.argv[:]\n\n try:\n password_index = command_line_list.index(PASSWORD_NAME)\n command_line_list[password_index+1]='*****'\n except ValueError:\n pass\n\n return ' '.join(command_line_list)",
"def unix_getpass(prompt='Password: ', stream=None):\r\n if stream is None:\r\n stream = sys.stdout\r\n\r\n try:\r\n fd = sys.stdin.fileno()\r\n except:\r\n return default_getpass(prompt)\r\n\r\n old = termios.tcgetattr(fd) # a copy to save\r\n new = old[:]\r\n\r\n new[3] = new[3] & ~termios.ECHO # 3 == 'lflags'\r\n try:\r\n termios.tcsetattr(fd, termios.TCSADRAIN, new)\r\n passwd = _raw_input(prompt, stream)\r\n finally:\r\n termios.tcsetattr(fd, termios.TCSADRAIN, old)\r\n\r\n stream.write('\\n')\r\n return passwd",
"def parse_credentials(username: str, password: str) -> tuple:\n return username, password",
"def tokenize(password_and_constraints: str) -> Tuple[int, int, str, str]:\n split_string = re.split(r\"[-: ]\", password_and_constraints)\n\n a = int(split_string[0])\n b = int(split_string[1])\n letter = split_string[2]\n # `split_string[3]` is an empty string and not used.\n password = split_string[4]\n\n return (a, b, letter, password)",
"def part4b(filename, password=None):\n f = open(filename, \"r\")\n lines = f.readlines()\n read_user = lines[0][:-1]\n read_pass = lines[1][:-1]\n\n if password == None: \n print(\"Username: \" + base64.b64decode(bytes(read_user)))\n print(\"Password: \" + base64.b64decode(bytes(read_pass)))\n else:\n username_encrypted = read_user\n password_encrypted = base64.b64encode(bytes(password))\n\n print(\"Username: \" + base64.b64decode(bytes(read_user)))\n print(\"Password: \" + password)",
"def get_user_password():\n try:\n mypwd1 = os.environ['mypwd1']\n if re.search('\"',mypwd1):\n raise KeyError()\n if re.search('\\\\\"',mypwd1):\n raise KeyError()\n except KeyError:\n mypwd1 = getpass.getpass('Enter your password -> ')\n return mypwd1",
"def get_user_credentials():\n\n if sys.stdin.isatty():\n # Gets the credentials from the userFile if it exists\n _mail = getusername()\n _pass = getpass(stream=sys.stderr)\n _cred = [_mail, _pass]\n\n else:\n # Gets the credentials from stdin\n _cred = sys.stdin.readlines()\n _cred = map(str.strip, _cred)\n\n print 'autologin\\t:', _cred[0]\n if len(_cred) != 2:\n print 'Error: Malformed input. Missing arguments.'\n print 'Here is what your input should look like:'\n print '\\tjonh.doe@mail.mcgill.ca'\n print '\\tpassword'\n exit()\n\n return _cred",
"def validatePassword(password):\n if password.isdigit():\n reject()\n with open(\"words\") as f:\n words = f.readlines()\n words = [x.strip().strip(\"\\\\\") for x in words]\n for word in words:\n segments = password.split(word, 1)\n if (segments[0].isdigit() or segments[0] == '') and (segments[1].isdigit() or segments[1] == ''):\n reject()",
"def entryparse(entry):\n username,password = entry.strip().split(':',1)\n domain = username.split('@',1)[1].split('.',1)[0]\n return (username,password,domain)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The authenticated user for this message. Determined by get_current_user, which you can override to set the user based on, e.g., a cookie. If that method is not overridden, this method always returns None. We lazy-load the current user the first time this method is called and cache the result after that.
|
def current_user(self):
    if not hasattr(self, "_current_user"):
        self._current_user = self.get_current_user()
    return self._current_user
|
[
"def get_current_user(self):\n return self.graph.users.get(int(self.get_secure_cookie('eid')))",
"def get_current_user(self):\n admin_cookie = self.get_secure_cookie(COOKIE_NAME)\n try:\n if admin_cookie:\n try:\n (user, expires) = json.loads(admin_cookie)\n except ValueError:\n # Old cookie format; delete it.\n self.clear_cookie(COOKIE_NAME)\n return None\n if expires > time.time():\n self._auth_user = user\n return self._auth_user\n except Exception:\n logging.exception('cannot authenticate admin access')\n return None",
"def get_active_user(self):\n return self._active_user",
"def user(self) -> Optional[str]:\n if self.logged_in():\n return self.username()\n return None",
"def current_user(self, request=None) -> TypeVar('User'):\n h = self.authorization_header(request)\n h = self.extract_base64_authorization_header(h)\n h = self.decode_base64_authorization_header(h)\n user = self.extract_user_credentials(h)\n return self.user_object_from_credentials(user[0], user[1])",
"def retrieve_logged_in_user(self):\n return self.client._get(\"/users/me\")",
"def get_user(request):\n if not hasattr(request, '_cached_user'):\n request._cached_user = auth_get_user(request)\n return request._cached_user",
"def get_session_user(request):\n if request.user.is_authenticated:\n return request.user\n return None",
"def get_current_user():\n\treturn users.get_current_user()\n\n\tuser = users.get_current_user()\n\tif not user:\n\t\treturn user\n\treturn get_user(user.nickname().lower())",
"def get_current_user(self):\r\n return self.get('users/current').json()",
"def get_user():\r\n return login_session.get('user', None)",
"def get_current_user(self):\r\n if self.request.cookies.get(\"user_id\") and self.request.cookies.get(\"user_id\") != '':\r\n self.usr = Users.get_by_username(self.request.cookies.get(\"user_id\"))\r\n return self.usr.dispname\r\n else:\r\n return None",
"def user(self):\r\n try:\r\n return User.objects.get(username=self.username)\r\n except User.DoesNotExist:\r\n return None",
"def user(self) -> Optional[dict]:\n return self._get('user')",
"def user(self):\n return self.parsed_prefix.user",
"def active_user():\n return getattr(_request_ctx_stack.top, 'user', None)",
"def ticket_user(self):\n if not hasattr(self, \"_ticket_user\"):\n self._ticket_user = User.objects.get(username=\"ixf_importer\")\n return self._ticket_user",
"def authenticate_and_get_user():\n try:\n check_auth(None, None, None)\n return get_current_user()\n except (AssertionError, BadRequest, PreconditionFailed, Unauthorized):\n return None",
"def _get_user(request):\n try:\n userid = auth._get_user_session_key(request)\n except:\n return anonymoususer\n\n return load_user(userid)",
"def get_user():\n try:\n user = session[\"user\"]\n return user\n except:\n user = ''\n return user"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Look up a word in the CMU dictionary and return a list of syllables
|
def get_syllables(word):
    try:
        return CMU[word.lower()]
    except KeyError:
        return [[]]
|
[
"def syllabify_word(self, word):\n word_syllables = self.syllable.findall(word)\n if word_syllables:\n return [s for s in word_syllables]\n else:\n return [word]",
"def lookup(self, word):",
"def getSyllables(word):\n\tsyllables = []\n\tsyl = []\n\texp = Base.explode(word)\n\n\tfor i in range(len(exp)):\n\t\tc = exp[i]\n\t\tsyl.append(c)\n\t\tif i < len(exp) - 1:\n\t\t\tif Alphabet.isConsonant(c) and Alphabet.isConsonant(exp[i + 1]):\n\t\t\t\tsyllables.append(syl)\n\t\t\t\tsyl = []\n\tsyllables.append(syl)\n\n\tsyl = []\n\tsyl2 = []\n\tfor s in syllables:\n\t\tfor i in range(len(s)):\n\t\t\tif Alphabet.isConsonant(s[i]) and (i > 0 and i < len(s) - 1):\n\t\t\t\tif Alphabet.isVowel(s[i - 1]) and Alphabet.isVowel(s[i + 1]):\n\t\t\t\t\tsyl2.append(syl)\n\t\t\t\t\tsyl = []\n\t\t\tsyl.append(s[i])\n\t\tsyl2.append(syl)\n\t\tsyl = []\n\treturn syl2",
"def syllables(self, line):\n return [self.syllabify_word(w) for w in self.words(line)]",
"def finalTokenize(self,syllables):\n special_word = ['thành phố']\n special_word_time = ['lúc','vào lúc','vào thời điểm','thời điểm']\n city_name = ['hồ chí minh', 'đà nẵng', 'huế', 'hà nội']\n index = 0\n while (index < len(syllables) - 1):\n curr_word = syllables[index].lower()\n if curr_word in special_word:\n next_word = syllables[index + 1].lower()\n if next_word in city_name:\n syllables[index:(index+2)] = [syllables[index] + ' ' + syllables[index+1]]\n index += 1\n return syllables",
"def _convert_morphemes_to_syllables(\n self, input_morphemes: List[Tuple[str, str]]\n ) -> List[Syllable]:\n\n prev = None\n syllables = []\n for pos in input_morphemes:\n for char in pos[0]:\n tag = pos[1]\n for _func, _tag in self._correction.items():\n if _func(char, pos[1]):\n tag = _tag\n break\n syllable = Syllable(char, tag)\n syllable.prev = prev\n if prev is not None:\n prev.next = syllable\n syllables.append(syllable)\n prev = syllable\n\n return syllables",
"def syllables (self):\n if self._syllables:\n return self._syllables\n raw_syllables = SYLLABLES.get_syllables(self.clean_text, resolutions=True)\n syllables = [Syllable(i, s) for i, s in enumerate(raw_syllables)]\n # Assemble data about the containing word for each syllable\n word_data_list = []\n for w in self.words:\n data = (w.text, w.number, w.lemma, w.POS, w.tags)\n # If two words are joined by a resolution, the data of the SECOND word\n # is retroactively assigned to that resolved syllable, but the tags\n # of both are combined.\n if w.initial_resolution:\n previous_tags = word_data_list[-1][-1]\n combined_tags = w.tags + previous_tags\n combined_data = data[:-1] + (combined_tags,)\n word_data_list = word_data_list[:-1]\n word_data_list.append(combined_data)\n word_data_list.extend([data] * w.syl_count)\n # Assemble data about the containing line for each syllable\n line_data_list = []\n for l in self.raw_lines:\n data = (l.number, l.corrupt, l.tags)\n line_data_list.extend([data]*l.syl_count)\n # Update each syllable with word, line and stanza data\n for i, s in enumerate(syllables):\n s.number = i\n s.stanza = self.name\n s.stanza_tags = self.tags\n s.prosody = self.meter[i]\n s.meter = self.meter[i] #[ADDED FOR CONVENIENCE]\n s.word, s.word_number, s.lemma, s.POS, s.word_tags = word_data_list[i]\n s.line_number, s.corrupt, s.line_tags = line_data_list[i]\n # Assemble and add contour data\n contours = self._get_contours(syllables)\n for i, s in enumerate(syllables):\n s.contour = contours[i]\n self._syllables = syllables\n return syllables",
"def syllable_count(word: str):\n if len(word.split()) > 1:\n return [syllable_count(w) for w in word.split()]\n word = G2pModel.get_cmu([G2pModel.preprocess(word)])\n return cmu_syllable_count(word[0][0])",
"def _lookup(w, at_sentence_start, lookup):\r\n\r\n def lookup_abbreviation(w):\r\n \"\"\" Lookup abbreviation from abbreviation list \"\"\"\r\n # Remove brackets, if any, before lookup\r\n clean_w = w[1:-1] if w[0] == '[' else w\r\n # Return a single-entity list with one meaning\r\n m = Abbreviations.DICT.get(clean_w, None)\r\n return None if m is None else [ BIN_Meaning._make(m) ]\r\n\r\n # Start with a simple lookup\r\n m = lookup(w)\r\n\r\n if at_sentence_start or not m:\r\n # No meanings found in database, or at sentence start\r\n # Try a lowercase version of the word, if different\r\n lower_w = w.lower()\r\n if lower_w != w:\r\n # Do another lookup, this time for lowercase only\r\n if not m:\r\n m = lookup(lower_w)\r\n else:\r\n m.extend(lookup(lower_w))\r\n\r\n if not m and (lower_w != w or w[0] == '['):\r\n # Still nothing: check abbreviations\r\n m = lookup_abbreviation(w)\r\n if not m and w[0] == '[':\r\n # Could be an abbreviation with periods at the start of a sentence:\r\n # Lookup a lowercase version\r\n m = lookup_abbreviation(lower_w)\r\n if m and w[0] == '[':\r\n # Remove brackets from known abbreviations\r\n w = w[1:-1]\r\n\r\n if not m and BIN_Db._ADJECTIVE_TEST in lower_w:\r\n # Not found: Check whether this might be an adjective\r\n # ending in 'legur'/'leg'/'legt'/'legir'/'legar' etc.\r\n for aend, beyging in AdjectiveTemplate.ENDINGS:\r\n if lower_w.endswith(aend) and len(lower_w) > len(aend):\r\n prefix = lower_w[0 : len(lower_w) - len(aend)]\r\n # Construct an adjective descriptor\r\n if m is None:\r\n m = []\r\n m.append(BIN_Meaning(prefix + \"legur\", 0, \"lo\", \"alm\", lower_w, beyging))\r\n\r\n if not m:\r\n # Still nothing: check compound words\r\n cw = Wordbase.dawg().slice_compound_word(w)\r\n if not cw and lower_w != w:\r\n # If not able to slice in original case, try lower case\r\n cw = Wordbase.dawg().slice_compound_word(lower_w)\r\n if cw:\r\n # This looks like a compound word:\r\n # use the meaning of its last part\r\n prefix = \"-\".join(cw[0:-1])\r\n m = lookup(cw[-1])\r\n m = [ BIN_Meaning(prefix + \"-\" + r.stofn, r.utg, r.ordfl, r.fl,\r\n prefix + \"-\" + r.ordmynd, r.beyging)\r\n for r in m]\r\n\r\n if not m and lower_w.startswith('ó'):\r\n # Check whether an adjective without the 'ó' prefix is found in BÍN\r\n # (i.e. create 'óhefðbundinn' from 'hefðbundinn')\r\n suffix = lower_w[1:]\r\n if suffix:\r\n om = lookup(suffix)\r\n if om:\r\n m = [ BIN_Meaning(\"ó\" + r.stofn, r.utg, r.ordfl, r.fl,\r\n \"ó\" + r.ordmynd, r.beyging)\r\n for r in om if r.ordfl == \"lo\" ]\r\n\r\n # noinspection PyRedundantParentheses\r\n return (w, m)",
"def define(word: str):\n dictionary = PyDictionary()\n meanings = dictionary.meaning(word)\n return meanings",
"def lookup_abbreviation(w):\r\n # Remove brackets, if any, before lookup\r\n clean_w = w[1:-1] if w[0] == '[' else w\r\n # Return a single-entity list with one meaning\r\n m = Abbreviations.DICT.get(clean_w, None)\r\n return None if m is None else [ BIN_Meaning._make(m) ]",
"def syllable_features(sentence,word_index,syllable_index):\t\n\tword = sentence[word_index]\n\tsyllable = word[syllable_index][0]\n\n\t# Initialize feature dictionary\n\tfeatures = {}\n\n\t# Determine Ultima traits\n\n\t# 1. First syllable in line\n\tfeatures['first'] = (word_index == 0 and syllable_index == 0)\n\t# 2. Second to last syllable in line\n\tfeatures['second_to_last'] = (word_index == (len(sentence) - 1) and syllable_index == (len(word)-2))\n\t# 3. Last syllable in line\n\tfeatures['last'] = (word_index == (len(sentence)-1) and syllable_index == (len(word) - 1))\n\n\t# Traits useful for finding ellisions\n\n\t# 1. If last syllable in word and next word starts with a vowel\n\tif not word_index == (len(sentence) - 1) and syllable_index == (len(word) - 1):\n\t\tnext_syllable = sentence[word_index+1][0][0]\n\t\tfeatures['can_elide'] = bool(\n\t\t\tre.search(re_patterns['take_elision'], syllable) and\n\t\t\tsyllable_index == (len(word) - 1) and \n\t\t\tre.match(re_patterns['give_elision'], next_syllable)\n\t\t)\n\n\t# Determine Length by Nature\n\tif len(syllable) > 1:\n\t\tfeatures['final_two'] = syllable[-2:]\n\t\n\tfeatures['final_letter'] = syllable[-1]\n\n\t# Determine Length by Position\n\tfeatures['dipthong'] = bool(re.search(re_patterns['dipthong'],syllable))\n\n\tif not syllable_index == (len(word)-1):\n\t\tnext_syllable = word[syllable_index+1][0]\n\t\t\n\t\tfeatures['followed_stop_liq'] = bool(\n\t\t\tre.match(re_patterns['stop_liquid'],next_syllable)\n\t\t)\n\n\t\tfeatures['followed_two_cons'] = bool(\n\t\t\tre.match(re_patterns['two_consonants'],next_syllable)\n\t\t)\n\n\telse:\n\t\tfeatures['followed_stop_liq'] = False\n\t\tfeatures['followed_two_cons'] = False\n\n\treturn features",
"def get_word_en(self, word):\n request = HttpRequest(dict(urllib=self._urllib))\n options = dict(search_value=word, search_type=SearchType.WORD_EN)\n entries = request.get(options)\n \n return entries",
"def disambiguate(self, word):\n matches = re.match(r'^meng([aiueo])(.*)$', word)\n if matches:\n return 'k' + matches.group(1) + matches.group(2)",
"def num_syllables(word):\n return len(list(y for y in cmu_lookup(word) if y[-1].isdigit()))",
"def get_word_by_clue(self, clue : str) -> Word:\n for w in self.words:\n if w.clue == clue:\n return w\n print(\"Error: Word not found.\")\n return None",
"def translate(term, var_name, show_table=False):\n if var_name == \"nt\":\n # Name types\n tabl = {\n \"Aatelointinimi\": _(\"Noble Name\"), # \"Aatelointinimi\"\n \"Aateloitu nimi\": _(\"Noble Name\"), # \"Aateloitu nimi\"\n \"Also Known As\": _(\"Also Known As\"), # \"tunnettu myös\"\n \"Birth Name\": _(\"Birth Name\"), # \"syntymänimi\"\n \"Married Name\": _(\"Married Name\"), # \"avionimi\"\n \"Otettu nimi\": _(\"Taken Name\"),\n \"Sotilasnimi\": _(\"Soldier Name\"), # \"Sotilasnimi\n \"Taitelijanimi\": _(\"Artist Name\"), # \"Taiteilijanimi\n \"Vaihdettu nimi\": _(\"Changed Name\"), # \"Vaihdettu nimi\n \"Unknown\": _(\"Unknown type\"), # \"määrittämätön\"\n }\n elif var_name == \"evt\":\n # Event types\n tabl = {\n \"Arvonimi\": _(\"Grant Title\"), # arvonimen myöntäminen\n \"Baptism\": _(\"Baptism\"), # \"kaste\"\n \"Birth\": _(\"Birth\"), # \"syntymä\"\n \"Burial\": _(\"Burial\"), # \"hautaus\"\n \"Cause Of Death\": _(\"Cause Of Death\"), # \"kuolinsyy\"\n \"Census\": _(\"Census\"), # \"mainittu henkikirjassa\"\n \"Christening\": _(\"Christening\"), # \"kristillinen kaste\"\n \"Confirmation\": _(\"Confirmation\"), # \"ripille pääsy\"\n \"Death\": _(\"Death\"), # \"kuolema\"\n \"Degree\": _(\"Degree\"), # \"oppiarvo\"\n \"Divorce\": _(\"Divorce\"), # \"avioero\"\n \"Education\": _(\"Education\"), # \"koulutus\"\n \"Ehtoollinen\": _(\"Holy Communion\"), # \"ehtoollinen\"\n \"Elected\": _(\"Elected\"), # \"vaali\"\n \"Engagement\": _(\"Engagement\"), # \"kihlajaiset\"\n \"Family\": _(\"Family\"), # \"Family event marriage etc.\"\n \"First Communion\": _(\"First Communion\"), # \"ensimmäinen ehtoollinen\"\n \"Graduation\": _(\"Graduation\"), # \"valmistuminen\"\n \"Immigration\": _(\"Immigration\"), # \"maahanmuutto\"\n \"Käräjöinti\": _(\"Lawsuit\"), # \"käräjöinti\"\n \"Luottamustoimi\": _(\"Public Duty\"), # \"luottamustoimi\"\n \"Lähtömuutto\": _(\"Moved out\"), # \"lähtömuutto\"\n \"Marriage Banns\": _(\"Marriage Banns\"), # \"kuulutus avioliittoon\"\n \"Marriage\": _(\"Marriage\"), # \"avioliitto\"\n \"Medical Information\": _(\"Medical Information\"), # \"avioliitto\"\n \"Military Service\": _(\"Military Service\"), # \"asepalvelus\"\n \"Nobility Title\": _(\"Nobility Title\"), # \"aatelointi\"\n \"Occupation\": _(\"Occupation\"), # \"ammatti\"\n \"Ordination\": _(\"Ordination\"), # \"palkitseminen\"\n \"Onnettomuus\": _(\"Accident\"),\n \"Accident\": _(\"Accident\"),\n \"Estate inventory\": _(\"Estate Inventory\"), # perunkirjoitus\n \"Property\": _(\"Property\"), # \"omaisuus\"\n \"Residence\": _(\"Residence\"), # \"asuinpaikka\"\n \"Retirement\": _(\"Retirement\"), # \"eläkkeelle siirtyminen\"\n \"Sota\": _(\"War\"), # \"sota\"\n \"Tulomuutto\": _(\"War\"), # \"sota\"\n \"Virkatalo\": _(\"Official House\"), # virkatalo-oikeus\n \"Yhteiskunnallinen toiminta\": _(\"Social Activities\"),\n \"Nimenmuutos\": _(\"Name Change\"),\n \"Tulomuutto\": _(\"Moved to\"), # \"tulomuutto\"\n }\n elif var_name == \"role\":\n # Event role types or member role in family\n tabl = {\n \"As_child\": _(\"as a child\"), # Role as family member\n \"As_parent\": _(\"as spouse\"), # Role as family member\n # Roles between parent and child doesn't currently exist in our data model?\n \"child\": _(\"Child\"), # Role as family member\n \"Adoptio\": _(\"Adoption\"), # Adoptiolapsi\n \"Kasvatus\": _(\"Foster-child\"), # Kasvatuslapsi\n \"Clergy\": _(\"Clergy\"), # \"pappi\"\n \"Edunsaaja\": _(\"Beneficiary\"),\n # \"Family\": _(\"Family\"), #\"perhe\" ?\n \"father\": _(\"Father\"),\n \"Kantaja\": 
_(\"Plaintiff\"),\n \"Kohde\": _(\"Concerned\"),\n \"kummi\": _(\"Wittness\"), # \"kummina\"\n \"Kummi\": _(\"Wittness\"), # \"kummina\"\n \"man\": _(\"Husband\"),\n \"mother\": _(\"Mother\"),\n \"Myyjä\": _(\"Myyjä\"), # \"myyjänä\"\n \"Opettaja\": _(\"Teacher\"),\n \"Osallinen\": _(\"Osallinen\"), # \"osallisena\"\n \"Ostaja\": _(\"Buyer\"), # \"ostajana\"\n \"parent\": _(\"Spouse\"), # Role as family member\n \"Perillinen\": _(\"Heir\"), # \"perillisenä\"\n \"Perinnönjättäjä\": _(\"Testator\"), # \"perinnönjättäjänä\"\n \"Primary\": _(\"Primary\"), # \"pääosallisena\"\n \"Pääosallinen\": _(\"Pääosallinen\"), # \"pääosallisena\"\n \"Toimittaja\": _(\"Ceremonial\"),\n \"Vihkijä\": _(\"Officiant\"), # \"vihkijä\"\n \"Vastaaja\": _(\"Defendant\"),\n \"wife\": _(\"Wife\"),\n \"Unknown\": _(\"Unknown\"), # \"määräämätön\"\n }\n elif var_name == \"conf\":\n # Confidence levels\n tabl = {\n \"0\": _(\"confidence 0/4\"), # \"erittäin matala\"\n \"1\": _(\"confidence 1/4\"), # \"alhainen\"\n \"2\": _(\"confidence 2/4\"), # \"normaali\"\n \"3\": _(\"confidence 3/4\"), # \"korkea\"\n \"4\": _(\"confidence 4/4\"), # \"erittäin korkea\"\n }\n elif var_name == \"notet\":\n # Note types\n tabl = {\n \"Cause Of Death\": _(\"Cause Of Death\"), # \"kuolinsyy\"\n \"Citation\": _(\"Citation\"), # \"viitteen lisätieto\"\n \"Event Note\": _(\"Event Note\"), # \"tapahtuman lisätieto\"\n \"Family Note\": _(\"Family Note\"), # \"perheen lisätieto\"\n \"General\": _(\"General\"), # \"yleistä\"\n \"Html code\": _(\"Html code\"), # \"html-koodi\"\n \"Link\": _(\"See\"), # \"ks.\"\n \"Media Note\": _(\"Media Note\"), # \"media\"\n \"Media Reference Note\": _(\"Media Reference Note\"), # \"mediaviite\"\n \"Name Note\": _(\"Name Note\"), # \"nimen lisätieto\"\n \"Person Note\": _(\"Person Note\"), # \"henkilön lisätieto\"\n \"Place Note\": _(\"Place Note\"), # \"paikan lisätieto\"\n \"Repository Note\": _(\"Repository Note\"), # \"arkiston lisätieto\"\n \"Research\": _(\"Research\"), # \"tutkimus\"\n \"Source Note\": _(\"Source Note\"), # \"lähteen lisätieto\"\n \"To Do\": _(\"To Do\"), # \"tehtävä/työlistalla\"\n \"Transcript\": _(\"Transcript\"), # \"kirjoituskopio\"\n \"Web Home\": _(\"Home Page\"), # \"Kotisivu\"\n \"Web Search\": _(\"Web Search\"), # \"Verkosta löytynyt\"\n }\n elif var_name == \"rept\":\n # Repository types\n tabl = {\n \"Album\": _(\"Album\"), # \"albumi\"\n \"Archive\": _(\"Archive\"), # \"arkisto\"\n \"Collection\": _(\"Collection\"), # \"kokoelma\"\n \"Library\": _(\"Library\"), # \"kirjasto\"\n \"Unknown\": _(\"Unknown type\"), # \"tuntematon\"\n \"Web site\": _(\"Web site\"), # \"verkkopalvelu\"\n }\n elif var_name == \"medium\":\n # Document types\n tabl = {\n \"Asiakirja\": _(\"Document\"), # \"asiakirja\"\n \"Book\": _(\"Book\"), # \"kirja\"\n \"Electronic\": _(\"Electronic\"), # \"sähköinen\"\n \"Magazine\": _(\"Magazine\"), # \"aikakauslehti\"\n \"Manuscript\": _(\"Manuscript\"), # \"käsikirjoitus\"\n \"Newspaper\": _(\"Newspaper\"), # \"lehti\"\n \"Photo\": _(\"Photo\"), # valokuva\n \"Tombstone\": _(\"Tombstone\"), # \"hautakivi\"\n \"Unknown\": _(\"Unknown\"), # \"tuntematon\"\n }\n elif var_name == \"lt\":\n # Location (place) types\n tabl = {\n \"Alue\": _(\"Region\"),\n \"Alus\": _(\"Vessel\"),\n \"Borough\": _(\"Borough\"), # \"aluehallintoyksikkö\"\n \"Building\": _(\"Building\"), # \"rakennus tai torppa\"\n \"Church\": _(\"Church\"), # \"kirkko\" (rakennus)\n \"City\": _(\"City\"), # \"paikkakunta\"\n \"Country\": _(\"Country\"), # \"maa\"\n \"Department\": _(\"Department\"), 
#\n \"District\": _(\"District\"), # \"lääni\"\n \"Farm\": _(\"Farm\"), # \"tila\"\n \"Talo\": _(\"Farm\"), # \"tila\"\n \"Hamlet\": _(\"Hamlet\"), # \"taloryhmä\"\n \"Hautapaikka\": _(\"Burial Site\"),\n \"Hautausmaa\": _(\"Cemetery\"), # \"hautausmaa\"\n \"Kappeliseurakunta\": _(\"Chapel Parish\"), # \"kappeliseurakunta\"\n \"Kartano\": _(\"Mansion\"), # \"kartano\"\n \"Kirkkokunta\": _(\"Denomination\"), # \"kirkkokunta\"\n \"Katuosoite\": _(\"Street Address\"),\n \"Kortteli\": _(\"Block\"), # \"kortteli\"\n \"Kuntakeskus\": _(\"Kuntakeskus\"), # \"kuntakeskus\"\n \"Kuvernementti\": _(\"Governorate\"), # kuvernementti\n \"Laitos\": _(\"Institute\"), # laitos\n \"Linnoitus\": _(\"Fortress\"), # \"linnoitus\"\n \"Locality\": _(\"Locality\"), # \"kulmakunta\"\n \"Luonnonpaikka\": _(\"Natural Place\"),\n \"Municipality\": _(\"Municipality\"), # kunta\n \"Neighborhood\": _(\"Neighborhood\"), # kulmakunta\n \"Oppilaitos\": _(\"Learning Institution\"), # \"oppilaitos\"\n \"Organisaatio\": _(\"Organization\"), # \"organisaatio\"\n \"Parish\": _(\"Parish\"), # \"seurakunta\"\n \"Province\": _(\"Province\"), # provinssi\n \"Region\": _(\"Region\"), # \"alue\"\n \"srk\": _(\"Parish\"), # \"seurakunta\"\n \"Sairaala\": _(\"Hospital\"),\n \"State\": _(\"State\"), # \"valtio\"\n \"Säteri\": _(\"Seat Farm\"),\n \"Tila\": _(\"Farm\"), # \"maatila\"\n \"Tontti\": _(\"Tontti\"), # \"tontti\"\n \"Torppa\": _(\"Torppa\"), # \"torppa\"\n \"Town\": _(\"Town\"), # \"kaupunki\"\n \"Village\": _(\"Village\"), # \"kylä\"\n \"Yritys\": _(\"Company\"), # yritys\n \"Unknown\": _(\"Unknown\"), # \"tuntematon\"\n }\n elif var_name == \"lt_in\":\n # Location types, inessive\n tabl = {\n \"Alue\": _(\"in the region\"), # \"alueella\"\n \"Alus\": _(\"on vessel\"), # \"aluksessa\"\n \"Borough\": _(\"in the borough of\"), # \"aluehallintoyksikössä\"\n \"Building\": _(\"in the building of\"), # \"rakennuksessa tai torpassa\"\n \"Church\": _(\"in the church\"), # kirkossa\n \"City\": _(\"in the City\"), # \"paikassa\"\n \"Country\": _(\"in the country of\"), # \"maassa\"\n \"Department\": _(\"in the department of\"), # \"\n \"District\": _(\"in the district of\"), # \"läänissä\"\n \"Farm\": _(\"in the farm of\"), # \"tilalla\"\n \"Hamlet\": _(\"in the hamlet of\"), # \"talossa\"\n \"Hautapaikka\": _(\"in a burial site of\"),\n \"Hautausmaa\": _(\"in the cemetery\"), # \"hautausmaalla\"\n \"Kappeliseurakunta\": _(\"in chapel parish\"), # \"kappeliseurakunnassa\"\n \"Kartano\": _(\"in the mansion of\"), # \"kartanossa\"\n \"Kirkkokunta\": _(\"in the denomination of\"), # \"kirkkokunta\"\n \"Katuosoite\": _(\"at street address\"),\n \"Kortteli\": _(\"in the block\"), # \"kortteli\"\n \"Kuntakeskus\": _(\"Kuntakeskuksessa\"), # \"kuntakeskuksessa\"\n \"Kuvernementti\": _(\"in the governorate\"), # kuvernementti\n \"Laitos\": _(\"in the nstitute\"), # laitos\n \"Linnoitus\": _(\"in the fortress\"), # \"linnoituksessa\"\n \"Locality\": _(\"at locality of\"), # \"kulmakuntannassa\"\n \"Luonnonpaikka\": _(\"in a natural place of\"),\n \"Municipality\": _(\"in the municipality of\"),\n \"Neighborhood\": _(\"in the neighborhood of\"), # kulmakunta\n \"Oppilaitos\": _(\"in the learning lnstitution\"), # \"oppilaitos\"\n \"Organisaatio\": _(\"in the organization of\"), # \"organisaatiossa\"\n \"Parish\": _(\"in the parish\"), # \"seurakunnassa\"\n \"Province\": _(\"int the province of\"), # provinssi\n \"Region\": _(\"in the region\"), # \"alueella\"\n \"Sairaala\": _(\"at the hospital\"),\n \"srk\": _(\"in the parish of\"), # 
\"seurakunnassa\"\n \"State\": _(\"in the state\"), # \"valtiossa\"\n \"Säteri\": _(\"in seat farm\"),\n \"Talo\": _(\"in the farm\"), # tilalla\n \"Tontti\": _(\"Tontilla\"), # \"tontilla\"\n \"Town\": _(\"in the town\"), # \"kaupunki\"\n \"Village\": _(\"in the village of\"), # \"kylässä\"\n \"Yritys\": _(\"at the company\"), # yritys\n \"Unknown\": _(\"in a place of unkown type\"), # \"tuntematon\"\n }\n try:\n if term:\n return tabl[term]\n except:\n return term + \":ssa\"\n\n elif var_name == \"marr\":\n # Marriage types\n tabl = {\"Married\": _(\"Married\"), \"Unknown\": _(\"Unknown relation\")}\n\n elif var_name == \"child\":\n # Child relations to family\n tabl = {SEX_FEMALE: _(\"Daughter\"), SEX_MALE: _(\"Son\"), SEX_UNKOWN: _(\"Child\")}\n\n elif var_name == \"handle\":\n # Shows handle '_dd3d7f7206c3ca3408c9daf6c58' in short format '_d…f6c58'\"\n if len(term) > 8:\n return term[:2] + \"…\" + term[-5:]\n return term\n elif var_name == \"urldomain\":\n # Pick domain part of url\n return urlparse(term).hostname\n\n try:\n if term:\n return tabl[term]\n if show_table:\n # Return conversion table\n return tabl\n print(\n f\"WARNING: ui.jinja_filters.translate: missing term={term}, var_name={var_name}\"\n )\n return \"~\"\n except:\n return \"'\" + term + \"'\"",
"def word_vec(word, dictionary):\n with open(dictionary) as file_:\n for line in file_:\n dict_word = line.split(' ',1)[0]\n if(dict_word == word):\n line = line.strip('\\n');\n return line.split(' ',1)[1]\n return \"Word not found\"",
"def syl_filter(word, min_syllables, max_syllables):\n return min_syllables <= word[\"numSyllables\"] <= max_syllables"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Represent strong and weak stress of a word with a series of 1's and 0's
|
def stress(word, variant="primary"):
    syllables = get_syllables(word)
    if syllables:
        # TODO: Implement a more advanced way of handling multiple pronunciations than just picking the first
        if variant == "primary" or variant not in ["all", "min", "max"]:
            return stress_from_syllables(syllables[0])
        else:
            all_pronunciations = [stress_from_syllables(x) for x in syllables]
            all_pronunciations.sort()
            all_pronunciations.sort(key=len)  # Sort by shortest pronunciation
            if variant == "all":
                return all_pronunciations
            elif variant == "min":
                return all_pronunciations[0]  # shortest pronunciation, latest stress
            elif variant == "max":
                return all_pronunciations[-1]  # most syllables, earliest stress
    # Provisional logic for adding stress when the word is not in the dictionary is to stress the first syllable only
    return '1' + '0' * (count_syllables(word) - 1)
|
[
"def stress(word, SECONDARY=True):\n def extract_stress(s):\n n = int(s[-1])\n if not SECONDARY and n > 0:\n return 1\n return n\n syllables = filter(lambda x: x[-1].isdigit(), cmu_lookup(word))\n return map(extract_stress, syllables)",
"def to_true_stress(true_stress, stretch):\n return true_stress",
"def to_true_stress(engineering_stress, stretch):\n return multiply(stretch, engineering_stress)",
"def stress_counts_by_syllable(text, SECONDARY=True):\n result = []\n temp = []\n for (word, tag) in tag_text(text):\n if tag in PUNCTUATION_TAGS:\n result.append((temp, word))\n temp = []\n else:\n temp += stress(word, SECONDARY=SECONDARY)\n if temp:\n result.append((temp, None))\n return result",
"def easy_words():\n length = random.randint(4,6)\n constraints = init_constraint(length)\n return ''.join(constraints)",
"def syllable_features(sentence,word_index,syllable_index):\t\n\tword = sentence[word_index]\n\tsyllable = word[syllable_index][0]\n\n\t# Initialize feature dictionary\n\tfeatures = {}\n\n\t# Determine Ultima traits\n\n\t# 1. First syllable in line\n\tfeatures['first'] = (word_index == 0 and syllable_index == 0)\n\t# 2. Second to last syllable in line\n\tfeatures['second_to_last'] = (word_index == (len(sentence) - 1) and syllable_index == (len(word)-2))\n\t# 3. Last syllable in line\n\tfeatures['last'] = (word_index == (len(sentence)-1) and syllable_index == (len(word) - 1))\n\n\t# Traits useful for finding ellisions\n\n\t# 1. If last syllable in word and next word starts with a vowel\n\tif not word_index == (len(sentence) - 1) and syllable_index == (len(word) - 1):\n\t\tnext_syllable = sentence[word_index+1][0][0]\n\t\tfeatures['can_elide'] = bool(\n\t\t\tre.search(re_patterns['take_elision'], syllable) and\n\t\t\tsyllable_index == (len(word) - 1) and \n\t\t\tre.match(re_patterns['give_elision'], next_syllable)\n\t\t)\n\n\t# Determine Length by Nature\n\tif len(syllable) > 1:\n\t\tfeatures['final_two'] = syllable[-2:]\n\t\n\tfeatures['final_letter'] = syllable[-1]\n\n\t# Determine Length by Position\n\tfeatures['dipthong'] = bool(re.search(re_patterns['dipthong'],syllable))\n\n\tif not syllable_index == (len(word)-1):\n\t\tnext_syllable = word[syllable_index+1][0]\n\t\t\n\t\tfeatures['followed_stop_liq'] = bool(\n\t\t\tre.match(re_patterns['stop_liquid'],next_syllable)\n\t\t)\n\n\t\tfeatures['followed_two_cons'] = bool(\n\t\t\tre.match(re_patterns['two_consonants'],next_syllable)\n\t\t)\n\n\telse:\n\t\tfeatures['followed_stop_liq'] = False\n\t\tfeatures['followed_two_cons'] = False\n\n\treturn features",
"def positive_word(self, tweet):\n positive_words = set(['wow', 'beautiful', 'amazing', 'won', 'want', 'really cool', 'feel better', 'good']) # Constructing a set of postive words from tweet messages. \n dense = self.tfidf_vectorizer.transform([tweet]).toarray()[0] # Find the tokens of tweet which are part of vocabulary \n dense = np.where(dense > 0)[0] \n terms = set([self.tfidf_vectorizer.get_feature_names()[x] for x in dense]) # Converting the index list to actual feature names\n return len(terms.intersection(positive_words))/(len(terms) + 1.0) # Adding 1 in denominator to prevent division by 0. ",
"def get_stress(self, stran):\n return self.E * stran * (stran + 2.) / 2. / (stran + 1.)",
"def calc_word_value(w):\n som = 0\n q = list(w)\n for i in q:\n if (i in string.ascii_lowercase) or (i in string.ascii_uppercase):\n som += LETTER_SCORES[i.upper()]\n return som",
"def tone_count_with_negation_check(dict, article):\n pos_count = 0\n neg_count = 0\n \n pos_words = []\n neg_words = []\n \n input_words = lemmatizer(article)\n \n word_count = len(input_words)\n \n for i in range(0, word_count):\n if input_words[i] in dict['Negative']:\n neg_count += 1\n neg_words.append(input_words[i])\n if input_words[i] in dict['Positive']:\n if i >= 3:\n if negated(input_words[i - 1]) or negated(input_words[i - 2]) or negated(input_words[i - 3]):\n neg_count += 1\n neg_words.append(input_words[i] + ' (with negation)')\n else:\n pos_count += 1\n pos_words.append(input_words[i])\n elif i == 2:\n if negated(input_words[i - 1]) or negated(input_words[i - 2]):\n neg_count += 1\n neg_words.append(input_words[i] + ' (with negation)')\n else:\n pos_count += 1\n pos_words.append(input_words[i])\n elif i == 1:\n if negated(input_words[i - 1]):\n neg_count += 1\n neg_words.append(input_words[i] + ' (with negation)')\n else:\n pos_count += 1\n pos_words.append(input_words[i])\n elif i == 0:\n pos_count += 1\n pos_words.append(input_words[i])\n \n '''\n print('The results with negation check:', end='\\n\\n')\n print('The # of positive words:', pos_count)\n print('The # of negative words:', neg_count)\n print('The list of found positive words:', pos_words)\n print('The list of found negative words:', neg_words)\n print('\\n', end='')\n '''\n \n results = [word_count, pos_count, neg_count, pos_words, neg_words]\n \n return results",
"def scalar_inc_dec(word, valence, words_and_emoticons):\n previous_word = ' '\n next_word = ' '\n scalar = 0.0\n\n if word in BOOSTER_DICT:\n\n scalar = BOOSTER_DICT[word]\n if valence < 0:\n scalar *= -1\n\n return scalar",
"def train(sf = '/home/link/code/tweet-stock/data/subjclueslen1-HLTEMNLP05.tff'):\n weights = {\n \"positive\" : 1,\n \"neutral\" : 0.0,\n \"negative\" : -1.0,\n \"weakneg\" : -0.5,\n \"both\" : 0.0, # no way to distinguish with unigrams\n \"weaksubj\" : 1.0,\n \"strongsubj\" : 5.0\n }\n\n sentiments = {} # make default 0?\n\n with open(sf) as f:\n for line in f:\n x = dict(x.split(\"=\") for x in line.split())\n sentiments[x['word1']] = weights[x['type']] * \\\n weights[x['priorpolarity']]\n \n return sentiments",
"def wordSquares(words):\n pass",
"def nudgeWeights(self, nudge = 0.001):\n\t\tfor syn in self.synapses:\n\t\t\tif syn.enabled == True:\n\t\t\t\tpolarity = random.choice((-1,1)) \n\t\t\t\tadj = polarity * random.random() * nudge\n\t\t\t\tsyn.weight += adj\n\t\t# the idea being that a weight of 0.0 means that the synapse is \"Disabled\". ",
"def negative_word(self, tweet):\n negative_words = set(['wrong', 'worst', 'warned', 'dont like', 'upset', 'ugh', 'bad']) # Using the tweet data to find negative words\n dense = self.tfidf_vectorizer.transform([tweet]).toarray()[0]\n dense = np.where(dense > 0)[0]\n terms = set([self.tfidf_vectorizer.get_feature_names()[x] for x in dense])\n return len(terms.intersection(negative_words))/(len(terms) + 1.0)",
"def train(self, s, path=\"spelling.txt\"):\n model = {}\n for w in re.findall(\"[a-z]+\", s.lower()):\n model[w] = w in model and model[w] + 1 or 1\n model = (\"%s %s\" % (k, v) for k, v in model.items())\n model = \"\\n\".join(model)\n f = open(path, \"w\")\n f.write(model)\n f.close()",
"def oneOrSeveral(amount, word):\n if amount != 1:\n word += 's'\n\n return word",
"def classify(self, sText):\n sumPos = 0.0\n sumNeg = 0\n sumAll = 0.0\n sText = sText.lower()\n tokens = self.tokenize(sText)\n strPos = self.pos.keys()\n for i in range (len(self.pos)):\n sumAll += self.pos[strPos[i]]+self.neg[strPos[i]]\n for i in range (len(tokens)):\n if self.pos.has_key(tokens[i])==False:\n self.pos[tokens[i]]=0\n self.neg[tokens[i]]=0\n sumPos += math.log(float(self.pos[tokens[i]]+1)/float(sumAll))\n sumNeg += math.log(float(self.neg[tokens[i]]+1)/float(sumAll))\n print sumPos\n print sumNeg\n if sumPos >= sumNeg:\n return \"Positive\"\n else:\n return \"Negtive\"",
"def trick_or_treat():\n return 'trick' if random.random() < .5 else 'treat'"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Get stress notation for every line in the poem
|
def scanscion(tokenized_poem):
    line_stresses = []
    currline = 0
    for line in tokenized_poem:
        line_stresses.append([])
        [line_stresses[currline].append(stress(word)) for word in line if word]
        currline += 1
    return line_stresses
|
[
"def stresses_for_line(line):\n\n\tparts = line.split('\\t')\n\n\tif len(parts) == 2:\n\t\ttext, info = parts\n\t\tstresses_string = get_property(info, 'stress')\n\t\tstresses = ''.join(stresses_string.split())\n\t\treturn list(stresses)\n\telif len(parts) == 1:\n\t\treturn stresses_for_text(parts[0])",
"def get_stress(self):\n\n stress = []\n\n for group in self.material_groups:\n stress.append(\n {\n \"Material\": group.material.name,\n \"sig_zz_n\": group.stress_result.sig_zz_n,\n \"sig_zz_mxx\": group.stress_result.sig_zz_mxx,\n \"sig_zz_myy\": group.stress_result.sig_zz_myy,\n \"sig_zz_m11\": group.stress_result.sig_zz_m11,\n \"sig_zz_m22\": group.stress_result.sig_zz_m22,\n \"sig_zz_m\": group.stress_result.sig_zz_m,\n \"sig_zx_mzz\": group.stress_result.sig_zx_mzz,\n \"sig_zy_mzz\": group.stress_result.sig_zy_mzz,\n \"sig_zxy_mzz\": group.stress_result.sig_zxy_mzz,\n \"sig_zx_vx\": group.stress_result.sig_zx_vx,\n \"sig_zy_vx\": group.stress_result.sig_zy_vx,\n \"sig_zxy_vx\": group.stress_result.sig_zxy_vx,\n \"sig_zx_vy\": group.stress_result.sig_zx_vy,\n \"sig_zy_vy\": group.stress_result.sig_zy_vy,\n \"sig_zxy_vy\": group.stress_result.sig_zxy_vy,\n \"sig_zx_v\": group.stress_result.sig_zx_v,\n \"sig_zy_v\": group.stress_result.sig_zy_v,\n \"sig_zxy_v\": group.stress_result.sig_zxy_v,\n \"sig_zz\": group.stress_result.sig_zz,\n \"sig_zx\": group.stress_result.sig_zx,\n \"sig_zy\": group.stress_result.sig_zy,\n \"sig_zxy\": group.stress_result.sig_zxy,\n \"sig_1\": group.stress_result.sig_1,\n \"sig_3\": group.stress_result.sig_3,\n \"sig_vm\": group.stress_result.sig_vm,\n }\n )\n\n return stress",
"def stress(word, variant = \"primary\"):\n\n syllables = get_syllables(word)\n\n if syllables:\n # TODO: Implement a more advanced way of handling multiple pronunciations than just picking the first\n if variant == \"primary\" or variant not in [\"all\", \"min\", \"max\"]:\n return stress_from_syllables(syllables[0])\n else:\n all_pronunciations = [stress_from_syllables(x) for x in syllables]\n all_pronunciations.sort()\n all_pronunciations.sort(key=len) # Sort by shortest pronunciation\n if variant == \"all\":\n return all_pronunciations\n elif variant == \"min\":\n return all_pronunciations[0] # shorest pronunciation, latest stress\n elif variant == \"max\":\n return all_pronunciations[-1] # most syllables, earliest stress\n\n return stress_numbers\n\n # Provisional logic for adding stress when the word is not in the dictionary is to stress first syllable only\n return '1' + '0' * (count_syllables(word) - 1)",
"def get_tex(self) -> typing.Tuple[str, typing.List[str]]:\n desc = \"\\\\boldsymbol{v}\\\\mapsto\"\n desc += \"\\\\nablaa\\\\cdot\\\\boldsymbol{v}\"\n desc += \"(\" + \",\".join([_to_tex(i, True) for i in self.dof_point()]) + \")\"\n return desc, []",
"def get_stress(self, stran):\n return self.E * stran * (stran + 2.) / 2. / (stran + 1.)",
"def stress(word, SECONDARY=True):\n def extract_stress(s):\n n = int(s[-1])\n if not SECONDARY and n > 0:\n return 1\n return n\n syllables = filter(lambda x: x[-1].isdigit(), cmu_lookup(word))\n return map(extract_stress, syllables)",
"def get_stresses(self, len_unit, stress_unit):\n cx_w = np.arange(0, self._len_cx, ((self._width_cx + self._len_cx) / 1000))\n cx_h = np.arange(0, self._width_cx, ((self._width_cx + self._len_cx) / 100))\n shear_stress = [[0] * len(cx_h)] * len(cx_w)\n axial_stress = [[0] * len(cx_h)] * len(cx_w)\n\n for index in range(len(cx_w)):\n thick = 0\n position = cx_w[index]*self._units.get_len_conversion(len_unit)\n a_dash = 0\n ya_dash = 0\n for rect in self._rects:\n height = rect['h']\n width = rect['b']\n y_coor = rect['y_loc']\n if y_coor <= position < (y_coor + height):\n thick = width\n a_dash += ((position - y_coor) * width)\n ya_dash += ((position + y_coor) / 2) * (position - y_coor) * width\n break\n if position >= (y_coor + height):\n thick = width\n a_dash += (width * height)\n ya_dash += (((height / 2) + y_coor) * width * height)\n if a_dash != 0:\n y_dash = ya_dash / a_dash\n y_dash = self._prop['yNA'] - y_dash\n else:\n y_dash = 0\n q = y_dash * a_dash\n shear_stress[len(cx_w) - index - 1] = [(self._sf * q / (self._prop['I'] * thick))] * len(cx_h)\n axial_stress[len(cx_w) - index - 1] = [((self._bm * (self._prop['yNA'] - position) / self._prop['I']) +\n self._af / self._prop['A'])]*len(cx_h)\n\n return shear_stress, axial_stress, [self._width_cx, self._len_cx]",
"def _lines_charm ( self ) :\n sel = self._selection ( 'CharmLines' )\n if sel : return sel\n #\n from StrippingConf.StrippingLine import StrippingLine\n #\n sel = [\n ##\n StrippingLine (\n \"D02KpiFor\" + self.name() ,\n prescale = self['D0Prescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.D02Kpi() ]\n ) ,\n ##\n StrippingLine (\n \"DstarFor\" + self.name() ,\n prescale = self['D*Prescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.Dstar() ]\n ) ,\n ##\n StrippingLine (\n \"DFor\" + self.name() ,\n prescale = self['D+Prescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.Dplus () ]\n ) ,\n ##\n StrippingLine (\n \"DsFor\" + self.name() ,\n prescale = self['DsPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.Ds() ]\n ) ,\n ##\n StrippingLine (\n \"LambdaCFor\" + self.name() ,\n prescale = self['LambdaCPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.LamC () ]\n ) ,\n StrippingLine (\n \"LambdaC2pKKFor\" + self.name() ,\n prescale = self['LambdaCPrescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.LamC2pKK () ]\n ) ,\n ## Sigma_c\n StrippingLine (\n \"SigmaCFor\" + self.name() ,\n prescale = self['SigmaCPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.SigC () ]\n ) ,\n ## Lambda_c*\n StrippingLine (\n \"LambdaCstarFor\" + self.name() ,\n prescale = self['LambdaC*Prescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.LamCstar () ]\n ) ,\n ## DiCharm\n StrippingLine (\n \"DiCharmFor\" + self.name() ,\n prescale = self['DiCharmPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.DiCharm () ]\n ) ,\n ##\n StrippingLine (\n \"DiMuonAndCharmFor\" + self.name() ,\n prescale = self['DiMu&CharmPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.DiMuonAndCharm () ]\n ) ,\n ##\n StrippingLine (\n \"DoubleDiMuonFor\" + self.name() ,\n prescale = self['DoubleDiMuPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.DoubleDiMuon () ]\n ) ,\n ##\n StrippingLine (\n \"ChiAndCharmFor\" + self.name() ,\n prescale = self['Chi&CharmPrescale'] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.ChiAndCharm () ]\n ) ,\n ##\n StrippingLine (\n \"CharmAndWFor\" + self.name() ,\n prescale = self['Charm&WPrescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.CharmAndW () ]\n ) ,\n ##\n StrippingLine (\n \"DiMuonAndWFor\" + self.name() ,\n prescale = self['DiMuon&WPrescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.DiMuonAndW () ]\n ) ,\n ##\n StrippingLine (\n \"ChiAndWFor\" + self.name() ,\n prescale = self['Chi&WPrescale' ] , ## ATTENTION! Prescale here !!\n checkPV = self['CheckPV' ] ,\n algos = [ self.ChiAndW () ]\n ) ,\n ##\n ]\n #\n return self._add_selection ( 'CharmLines' , sel )",
"def thermo_string(data):\n tab = ' '*4\n endl = '\\n'\n s = data['Name'] + endl + \\\n '{' + endl + \\\n tab + 'specie' + endl + \\\n tab + '{' + endl + \\\n 2*tab + 'nMoles 1;' + endl + \\\n 2*tab + 'molWeight %7.4f;' % data['W'] + endl + \\\n tab + '}' + endl + \\\n tab + 'thermodynamics' + endl + \\\n tab + '{' + endl + \\\n 2*tab + 'Tlow %6.1f;' % data['Tlow'] + endl + \\\n 2*tab + 'Thigh %6.1f;' % data['Thigh'] + endl + \\\n 2*tab + 'Tcommon %6.1f;' % data['Tmid'] + endl + \\\n 2*tab + 'highCpCoeffs ( %s );' % \" \".join(['%10.9e'%i for i in data['highCpCoeffs']]) + endl + \\\n 2*tab + 'lowCpCoeffs ( %s );' % \" \".join(['%10.9e'%i for i in data['lowCpCoeffs']]) + endl + \\\n tab + '}' + endl + \\\n tab + 'transport' + endl + \\\n tab + '{' + endl + \\\n 2*tab + 'As %10.9e;' % data['As'] + endl + \\\n 2*tab + 'Ts %7.3f;' % data['Ts'] + endl + \\\n tab + '}' + endl + \\\n '}' + 2*endl;\n return s",
"def stress(X,Q,D):\n Y = projected_positions(X,Q)\n s2 = mds.stress(Y,D)\n return s2",
"def get_tex(self) -> typing.Tuple[str, typing.List[str]]:\n assert isinstance(self.point, VectorFunction)\n if len(self.point) == 1:\n desc = \"v\\\\mapsto \"\n desc += f\"v'({','.join([_to_tex(i, True) for i in self.point])})\"\n return desc, []\n desc = \"v\\\\mapsto\"\n desc += \"\\\\frac{\\\\partial\"\n if sum(self.derivative) > 1:\n desc += f\"^{{{sum(self.derivative)}}}\"\n desc += \"}{\"\n for v, i in zip(\"xyz\", self.derivative):\n if i > 0:\n desc += f\"\\\\partial {v}\"\n if i > 1:\n desc += f\"^{{{i}}}\"\n desc += \"}\"\n desc += f\"v({','.join([_to_tex(i, True) for i in self.point])})\"\n return desc, []",
"def calculate_stress_and_tangent_modulus(self):\n eps = self._strain\n ep0 = self._strain_0\n epp = self._strain_p\n epr = self._strain_r\n sgr = self._stress_r\n K = self._K\n Z = self._Z\n fc = self._fc\n\n # == inequality signs are reversed compared to theory becuase of the negative signs\n\n # positive strain\n if eps >= 0:\n self._stress = 0.0\n self._Et = 0.0\n return\n\n # loading path\n if eps <= epr:\n if eps >= ep0:\n stress = K * fc * (2 * eps / ep0 - (eps / ep0) ** 2)\n tangen = K * fc * (2 / ep0 - 2 * (eps / ep0 ** 2))\n else:\n stress = K * fc * (1 + Z * (eps - ep0))\n if stress < 0.2 * K * fc:\n stress = 0.2 * K * fc\n tangen = 0\n else:\n tangen = K * fc * Z\n\n # unloading path\n else:\n if eps >= epp:\n self._stress = 0.0\n self._Et = 0.0\n return\n stress = -(sgr * eps - epp * sgr) / (epr - epp)\n tangen = -sgr / (epr - epp)\n\n self._stress = -1 * stress\n self._Et = -1 * tangen",
"def attractorstring(self):\n attractorstring = \"\"\n _, attractor = RBN.get_cycle(self.nodes)\n for count, state in enumerate(attractor):\n attractorstring += str(count) + \" \" + str(state) + linesep\n return attractorstring",
"def makePoem():\n adjFile = r\"C:\\Users\\shockma\\Documents\\Special\\Python\\extract\\adj.txt\"\n advFile = r\"C:\\Users\\shockma\\Documents\\Special\\Python\\extract\\adv.txt\"\n nounFile = r\"C:\\Users\\shockma\\Documents\\Special\\Python\\extract\\noun.txt\"\n prepFile = r\"C:\\Users\\shockma\\Documents\\Special\\Python\\extract\\prepositions.txt\"\n verbFile = r\"C:\\Users\\shockma\\Documents\\Special\\Python\\extract\\verb.txt\"\n adj = getWord(adjFile, 3)\n adv = getWord(advFile, 1)\n noun = getWord(nounFile, 3)\n prep = getWord(prepFile, 2)\n verb = getWord(verbFile, 3)\n aan = aOrAn(adj)\n return aan[0] + ' ' + adj[0] \\\n + ' ' + noun[0] + '\\n\\n' \\\n + aan[0] + ' ' + adj[0] \\\n + ' ' + noun[0] \\\n + ' ' + verb[0] \\\n + ' ' + prep[0] \\\n + ' the ' + adj[1] \\\n + ' ' + noun[1] + '\\n' \\\n + adv[0] \\\n + ', the ' + noun[0] \\\n + ' ' + verb[1] + '\\n'\\\n + 'the ' + noun[1] \\\n + ' ' + verb[2] \\\n + ' ' + prep [1] \\\n + ' ' + aan[2].lower() + ' ' + adj[2] \\\n + ' ' + noun[2]",
"def simplify(self):\n #c = 0\n simp_sentences = []\n for s in self.sentences:\n\n #print \"Original: \" + s\n \n simp_sentences.append(self.transformation(s, ''))\n\n ## for demonstration purposes only. remove the prints later\n #print \"Simplified: \",\n #print simp_sentences[c]\n #c+=1\n\n #print \n return simp_sentences",
"def scenario_emissions_rule(_m):\r\n\r\n return sum(m.e[g, t] * m.EMISSIONS_RATE[g] for g in m.G_THERM for t in m.T)",
"def to_true_stress(true_stress, stretch):\n return true_stress",
"def linecost(n,M,i,j,lenghts):\n extras = M - j + i - sum(lenghts[i:j+1])\n if extras < 0:\n # Doesn't fit in line\n return INF\n if j==n-1 :\n # Last line\n return 0\n else:\n return extras**2",
"def get_prime_text(self):\n return \"\".join(\n [self.format_example(ex) for ex in self.examples.values()])"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Given the rhyme level n and a syllable (phone) list, count backward within the list to find the nth vowel. Return the (negative) index where it can be located.
|
def get_nth_last_vowel(phones, n):
    vowel_count = 0
    for i in range(1, len(phones) + 1):
        if phones[-i][-1].isdigit():
            vowel_count += 1
            if vowel_count == n:
                return -i
|
[
"def _last_vowel(syllable):\n for i in range(len(syllable) - 1, -1, -1):\n if syllable[i] in _SHORT_VOWELS or syllable[i] in _LONG_VOWELS:\n return i\n return -1",
"def count_syllables(word):\n word_lower = word.lower () #make function non-case sensetive\n vowels = 'aeiouy'\n vowels_position = []\n i = 0\n while i < len (word_lower):\n if word_lower [i] in vowels:\n vowels_position += [i] #put each vowel posistion in a list\n i += 1\n syllables = len (vowels_position)\n k = 0\n while k < len (vowels_position) - 1:\n if vowels_position [k+1] - vowels_position [k] == 1: #vowels next to each other\n syllables -= 1 #no new syllable, substract one from previous total\n k += 1\n if word_lower [-1] == 'e' and word_lower [-2] not in vowels: # 'e' is last and preceded by non-vowel\n syllables -= 1 #no new syllable, substract one from previous total\n syllables_final = max (syllables, 1) #at least one syllable\n return (syllables_final)",
"def max_pinyin_length(name):\n num_syllables = 0\n try:\n num_syllables = nsyl(name)[0]\n except:\n pass\n hyphenated = dic.inserted(name).split('-')\n hyph_count = len(hyphenated)\n # add 1 for each consonant cluster\n for h in hyphenated:\n for i in range(len(h)):\n bgram = h[i:min(len(h), i+2)]\n if len(bgram) == 2 and not search_utils.is_vowel(bgram[0]) and not search_utils.is_vowel(bgram[1]) and bgram[1] != 'y':#not in ok_clusters:\n hyph_count += 1\n num_syllables += 1\n\n # starts with vowel\n if search_utils.is_vowel(hyphenated[0][0]):\n hyph_count += 1\n num_syllables += 1\n\n # has some commonly messed up letter combos :(\n if \"ia\" in name or \"oi\" in name or \"oui\" in name:\n hyph_count += 1\n num_syllables += 1\n \n return max(hyph_count, num_syllables)",
"def n_terminal_helix(ss, maxend=5):\n i = 0\n while i < len(ss) and ss[i] == 'L' and ss[i] != 'E':\n i += 1\n nbeg = i\n while i < len(ss) and ss[i] == 'H':\n i += 1\n if not i: return None\n nend = i - 1 if ss[i - 1] == 'H' and i else i - 2\n # s = \"\".join(a[1] for a in ss1)\n nbeg, nend = nbeg + 1, nend - 1\n if nbeg >= nend or nbeg > maxend: return None\n nend = min(nend, nbeg + 13)\n return nbeg, nend",
"def f2_find_NthOccurrence_of_char(withinStr, targetChar, N):\r\n if N > withinStr.count(targetChar):\r\n return(-1)\r\n occNo = 0\r\n for i in range(0, len(withinStr)):\r\n if withinStr[i: i+1] == targetChar:\r\n occNo = occNo + 1\r\n if occNo == N:\r\n return(i)",
"def upper_index(char):\n upper = upper_list()\n for index, value in enumerate(upper):\n if char == value:\n return index",
"def ind(e, L):\n if L == []:\n return 0.0\n def ind2(e, L, n):\n if L == []:\n return n\n if L == '':\n return n\n if e == L[0]:\n return n\n return ind2(e, L[1:], n + 1)\n return ind2(e, L, 0)",
"def vowel_indices(word):\n return [i + 1 for i, j in enumerate(word) if j.lower() in \"aeiouy\"]",
"def nth_letter(n):\n LETTERS = string.ascii_lowercase\n # Account for zero indexing.\n letter = LETTERS[n - 1]\n return letter",
"def __get_patriot_or_ivy_college_index(self, player):\r\n index = 0\r\n for chunk in player.split():\r\n if '(' in chunk:\r\n break\r\n index += 1\r\n return index",
"def Word_Point(word):\r\n if len(word) == 0:\r\n return None\r\n vowels = 'aeiouyAEIOUY'\r\n center = len(word)/2\r\n pattern = []\r\n i = 0\r\n while i<center:\r\n pattern = [i, -i]+pattern\r\n i+=1\r\n #print pattern\r\n for i in pattern:\r\n if word[i] in vowels:\r\n i_abs = i%len(word)\r\n return i_abs\r\n return center",
"def look_and_say(n):\n\n # Base cases\n if (n == 1):\n return \"1\"\n if (n == 2):\n return \"11\"\n\n prev_term = \"11\"\n\n for i in range(3, n + 1):\n # Add a dummy character to allow extra iteration\n # without this, your very first loop will exit\n prev_term += '$'\n seq_end = len(prev_term)\n\n count = 1\n seq_n = ''\n\n for j in range(1, seq_end):\n if (prev_term[j] != prev_term[j - 1]):\n seq_n += str(count)\n seq_n += prev_term[j - 1]\n count = 1\n else:\n count += 1\n\n print('\\n LNS: ', seq_n)\n prev_term = seq_n\n\n print('\\n')\n return prev_term",
"def lastposary(self, chtr, istart):\n if istart < 1:\n return 0 # -1 #ejf\n if istart > self.linmax:\n return 0 # -1 #ejf\n\n I = istart\n while 1 <= I:\n if self.Linary[I] == chtr:\n return I\n I -= 1\n return 0",
"def get_n_grams_ind(s, n):\n n_grams = []\n for i in range(0, len(s) - n + 1):\n n_grams.append((i, s[i:i+n]))\n return n_grams",
"def first_vowel(s):\n result=len(s) # In case there is no 'a'\n \n if 'a' in s:\n result=introcs.find_str(s,'a')\n if 'e' in s[:result]:\n result=introcs.find_str(s,'e')\n if 'i' in s[:result]:\n result=introcs.find_str(s,'i')\n if 'o' in s[:result]:\n result=introcs.find_str(s,'o')\n if 'u' in s[:result]:\n result=introcs.find_str(s,'u')\n if 'y' in s[1:result]:\n result=introcs.find_str(s,'y')\n \n return -1 if (result==len(s) and 'y' not in s[1:]) else result",
"def count_code(str):\n result = 0\n i = 0\n while i < len(str) - 3:\n word = str[i:i+4]\n if word[:2] == 'co' and word[-1] == 'e':\n result += 1\n i += 4\n else:\n i += 1\n return result",
"def nth_char(char_map, index):\n for char in char_map:\n if index < char_map[char]:\n return char\n index = index - char_map[char]\n return None",
"def n_neg(seq):\n\n # convert sequence to upper case\n seq = seq.upper()\n\n # check for a valid sequence\n for aa in seq:\n if aa not in bd.aa.keys():\n raise RuntimeError(aa + ' is not a valid amino acid.')\n\n # count E and D and retrun the count\n return seq.count('E') + seq.count('D')",
"def aparitii(lst, n):\n nr = 0\n for i in lst:\n if i == n:\n nr = nr + 1\n return nr"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
For each word, get a list of various syllabic pronunciations. Then check whether the last level syllables are pronounced the same. If so, the words probably rhyme
|
def rhymes(word1, word2, level=2):
    pronunciations = get_syllables(word1)
    pronunciations2 = get_syllables(word2)
    if not (pronunciations and pronunciations2):
        return False
    # Work around some limitations of CMU
    equivalents = {"ER0": "R"}
    def replace_syllables(syllables):
        return [equivalents[syl] if syl in equivalents else syl for syl in syllables]
    for syllables in pronunciations:
        syllables = replace_syllables(syllables)
        # If word only has a single vowel (e.g. 'stew'), then we reduce this to 1, otherwise we won't find a monosyllabic rhyme
        if num_vowels(syllables) < level:
            level = num_vowels(syllables)
        vowel_idx = get_nth_last_vowel(syllables, level)  # Default number of syllables to check back from
        for syllables2 in pronunciations2:
            syllables2 = replace_syllables(syllables2)
            if syllables[vowel_idx:] == syllables2[vowel_idx:]:
                return True
    return False
|
[
"def doesRhyme(self, word1, word2):\n\t\tif word1 == word2:\n\t\t\treturn 0\n\n\t\tpron1 = []\n\t\tpron2 = []\n\t\tif word1 in self.pronDict:\n\t\t\tpron1 = self.pronDict[word1][0]\n\t\t\tpron1 = [filter(lambda x: re.sub(\"[^a-zA-Z]\", '', x), str(lex)) for lex in pron1]\n\t\telse:\n\t\t\ti = 0\n\t\t\twhile i < len(word1):\n\t\t\t\tif word1[i] in self.vowelSet:\n\t\t\t\t\tpron1.append(self.mappingDictionary[word1[i]])\n\t\t\t\t\twhile i < len(word1) and word1[i] in self.vowelSet:\n\t\t\t\t\t\ti += 1\n\t\t\t\telse:\n\t\t\t\t\tj = i + 1\n\t\t\t\t\twhile j < len(word1) and word1[j] not in self.vowelSet:\n\t\t\t\t\t\tj += 1\n\t\t\t\t\tpron1.append(word1[i:j].upper())\n\t\t\t\t\ti = j\n\n\t\tif word2 in self.pronDict:\n\t\t\tpron2 = self.pronDict[word2][0]\n\t\t\tpron2 = [filter(lambda x: re.sub(\"[^a-zA-Z]\", '', x), str(lex)) for lex in pron2]\n\t\telse:\n\t\t\ti = 0\n\t\t\twhile i < len(word2):\n\t\t\t\tif word2[i] in self.vowelSet:\n\t\t\t\t\tpron2.append(self.mappingDictionary[word2[i]])\n\t\t\t\t\twhile i < len(word2) and word2[i] in self.vowelSet:\n\t\t\t\t\t\ti += 1\n\t\t\t\telse:\n\t\t\t\t\tj = i + 1\n\t\t\t\t\twhile j < len(word2) and word2[j] not in self.vowelSet:\n\t\t\t\t\t\tj += 1\n\t\t\t\t\tpron2.append(word2[i:j].upper())\n\t\t\t\t\ti = j\n\n\t\tnumMatchingVowelSyllables = 0\n\t\tif not pron1 or not pron2:\n\t\t\treturn numMatchingVowelSyllables\n\n\t\treverseIndex = -1\n\t\twhile abs(reverseIndex) <= len(pron1) and abs(reverseIndex) <= len(pron2):\n\t\t\tif pron1[reverseIndex] != pron2[reverseIndex]:\n\t\t\t\tbreak\n\t\t\tnumMatchingVowelSyllables += 1\n\t\t\treverseIndex -= 1\n\n\t\treturn numMatchingVowelSyllables",
"def syllables (self):\n if self._syllables:\n return self._syllables\n raw_syllables = SYLLABLES.get_syllables(self.clean_text, resolutions=True)\n syllables = [Syllable(i, s) for i, s in enumerate(raw_syllables)]\n # Assemble data about the containing word for each syllable\n word_data_list = []\n for w in self.words:\n data = (w.text, w.number, w.lemma, w.POS, w.tags)\n # If two words are joined by a resolution, the data of the SECOND word\n # is retroactively assigned to that resolved syllable, but the tags\n # of both are combined.\n if w.initial_resolution:\n previous_tags = word_data_list[-1][-1]\n combined_tags = w.tags + previous_tags\n combined_data = data[:-1] + (combined_tags,)\n word_data_list = word_data_list[:-1]\n word_data_list.append(combined_data)\n word_data_list.extend([data] * w.syl_count)\n # Assemble data about the containing line for each syllable\n line_data_list = []\n for l in self.raw_lines:\n data = (l.number, l.corrupt, l.tags)\n line_data_list.extend([data]*l.syl_count)\n # Update each syllable with word, line and stanza data\n for i, s in enumerate(syllables):\n s.number = i\n s.stanza = self.name\n s.stanza_tags = self.tags\n s.prosody = self.meter[i]\n s.meter = self.meter[i] #[ADDED FOR CONVENIENCE]\n s.word, s.word_number, s.lemma, s.POS, s.word_tags = word_data_list[i]\n s.line_number, s.corrupt, s.line_tags = line_data_list[i]\n # Assemble and add contour data\n contours = self._get_contours(syllables)\n for i, s in enumerate(syllables):\n s.contour = contours[i]\n self._syllables = syllables\n return syllables",
"def num_syllables(self, word):\r\n if self._pronunciations.has_key(word):\r\n return min([len(list(y for y in x if y[-1].isdigit())) for x in self._pronunciations[word.lower()]])\r\n else:\r\n return 1",
"def check_hypernym(word, word2):\n l_syns = list()\n synsets = wn.synsets(word2)\n \n for synset in synsets:\n\t\tfor hypernym in synset.hypernyms():\n\t\t\tfor ss in hypernym.lemmas: \n\t\t\t\tif word == ss.name:\n\t\t\t\t\t l_syns.append( (word, word2) )\n\t\t\t\t\t #print l_syns\n\t\t\t\t\t return l_syns\t\n return l_syns",
"def syllabify_word(self, word):\n word_syllables = self.syllable.findall(word)\n if word_syllables:\n return [s for s in word_syllables]\n else:\n return [word]",
"def count_syllables(word):\n word_lower = word.lower () #make function non-case sensetive\n vowels = 'aeiouy'\n vowels_position = []\n i = 0\n while i < len (word_lower):\n if word_lower [i] in vowels:\n vowels_position += [i] #put each vowel posistion in a list\n i += 1\n syllables = len (vowels_position)\n k = 0\n while k < len (vowels_position) - 1:\n if vowels_position [k+1] - vowels_position [k] == 1: #vowels next to each other\n syllables -= 1 #no new syllable, substract one from previous total\n k += 1\n if word_lower [-1] == 'e' and word_lower [-2] not in vowels: # 'e' is last and preceded by non-vowel\n syllables -= 1 #no new syllable, substract one from previous total\n syllables_final = max (syllables, 1) #at least one syllable\n return (syllables_final)",
"def getSyllables(word):\n\tsyllables = []\n\tsyl = []\n\texp = Base.explode(word)\n\n\tfor i in range(len(exp)):\n\t\tc = exp[i]\n\t\tsyl.append(c)\n\t\tif i < len(exp) - 1:\n\t\t\tif Alphabet.isConsonant(c) and Alphabet.isConsonant(exp[i + 1]):\n\t\t\t\tsyllables.append(syl)\n\t\t\t\tsyl = []\n\tsyllables.append(syl)\n\n\tsyl = []\n\tsyl2 = []\n\tfor s in syllables:\n\t\tfor i in range(len(s)):\n\t\t\tif Alphabet.isConsonant(s[i]) and (i > 0 and i < len(s) - 1):\n\t\t\t\tif Alphabet.isVowel(s[i - 1]) and Alphabet.isVowel(s[i + 1]):\n\t\t\t\t\tsyl2.append(syl)\n\t\t\t\t\tsyl = []\n\t\t\tsyl.append(s[i])\n\t\tsyl2.append(syl)\n\t\tsyl = []\n\treturn syl2",
"def finalTokenize(self,syllables):\n special_word = ['thành phố']\n special_word_time = ['lúc','vào lúc','vào thời điểm','thời điểm']\n city_name = ['hồ chí minh', 'đà nẵng', 'huế', 'hà nội']\n index = 0\n while (index < len(syllables) - 1):\n curr_word = syllables[index].lower()\n if curr_word in special_word:\n next_word = syllables[index + 1].lower()\n if next_word in city_name:\n syllables[index:(index+2)] = [syllables[index] + ' ' + syllables[index+1]]\n index += 1\n return syllables",
"def syllable_count(word: str):\n if len(word.split()) > 1:\n return [syllable_count(w) for w in word.split()]\n word = G2pModel.get_cmu([G2pModel.preprocess(word)])\n return cmu_syllable_count(word[0][0])",
"def analyze_syllables(word_lists):\n stresses = poetrytools.scanscion(word_lists)\n\n # deep copy\n syllables = copy.deepcopy(stresses)\n for i in range(len(syllables)):\n for j in range(len(syllables[i])):\n syllables[i][j] = len(syllables[i][j])\n\n return stresses, syllables",
"def printrhyme(self, word, upto=3):\n y=self.getword(word) if word in self.dictionary else word\n print y+\"\\n------------\\n\"+\"\\n\".join(self.rhyme(word, upto))",
"def is_limerick(self, text):\n # TODO: provide an implementation!\n processed_text = text.split(\"\\n\")\n processed_text = [word_tokenize(line.strip()) for line in processed_text if len(line.strip())!=0]\n\n if len(processed_text)!=5:\n \treturn False\n\n for line in processed_text:\n \tword_ind = 0\n \twhile word_ind<len(line):\n \t\tcount = 0\n \t\tfor char in line[word_ind]:\n \t\t\tif not char.isalpha():\n \t\t\t\tcount += 1\n\n \t\tif count==len(line[word_ind]):\n \t\t\tdel line[word_ind]\n \t\t\tword_ind = 0\n \t\t\tcontinue\n \t\tword_ind += 1\n\n total_syllables_first_line = 0\n total_syllables_second_line = 0\n total_syllables_third_line = 0\n total_syllables_fourth_line = 0\n total_syllables_fifth_line = 0\n min_syllables = 0\n min_a_line_syllables = 0\n max_b_line_syllables = 0\n\n for word in processed_text[0]:\n \ttotal_syllables_first_line += self.num_syllables(word)\n min_syllables = total_syllables_first_line\n min_a_line_syllables = total_syllables_first_line\n\n for word in processed_text[1]:\n \ttotal_syllables_second_line += self.num_syllables(word)\n min_syllables = min(min_syllables, total_syllables_second_line)\n min_a_line_syllables = min(min_a_line_syllables, total_syllables_second_line)\n\n for word in processed_text[2]:\n \ttotal_syllables_third_line += self.num_syllables(word)\n min_syllables = min(min_syllables, total_syllables_third_line)\n max_b_line_syllables = total_syllables_third_line\n\n for word in processed_text[3]:\n \ttotal_syllables_fourth_line += self.num_syllables(word)\n min_syllables = min(min_syllables, total_syllables_fourth_line)\n max_b_line_syllables = max(max_b_line_syllables, total_syllables_fourth_line)\n\n for word in processed_text[4]:\n \ttotal_syllables_fifth_line += self.num_syllables(word)\n min_syllables = min(min_syllables, total_syllables_fifth_line)\n min_a_line_syllables = min(min_a_line_syllables, total_syllables_fifth_line)\n\n # print min_syllables, min_a_line_syllables, max_b_line_syllables\n # print total_syllables_first_line, total_syllables_second_line, total_syllables_third_line, total_syllables_fourth_line, total_syllables_fifth_line\n if min_syllables<4 or max_b_line_syllables>=min_a_line_syllables:\n \treturn False\n\n if abs(total_syllables_first_line - total_syllables_second_line)>2 or abs(total_syllables_first_line - total_syllables_fifth_line)>2 or abs(total_syllables_fifth_line - total_syllables_second_line)>2:\n \treturn False\n\n if abs(total_syllables_third_line - total_syllables_fourth_line)>2:\n \treturn False\n\n first_word = processed_text[0][-1]\n second_word = processed_text[1][-1]\n third_word = processed_text[2][-1]\n fourth_word = processed_text[3][-1]\n fifth_word = processed_text[4][-1]\n\n if self.rhymes(first_word, second_word) and self.rhymes(second_word, fifth_word) and self.rhymes(first_word, fifth_word) and self.rhymes(third_word, fourth_word):\n \tif not self.rhymes(first_word, third_word) and not self.rhymes(second_word, third_word) and not self.rhymes(fifth_word, third_word):\n \t\tif not self.rhymes(first_word, fourth_word) and not self.rhymes(second_word, fourth_word) and not self.rhymes(fifth_word, fourth_word):\n \t\t\treturn True\n\n return False",
"def is_limerick(self, text):\r\n lines = text.strip().split(\"\\n\")\r\n\r\n if len(lines) is not 5:\r\n return False\r\n\r\n line_count = 1\r\n punctuations_to_delete = set(string.punctuation)\r\n dict = {}\r\n\r\n for line in lines:\r\n line_strip = line.strip()\r\n words = nltk.word_tokenize(line_strip)\r\n # words = self.apostrophe_tokenize(line_strip)\r\n\r\n words = [x for x in words if x and x not in punctuations_to_delete]\r\n last_word = words[-1].lower()\r\n\r\n if line_count == 1 or line_count == 2 or line_count == 5:\r\n line_group = 'A'\r\n else:\r\n line_group = 'B'\r\n\r\n syllables_in_line = 0;\r\n for word in words:\r\n syllables_in_token = self.num_syllables(word.lower())\r\n syllables_in_line = syllables_in_line + syllables_in_token\r\n\r\n dict[line_count] = [line_group, syllables_in_line, last_word]\r\n line_count = line_count + 1\r\n\r\n\r\n # Limerick Check\r\n groupA_word1 = dict.get(1)[2]\r\n groupA_word2 = dict.get(2)[2]\r\n groupA_word3 = dict.get(5)[2]\r\n groupB_word1 = dict.get(3)[2]\r\n groupB_word2 = dict.get(4)[2]\r\n\r\n syllables_line1 = dict.get(1)[1]\r\n syllables_line2 = dict.get(2)[1]\r\n syllables_line3 = dict.get(3)[1]\r\n syllables_line4 = dict.get(4)[1]\r\n syllables_line5 = dict.get(5)[1]\r\n\r\n # Check rhymming\r\n if self.rhymes(groupA_word1, groupA_word2) and self.rhymes(groupA_word2, groupA_word3) and self.rhymes(groupA_word1, groupA_word3) and self.rhymes(groupB_word1, groupB_word2):\r\n\r\n # Check if no of syllables of all line >=4\r\n if syllables_line1 >= 4 and syllables_line2 >=4 and syllables_line3 >= 4 and syllables_line4 >= 4 and syllables_line5 >= 4:\r\n\r\n # Check if syllables of group B is always less than each of the lines of group A\r\n if syllables_line3 < syllables_line1 and syllables_line3 < syllables_line2 and syllables_line3 < syllables_line5 and syllables_line4 < syllables_line1 and syllables_line4 <syllables_line2 and syllables_line4 < syllables_line5:\r\n\r\n # Check if group A syllables differ by less than 2\r\n if syllables_line1 - syllables_line2 <= 2 and syllables_line2 - syllables_line5 <= 2 and syllables_line1 - syllables_line5 <= 2:\r\n\r\n # Check if group B syllables differ by less than 2\r\n if syllables_line3 - syllables_line4 <= 2:\r\n return True\r\n return False",
"def spellings(self):\n scale_candidates = []\n nb_alt_prev = 7\n for tonic_base in self.tonic.closest_white_keys():\n note_names = []\n bad = False\n nb_alt = 0\n for i, cur_base in enumerate(Note.whites_from(tonic_base)):\n cur_note = self.notes[i]\n name = cur_note.name_with_base_white(cur_base)\n note_names.append(name)\n\n if Note.sharp_sym in name or Note.flat_sym in name:\n nb_alt += 1\n if Note.sharp_sym * 2 in name or Note.flat_sym * 2 in name:\n bad = True\n\n if not bad:\n if nb_alt < nb_alt_prev:\n scale_candidates = []\n scale_candidates.append(tuple(note_names))\n nb_alt_prev = nb_alt\n\n return scale_candidates",
"def getRhymes(self):\n # Before checking for rhymes, remove sentence final punctuation\n # and blank lines to ensure correct rhyme detection\n punctuation = \"[^0-9A-Za-z]\"\n i = 0\n self.stanzas = 1\n # Use while to iterate over the verses since the deletion of lines\n # changes the length of the list\n while i < len(self.verses):\n if not len(self.verses[i]):\n # Delete blank lines/stanza delimiters\n self.verses.pop(i)\n self.stanzas += 1\n else:\n # Delete verse final punctuation so it won't mess with the rhyme scheme\n self.verses[i][-1] = re.sub(punctuation,\n \"\", self.verses[i][-1])\n if not len(self.verses[i][-1]):\n self.verses[i].pop()\n # Lines only consisting of punctuation are deleted for the same reason\n if not len(self.verses[i]):\n self.verses.pop(i)\n continue\n i += 1\n\n # The rhyme scheme is constructed with the first four sentences of the poem\n rhymes = [\"a\", \"b\", \"c\", \"d\"]\n # Initialize unmatched verses and rhyme scheme bins\n verses = {i for i in range(len(self.verses))}\n bins = {}\n # Look at the first four verses\n for i in range(min(len(self.verses), 4)):\n # Each verse that is not yet matched to a rhyme scheme is\n # matched to the next rhyme placeholder (\"a\", \"b\", ...) and deleted\n # from the unmatched verses\n if i in verses:\n verses.remove(i)\n schema = rhymes.pop(0)\n bins[i] = schema\n # The subsequent verses are checked against the current verse\n for j in range(i+1, min(len(self.verses), 4)):\n # For the last word of the two current verses check if\n # they are identical or if either one is in the rhyme set\n # of the other (this is necessary as pronouncing uses a\n # dictionary to check rhymes, whose entries are not always\n # identical sets)\n if self.verses[j][-1] in pronouncing.rhymes(self.verses[i][-1]) \\\n or self.verses[i][-1] in pronouncing.rhymes(self.verses[j][-1]) \\\n or self.verses[i][-1] == self.verses[j][-1]:\n # Assign an unmatched subsequent verse to the current rhyme scheme\n # and delete it from the unmatched set\n if j in verses:\n bins[j] = schema\n verses.remove(j)\n # If the subsequent verse is already matched to a rhyme\n # scheme, this means that the pronouncing rhyme sets of\n # a previous verse included it but not the current verse\n else:\n # Use the older rhyme scheme instead of the current one\n schema_j = bins[j]\n bins[i] = schema_j\n # Reassign all verses with the current rhyme scheme\n # to the older one (including the current verse)\n for k in bins:\n if bins[k] == schema:\n bins[k] = schema_j\n # Rejoin the current rhyme scheme to the rhyme placeholders\n schema_j = [schema_j]\n schema_j.extend(rhymes)\n rhymes = schema_j\n # Extend poems under four lines with dummy placeholders\n if len(self.verses) < 4:\n for i in range(len(self.verses), 4):\n bins[i] = \"x\"\n # Construct the rhyme scheme by concatenating the rhyme placeholders\n # of the first four lines\n self.rhyme_scheme = bins[0]+bins[1]+bins[2]+bins[3]",
"def test_no_syllabic_requirements(self):\n poem_lines = ['The first line leads off,',\n 'With a gap before the next.']\n pattern = ([0, 0], ['*', '*'])\n expected_list = []\n self.assertEqual(check_syllables(poem_lines,pattern,word_to_phonemes),\n expected_list, 'No syllabic requirements')",
"def syl_filter(word, min_syllables, max_syllables):\n return min_syllables <= word[\"numSyllables\"] <= max_syllables",
"def syllableMatches(syl, form):\n\t# FIXME this is not necessarily done on a syllable by syllable basis. sometimes it\n\t# can overlap boundaries\n\tsylMatches = False\n\n\t# better handling of 'ng' and other double letters\n\tsyl = Base.explode(syl)\n\tform = Base.explode(form)\n\n\tsyl = syl[::-1]\n\tform = form[::-1]\n\n\t#FIXME need to write test about if form longer than syl has correct behavior\n\t#FIXME optional '[]' letters should not increment i\n\tinBrackets = False\n\tj = 0\n\n\tif len(syl) > 0 and len(syl) >= len(form):\n\t\tfor i in range(len(form)):\n\t\t\tif i <= j:\n\t\t\t\tif inBrackets:\n\t\t\t\t\t# FIXME not sure if there is really anything to do but ignore\n\t\t\t\t\tif form[i] == '[':\n\t\t\t\t\t\tinBrackets = False\n\t\t\t\t\t\tj += 1\n\t\t\t\telse:\n\t\t\t\t\tif form[i] == 'V' and Alphabet.isVowel(syl[j]):\n\t\t\t\t\t\tsylMatches = True\n\t\t\t\t\t\tj += 1\n\t\t\t\t\telif form[i] == 'C' and Alphabet.isConsonant(syl[j]):\n\t\t\t\t\t\tsylMatches = True\n\t\t\t\t\t\tj += 1\n\t\t\t\t\telif form[i] == syl[j]:\n\t\t\t\t\t\t#FIXME this may have some false positives\n\t\t\t\t\t\tsylMatches = True\n\t\t\t\t\t\tj += 1\n\t\t\t\t\telif form[i] == ']':\n\t\t\t\t\t\t# we are reversed, so close brackets = open brackets\n\t\t\t\t\t\tinBrackets = True\n\t\t\t\t\telse:\n\t\t\t\t\t\tsylMatches = False\n\t\t\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tbreak\n\treturn sylMatches",
"def get_syllables(word):\n\n try:\n return CMU[word.lower()]\n except KeyError:\n return [[]]"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Get a rhyme scheme for the poem. For each line, look ahead to the later lines of the poem and see whether their last words rhyme.
|
def rhyme_scheme(tokenized_poem):
num_lines = len(tokenized_poem)
# By default, nothing rhymes
scheme = ['X'] * num_lines
rhyme_notation = list(ascii_lowercase)
currrhyme = -1 # Index into the rhyme_notation
for lineno in range(0, num_lines):
matched = False
for futurelineno in range(lineno + 1, num_lines):
# If next line is not already part of a rhyme scheme
if scheme[futurelineno] == 'X':
base_line = tokenized_poem[lineno]
current_line = tokenized_poem[futurelineno]
if base_line == ['']: # If blank line, represent that in the notation
scheme[lineno] = ' '
elif rhymes(base_line[-1], current_line[-1]):
if not matched: # Increment the rhyme notation
matched = True
currrhyme += 1
if base_line == current_line: # Capitalise rhyme if the whole line is identical
scheme[lineno] = scheme[futurelineno] = rhyme_notation[currrhyme].upper()
else:
scheme[lineno] = scheme[futurelineno] = rhyme_notation[currrhyme]
return scheme
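A minimal usage sketch of rhyme_scheme (assumptions: the poem is tokenized as one list of word tokens per line, blank lines are represented as [''], and a rhymes(word1, word2) helper such as the one appearing elsewhere in this collection is available; the sample poem and the printed result depend on that helper):

poem = [['Roses', 'are', 'red'],
        ['Violets', 'are', 'blue'],
        ['Sugar', 'is', 'sweet'],
        ['And', 'so', 'are', 'you']]
# If rhymes('blue', 'you') returns True, this prints ['X', 'a', 'X', 'a']
print(rhyme_scheme(poem))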
|
[
"def getRhymes(self):\n # Before checking for rhymes, remove sentence final punctuation\n # and blank lines to ensure correct rhyme detection\n punctuation = \"[^0-9A-Za-z]\"\n i = 0\n self.stanzas = 1\n # Use while to iterate over the verses since the deletion of lines\n # changes the length of the list\n while i < len(self.verses):\n if not len(self.verses[i]):\n # Delete blank lines/stanza delimiters\n self.verses.pop(i)\n self.stanzas += 1\n else:\n # Delete verse final punctuation so it won't mess with the rhyme scheme\n self.verses[i][-1] = re.sub(punctuation,\n \"\", self.verses[i][-1])\n if not len(self.verses[i][-1]):\n self.verses[i].pop()\n # Lines only consisting of punctuation are deleted for the same reason\n if not len(self.verses[i]):\n self.verses.pop(i)\n continue\n i += 1\n\n # The rhyme scheme is constructed with the first four sentences of the poem\n rhymes = [\"a\", \"b\", \"c\", \"d\"]\n # Initialize unmatched verses and rhyme scheme bins\n verses = {i for i in range(len(self.verses))}\n bins = {}\n # Look at the first four verses\n for i in range(min(len(self.verses), 4)):\n # Each verse that is not yet matched to a rhyme scheme is\n # matched to the next rhyme placeholder (\"a\", \"b\", ...) and deleted\n # from the unmatched verses\n if i in verses:\n verses.remove(i)\n schema = rhymes.pop(0)\n bins[i] = schema\n # The subsequent verses are checked against the current verse\n for j in range(i+1, min(len(self.verses), 4)):\n # For the last word of the two current verses check if\n # they are identical or if either one is in the rhyme set\n # of the other (this is necessary as pronouncing uses a\n # dictionary to check rhymes, whose entries are not always\n # identical sets)\n if self.verses[j][-1] in pronouncing.rhymes(self.verses[i][-1]) \\\n or self.verses[i][-1] in pronouncing.rhymes(self.verses[j][-1]) \\\n or self.verses[i][-1] == self.verses[j][-1]:\n # Assign an unmatched subsequent verse to the current rhyme scheme\n # and delete it from the unmatched set\n if j in verses:\n bins[j] = schema\n verses.remove(j)\n # If the subsequent verse is already matched to a rhyme\n # scheme, this means that the pronouncing rhyme sets of\n # a previous verse included it but not the current verse\n else:\n # Use the older rhyme scheme instead of the current one\n schema_j = bins[j]\n bins[i] = schema_j\n # Reassign all verses with the current rhyme scheme\n # to the older one (including the current verse)\n for k in bins:\n if bins[k] == schema:\n bins[k] = schema_j\n # Rejoin the current rhyme scheme to the rhyme placeholders\n schema_j = [schema_j]\n schema_j.extend(rhymes)\n rhymes = schema_j\n # Extend poems under four lines with dummy placeholders\n if len(self.verses) < 4:\n for i in range(len(self.verses), 4):\n bins[i] = \"x\"\n # Construct the rhyme scheme by concatenating the rhyme placeholders\n # of the first four lines\n self.rhyme_scheme = bins[0]+bins[1]+bins[2]+bins[3]",
"def rhyme(self, msg, args):\n return self._get_related_words(args[0], 'rhyme')",
"def rhymeSayer(content):\n if not content:\n return ''\n\n content_array = content.rsplit(' ', 1)\n if len(content_array) > 1:\n line = content_array[0]\n else:\n line = ''\n\n word_to_rhyme = re.sub('[^a-zA-Z]+','', content_array[-1])\n rhyme_list = pronouncing.rhymes(word_to_rhyme)\n\n if not rhyme_list:\n return ''\n\n rhyme = random.choice(rhyme_list)\n return line + ' ' + rhyme",
"def guess_rhyme_type(tokenized_poem):\n\n joined_lines = ''.join(rhyme_scheme(tokenized_poem))\n no_blanks = joined_lines.replace(' ', '')\n\n guessed_rhyme = levenshtein(no_blanks, POSSIBLE_RHYMES)\n return joined_lines, guessed_rhyme",
"def _break_rhyme(word: Word, rhymeset: set, words_rhymed: dict) -> bool:\r\n \r\n return ((o.OPTIONS['M'] and o.OPTIONS['RHYMELESS_M'])\r\n or (word in rhymeset\r\n and words_rhymed.get(word, 0) <= o.OPTIONS['N_SELF_RHYME']))",
"def _get_lines(words: list, rhymeset: set, meterset: set) -> list:\r\n \r\n def _can_break() -> bool:\r\n \"\"\"Helper to determine whether a word is okay to break on.\"\"\"\r\n \r\n # Check appropriate conditions depending on how we're breaking\r\n if o.OPTIONS['R'] and o.OPTIONS['M']:\r\n return (_break_rhyme(word, rhymeset, words_rhymed)\r\n and _break_meter(line_meter, foot, feet_per_line, enjambed))\r\n \r\n elif o.OPTIONS['R']:\r\n return _break_rhyme(word, rhymeset, words_rhymed)\r\n \r\n else:\r\n # Never break just for meter before an unpronounceable word.\r\n if (i < len(words) - 1) and (not words[i+1].pron):\r\n return False\r\n return _break_meter(line_meter, foot, feet_per_line, enjambed) \r\n \r\n foot, feet_per_line = meterset\r\n \r\n lines = []\r\n words_rhymed = {}\r\n \r\n # Begin constructing a line\r\n line = Line()\r\n line_meter = ''\r\n enjambed = False\r\n \r\n for i, word in enumerate(words):\r\n \r\n # Accumulate line \r\n line.append(word) \r\n line_meter += get_symbolic_string([word])\r\n \r\n # Break\r\n if _can_break():\r\n \r\n # Get leftover syllables and check enjambment\r\n extra_sylls = _get_extra_sylls(line_meter, foot, feet_per_line) \r\n if _is_illegal_enjambment(foot, extra_sylls):\r\n return []\r\n \r\n else: \r\n # Register the rhyme\r\n words_rhymed[word] = words_rhymed.get(word, 0) + 1\r\n \r\n # Add and reset the line\r\n lines.append(line)\r\n line = Line()\r\n line_meter = extra_sylls\r\n enjambed = bool(extra_sylls)\r\n \r\n # Catch a last unbroken line if allowed\r\n if line and _can_orphan():\r\n lines.append(line)\r\n \r\n return lines",
"def fetch_rhymes(word):\n if word in CACHED_RHYMES:\n return CACHED_RHYMES[word]\n\n params = {\"rel_rhy\": word, \"md\": \"ps\", \"max\": 1000}\n response = requests.get(API_BASE, params=params)\n rhyming_words = response.json()\n\n CACHED_RHYMES[word] = rhyming_words\n\n return rhyming_words",
"def rhymes(word1, word2, level=2):\n\n pronunciations = get_syllables(word1)\n pronunciations2 = get_syllables(word2)\n\n if not (pronunciations and pronunciations2):\n return False\n\n # Work around some limitations of CMU\n equivalents = {\"ER0\": \"R\"} \n def replace_syllables(syllables):\n return [equivalents[syl] if syl in equivalents else syl for syl in syllables]\n\n for syllables in pronunciations:\n syllables = replace_syllables(syllables)\n # If word only has a single vowel (i.e. 'stew'), then we reduce this to 1 otherwise we won't find a monosyllabic rhyme\n if num_vowels(syllables) < level:\n level = num_vowels(syllables)\n vowel_idx = get_nth_last_vowel(syllables, level) # Default number of syllables to check back from\n\n for syllables2 in pronunciations2:\n syllables2 = replace_syllables(syllables2)\n if syllables[vowel_idx:] == syllables2[vowel_idx:]:\n return True\n\n return False",
"def printrhyme(self, word, upto=3):\n y=self.getword(word) if word in self.dictionary else word\n print y+\"\\n------------\\n\"+\"\\n\".join(self.rhyme(word, upto))",
"def get_poems(words: list) -> list:\r\n \r\n poems = set()\r\n \r\n # If we're not breaking on rhyme or meter, we have nothing to break on\r\n if not (o.OPTIONS['R'] or o.OPTIONS['M']):\r\n return poems\r\n \r\n # Otherwise we're in business\r\n else:\r\n # All possible pairs of (rhymeset, meterset)\r\n rhyme_meter_pairs = _get_rhyme_meter_pairs(words)\r\n \r\n # For each pair, add the poem it yields\r\n for pair in rhyme_meter_pairs:\r\n lines =_get_lines(words, *pair)\r\n poem = Poem(lines)\r\n poems |= {poem} if poem else set()\r\n \r\n return poems",
"def doesRhyme(self, word1, word2):\n\t\tif word1 == word2:\n\t\t\treturn 0\n\n\t\tpron1 = []\n\t\tpron2 = []\n\t\tif word1 in self.pronDict:\n\t\t\tpron1 = self.pronDict[word1][0]\n\t\t\tpron1 = [filter(lambda x: re.sub(\"[^a-zA-Z]\", '', x), str(lex)) for lex in pron1]\n\t\telse:\n\t\t\ti = 0\n\t\t\twhile i < len(word1):\n\t\t\t\tif word1[i] in self.vowelSet:\n\t\t\t\t\tpron1.append(self.mappingDictionary[word1[i]])\n\t\t\t\t\twhile i < len(word1) and word1[i] in self.vowelSet:\n\t\t\t\t\t\ti += 1\n\t\t\t\telse:\n\t\t\t\t\tj = i + 1\n\t\t\t\t\twhile j < len(word1) and word1[j] not in self.vowelSet:\n\t\t\t\t\t\tj += 1\n\t\t\t\t\tpron1.append(word1[i:j].upper())\n\t\t\t\t\ti = j\n\n\t\tif word2 in self.pronDict:\n\t\t\tpron2 = self.pronDict[word2][0]\n\t\t\tpron2 = [filter(lambda x: re.sub(\"[^a-zA-Z]\", '', x), str(lex)) for lex in pron2]\n\t\telse:\n\t\t\ti = 0\n\t\t\twhile i < len(word2):\n\t\t\t\tif word2[i] in self.vowelSet:\n\t\t\t\t\tpron2.append(self.mappingDictionary[word2[i]])\n\t\t\t\t\twhile i < len(word2) and word2[i] in self.vowelSet:\n\t\t\t\t\t\ti += 1\n\t\t\t\telse:\n\t\t\t\t\tj = i + 1\n\t\t\t\t\twhile j < len(word2) and word2[j] not in self.vowelSet:\n\t\t\t\t\t\tj += 1\n\t\t\t\t\tpron2.append(word2[i:j].upper())\n\t\t\t\t\ti = j\n\n\t\tnumMatchingVowelSyllables = 0\n\t\tif not pron1 or not pron2:\n\t\t\treturn numMatchingVowelSyllables\n\n\t\treverseIndex = -1\n\t\twhile abs(reverseIndex) <= len(pron1) and abs(reverseIndex) <= len(pron2):\n\t\t\tif pron1[reverseIndex] != pron2[reverseIndex]:\n\t\t\t\tbreak\n\t\t\tnumMatchingVowelSyllables += 1\n\t\t\treverseIndex -= 1\n\n\t\treturn numMatchingVowelSyllables",
"def generate_hairstyle(hairstyle_attributes, is_male):\n\n hair_type = {\"Bald\", \"Straight_Hair\", \"Wavy_Hair\", \"Receding_Hairline\"}\n\n # To create grammatically correct order of description\n arranged_attributes = []\n colours = list(set(hairstyle_attributes) - hair_type)\n if len(colours) > 1:\n # Combines two colours into one attribute\n colour = \"\"\n for i, _colour in enumerate(colours):\n if i == 0:\n _colour = _colour.lower().split(\"_\")[0] + \"ish\"\n _colour = _colour.lower().split(\"_\")[0]\n colour += _colour + \" \"\n arranged_attributes.append(\n colour.strip()\n ) # Strip to remove trailing whitespace\n elif len(colours) == 1:\n colour = colours[0].lower().split(\"_\")[0]\n arranged_attributes.append(colour)\n style = set(hairstyle_attributes) & {\"Straight_Hair\", \"Wavy_Hair\"}\n arranged_attributes.extend(list(style))\n bald_rec = set(hairstyle_attributes) & {\"Receding_Hairline\", \"Bald\"}\n arranged_attributes.extend(list(bald_rec))\n\n if len(arranged_attributes) == 1:\n attribute = arranged_attributes[0].lower().split(\"_\")[0]\n if attribute == \"bald\":\n return \"He is bald.\" if is_male else \"She is bald.\"\n if random.random() <= 0.5:\n sentence = \"His\" if is_male else \"Her\"\n return sentence + \" hair is \" + attribute + \".\"\n else:\n sentence = \"He\" if is_male else \"She\"\n return sentence + \" has \" + attribute + \" hair.\"\n\n # Adding variation in sentence structure\n if random.random() <= 0.5:\n sentence = \"His\" if is_male else \"Her\"\n sentence += \" hair is\"\n for i, attribute in enumerate(arranged_attributes):\n attribute = attribute.lower().split(\"_\")[0]\n if len(arranged_attributes) - 1 == i:\n sentence = sentence[:-1]\n if attribute == \"bald\":\n attribute = \"he\" if is_male else \"she\"\n attribute += (\n \" is \" + random.choice([\"going\", \"partially\"]) + \" bald\"\n )\n return sentence + \" and \" + attribute + \".\"\n return sentence + \" and \" + attribute + \".\"\n sentence += \" \" + attribute + \",\"\n else:\n sentence = \"He\" if is_male else \"She\"\n sentence += \" has\"\n for i, attribute in enumerate(arranged_attributes):\n attribute = attribute.lower().split(\"_\")[0]\n if len(arranged_attributes) - 1 == i:\n sentence = sentence[:-1]\n if attribute == \"bald\":\n sentence += \" hair\"\n attribute = \"he\" if is_male else \"she\"\n attribute += (\n \" is \" + random.choice([\"going\", \"partially\"]) + \" bald\"\n )\n return sentence + \" and \" + attribute + \".\"\n return sentence + \" and \" + attribute + \" hair.\"\n sentence += \" \" + attribute + \",\"",
"def choose_line(text):\n global poetry\n global privacy\n global lines_seen\n global pattern\n line = random.choice(text)\n if text == poetry:\n if pattern is not \"\":\n for l in text:\n if re.search(pattern, l) is not None:\n line = l\n continue\n text.remove(line)\n if len(line) > 0:\n if line.isupper():\n line = line.lower()\n # If line contains multiple sentences, randomly choose one\n # and remove extra spaces and punction.\n line = random.choice(line.split('.'))\n line = re.sub(r\"[^\\w\\'\\-\\s]\", \"\", line).strip()\n # print(line)\n # If the line exceeds --max_words_from_line, randomly choose the\n # specified num of words from the start, middle, or end of the line.\n words_removed = 0\n if text == privacy:\n if args.max_words_per_line:\n if len(line.split()) > args.max_words_per_line:\n words_removed = len(line.split()) - args.max_words_per_line\n start = ' '.join(line.split(' ')[:-words_removed])\n middle = ' '.join(line.split(' ')[(words_removed // 2):-(words_removed // 2)])\n end = ' '.join(line.split(' ')[words_removed:])\n line = random.choice([start, middle, end]).strip()\n pattern = line[-3:]\n # If --unique_lines is set, check if the line was seen in a previous\n # iteration. If not, write the line to new_poem and add it to lines_seen.\n if args.unique_lines:\n if line.lower() in lines_seen:\n return\n lines_seen.add(line.lower())\n if line.isspace() or len(line) < 1:\n print(\"Skip empty line\\n\")\n return\n if not line.isascii():\n print(\"Skip non-ascii line\\n\")\n return\n write_line(line, text, words_removed)",
"def is_limerick(self, text):\r\n lines = text.strip().split(\"\\n\")\r\n\r\n if len(lines) is not 5:\r\n return False\r\n\r\n line_count = 1\r\n punctuations_to_delete = set(string.punctuation)\r\n dict = {}\r\n\r\n for line in lines:\r\n line_strip = line.strip()\r\n words = nltk.word_tokenize(line_strip)\r\n # words = self.apostrophe_tokenize(line_strip)\r\n\r\n words = [x for x in words if x and x not in punctuations_to_delete]\r\n last_word = words[-1].lower()\r\n\r\n if line_count == 1 or line_count == 2 or line_count == 5:\r\n line_group = 'A'\r\n else:\r\n line_group = 'B'\r\n\r\n syllables_in_line = 0;\r\n for word in words:\r\n syllables_in_token = self.num_syllables(word.lower())\r\n syllables_in_line = syllables_in_line + syllables_in_token\r\n\r\n dict[line_count] = [line_group, syllables_in_line, last_word]\r\n line_count = line_count + 1\r\n\r\n\r\n # Limerick Check\r\n groupA_word1 = dict.get(1)[2]\r\n groupA_word2 = dict.get(2)[2]\r\n groupA_word3 = dict.get(5)[2]\r\n groupB_word1 = dict.get(3)[2]\r\n groupB_word2 = dict.get(4)[2]\r\n\r\n syllables_line1 = dict.get(1)[1]\r\n syllables_line2 = dict.get(2)[1]\r\n syllables_line3 = dict.get(3)[1]\r\n syllables_line4 = dict.get(4)[1]\r\n syllables_line5 = dict.get(5)[1]\r\n\r\n # Check rhymming\r\n if self.rhymes(groupA_word1, groupA_word2) and self.rhymes(groupA_word2, groupA_word3) and self.rhymes(groupA_word1, groupA_word3) and self.rhymes(groupB_word1, groupB_word2):\r\n\r\n # Check if no of syllables of all line >=4\r\n if syllables_line1 >= 4 and syllables_line2 >=4 and syllables_line3 >= 4 and syllables_line4 >= 4 and syllables_line5 >= 4:\r\n\r\n # Check if syllables of group B is always less than each of the lines of group A\r\n if syllables_line3 < syllables_line1 and syllables_line3 < syllables_line2 and syllables_line3 < syllables_line5 and syllables_line4 < syllables_line1 and syllables_line4 <syllables_line2 and syllables_line4 < syllables_line5:\r\n\r\n # Check if group A syllables differ by less than 2\r\n if syllables_line1 - syllables_line2 <= 2 and syllables_line2 - syllables_line5 <= 2 and syllables_line1 - syllables_line5 <= 2:\r\n\r\n # Check if group B syllables differ by less than 2\r\n if syllables_line3 - syllables_line4 <= 2:\r\n return True\r\n return False",
"def test_poem_of_mutiple_lines(self):\n poem_lines = ['The first line leads off,',\n 'With a gap before the next.',\n 'Then the poem ends.']\n pattern = ([5, 5, 4], ['*','*','*'])\n expected_list = ['With a gap before the next.', 'Then the poem ends.']\n \n self.assertEqual(check_syllables(poem_lines,pattern,word_to_phonemes),\n expected_list, 'Poem_lines consists of mutiple lines')\n \n self.assertFalse(check_syllables(poem_lines,pattern,word_to_phonemes) \\\n == expected_list[::-1],'Order of returned lines')",
"def get_rhyme_graph(grid, words, thr=0.8):\n word_keys = [w + '_{0}'.format(i) for i, w in enumerate(words)]\n binary_grid = binarize_grid(grid, thr=thr)\n G = networkx.Graph()\n G.add_nodes_from(word_keys)\n for i, x in enumerate(word_keys):\n for j, y in enumerate(word_keys):\n if binary_grid[i, j] > 0:\n G.add_edge(x, y)\n return G",
"def jokes_helper():\n\n resp = open('response_phrases/jokes.txt')\n line = next(resp)\n for num, aline in enumerate(resp):\n if random.randrange(num + 2):\n continue\n if aline != '':\n line = aline\n else:\n line = 'I intend to live forever. So far, so good.'\n return line",
"def test_poem_of_one_line(self):\n \n poem_lines = ['The first line leads off,']\n pattern = ([5], ['*'])\n self.assertEqual(check_syllables(poem_lines,pattern,word_to_phonemes),\n [], 'Poem_lines consists of one line')",
"def boyer_moore(p, p_bm, t):\n i = 0 # track where we are in the text\n occurrences = [] # the index that p match t\n while i < len(t) - len(p) + 1:\n # loop though all the positions in t where p should start\n shift = 1\n mismatched = False\n for j in range(len(p) - 1, -1, -1):\n # the 3rd word '-1' means we're going backwards\n if not p[j] == t[i + j]: # when we have a mismatch\n # calculate the bad character rule and good suffix rule to see\n # how many bases we can skip\n skip_bc = p_bm.bad_character_rule(j, t[i + j])\n skip_gs = p_bm.good_suffix_rule(j)\n # calculate the max shift bases.\n shift = max(shift, skip_bc, skip_gs)\n mismatched = True\n break\n if not mismatched: # if there is no mismatch.\n occurrences.append(i)\n # if there is no mismatch we don't need to use the\n # bad_character_rule\n skip_gs = p_bm.match_skip()\n shift = max(shift, skip_gs)\n i += shift # add the value of shift to i\n\n return occurrences"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a comma-delimited string of stanza lengths.
|
def stanza_lengths(tokenized_poem):
    stanzas = []
    i = 0
    for line in tokenized_poem:
        if line != ['']:  # non-blank line: count it toward the current stanza
            i += 1
        else:  # blank line marks a stanza break
            stanzas.append(str(i))
            i = 0
    if i != 0:  # include a final stanza that has no trailing blank line
        stanzas.append(str(i))
    joined = ','.join(stanzas)
    return joined
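A minimal usage sketch of stanza_lengths (assumption: stanza breaks are encoded as an empty-token line ['']; the sample poem is hypothetical):

poem = [['First', 'line'], ['Second', 'line'], [''], ['Third', 'line']]
print(stanza_lengths(poem))  # -> '2,1'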
|
[
"def format_length(length, size):\n\t\tformatted = hex(int(length)).split('0x')[1]\n\t\twhile len(formatted) < size:\n\t\t\tformatted = '0' + formatted\n\t\treturn formatted",
"def item_length(self):\n return 4 + self.ts_sub_item.total_length()",
"def order_by_length(*items):\n sorted_items = sorted(items, key=lambda item: (len(str(item)), str(item)))\n return ' '.join(sorted_items)",
"def __len__(self):\n return(len(self.nucleotides))",
"def getLengths(self):\n return [self.length]",
"def longString(self):\n l = ''\n for item in self.header:\n l = l + item + '\\n'\n for item in self.amp:\n l = l + '%f\\n' % (item*SweepData.gain_value[self.gain])\n return l",
"def serialize_as_length_prefixed_string(value, output_buffer):\n output_buffer.write('`s')\n if type(value) is unicode:\n to_serialize = value.encode('utf-8')\n else:\n to_serialize = value\n output_buffer.write(struct.pack('>i', len(to_serialize)))\n output_buffer.write(to_serialize)",
"def max_message_size(self):\n max_message_size = self.member_type.max_message_size()\n if isinstance(self.length, str):\n # A list where all the elements are self.length and there will be max_message_size\n # occurences\n return [self.length] * max_message_size\n return max_message_size * self.length",
"def getLength(self) -> \"int\":\n return _coin.SoAuditorList_getLength(self)",
"def length(quat):\n return vector4.length(quat)",
"def get_display_lengths(self):\n self.d_lengths = [50, 20, 20, 16, 16]\n if self.paths:\n self.d_lengths[0] = max([len(i) for i in self.paths.keys()])\n self.d_lengths.append(sum(self.d_lengths) + 2)",
"def min_message_size(self):\n min_message_size = self.member_type.min_message_size()\n if isinstance(self.length, str):\n # A a list where all the elements are self.length and there will be min_message_size\n # occurences\n return [self.length] * min_message_size\n return min_message_size * self.length",
"def lengths(self):\n return self._lengths.__copy__()",
"def pdu_length(self):\n return sum((i.total_length() for i in self.data_value_items))",
"def _str_len(length):\n if (length / pi).is_integer():\n str_len = repr(int(length / pi)) + \"pi\"\n else:\n str_len = f\"{length:.3f}\".rstrip(\"0\")\n\n return str_len",
"def track_length_string(length):\n us = length % 1000\n ms = int((length / 1000) % 1000)\n s = int(length / 1000000)\n minutes = int(s / 60)\n s = s - minutes * 60\n if us != 0:\n return \"%d:%02d.%03d%03d\" % (minutes, s, ms, us)\n elif ms != 0:\n return \"%d:%02d.%03d\" % (minutes, s, ms)\n else:\n return \"%d:%02d\" % (minutes, s)",
"def generate_random_lengths(self):\n random_lengths = []\n amount = self.spec.num_segments\n while amount > 0:\n random_lengths.append((random.uniform(self.spec.min_lengths[0], self.spec.max_lengths[1])))\n amount -= 1\n return tuple(random_lengths)",
"def len_pack(length):\n return struct.pack(\"!I\", length)",
"def get_length_bucket( msg_length ):\n if msg_length < 20:\n return \"short\"\n elif msg_length < 80:\n return \"medium\"\n else:\n return \"long\""
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Compare a string's Levenshtein distance to each candidate in a dictionary. Returns the name of the closest match.
|
def levenshtein(string, candidates):
distances = defaultdict(int)
num_lines = len(string)
for k, v in candidates.items():
expanded = False
# Expands the length of each candidate to match the length of the compared string
if len(v) != len(string):
v = (v * (num_lines // len(v) + 1))[:num_lines]
expanded = True
edit_distance = distance(string, v)
# If we expanded the candidate, then it is a worse match than what we have already
if edit_distance in distances and expanded:
continue
distances[distance(string, v)] = k
return distances[min(distances)]
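A minimal usage sketch of levenshtein (assumptions: defaultdict comes from collections, distance is an edit-distance function such as Levenshtein.distance from python-Levenshtein, and the candidate dictionary shown here is hypothetical):

candidates = {'alternate rhyme': 'abab', 'couplets': 'aabb'}
# 'abab' is cycled to 'ababab' (edit distance 0), so this prints 'alternate rhyme'
print(levenshtein('ababab', candidates))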
|
[
"def levenshtein_distance_using_lexical_tree(lexical_tree, input_string, strategy=0, case_sensitive=0):",
"def levenshtein_distance(str_1, str_2):\n return textdistance.levenshtein.normalized_similarity(str_1, str_2)",
"def lev_distance(self,b):\n str1 = self.name\n str2 = b.name\n d=dict()\n for i in range(len(str1)+1):\n d[i]=dict()\n d[i][0]=i\n for i in range(len(str2)+1):\n d[0][i] = i\n for i in range(1, len(str1)+1):\n for j in range(1, len(str2)+1):\n d[i][j] = min(d[i][j-1]+1, d[i-1][j]+1, d[i-1][j-1]+(not str1[i-1] == str2[j-1]))\n return d[len(str1)][len(str2)]",
"def closest_word_match(word):\n distances = {}\n for other_word in ALL_WORDS:\n dist = Levenshtein.distance(word, other_word)\n if dist not in distances:\n distances[dist] = []\n distances[dist].append(other_word)\n i = 1\n while i not in distances or distances[i] == []:\n i += 1\n if len(distances[i]) == 1:\n log(f\"Repaired '{word}' -> '{distances[i][0]}'.\", LOG_DEBUG, \"Who's on First?\")\n return distances[i][0]\n return None",
"def fuzzy_string_search(st, li):\n best_match = 0\n best_rat = 0\n for (i, s) in enumerate(li):\n rat = fuzz.partial_ratio(s, st)\n if rat > best_rat:\n best_match = i\n best_rat = rat\n\n return best_match, best_rat",
"def find_best_match(TAG_seq, tags, maximum_distance):\n best_match = \"unmapped\"\n best_score = maximum_distance\n for tag, name in tags.items():\n score = Levenshtein.hamming(tag, TAG_seq[: len(tag)])\n if score == 0:\n # Best possible match\n return name\n elif score <= best_score:\n best_score = score\n best_match = name\n return best_match\n return best_match",
"def _match_term_to_results_with_levenshtein(self, current_search_term, ocr_results):\n possible_matches = []\n for result in ocr_results:\n ocr_result_word = result[0]\n distance = editdistance.eval(current_search_term, ocr_result_word)\n similarity = 1 - distance / max(len(ocr_result_word), len(current_search_term)) \n if similarity > self.minimum_word_similarity:\n possible_matches.append(result)\n\n return possible_matches",
"def test_levenshteinDistance_bat_cat(self):\n distance = util.levenshteinDistance('bat', 'cat')\n self.assertEqual(distance, 1)",
"def find_nearest(name, names):\n if name in names:\n return names\n\n def normalize_name(name):\n return re_vowels.sub(\"\", name)\n\n # try with just consonents to handle vowel variations\n d = dict((normalize_name(n), n) for n in names)\n if normalize_name(name) in d:\n return d[normalize_name(name)]\n\n # sort all consonants \n def normalize_name(name):\n return \"\".join(sorted(set(re_vowels.sub(\"\", name))))\n d = dict((normalize_name(n), n) for n in names)\n if normalize_name(name) in d:\n return d[normalize_name(name)]\n\n raise Exception(\"Unable to find a nearest match for {0!r}\".format(name))",
"def rel_levenshtein(s1, s2):\n maxlen = max(len(s1), len(s2))\n if maxlen > 0:\n return levenshtein(s1, s2) / float(maxlen)\n else:\n return 0",
"def get_distances(self, word):\n for name in self.app_names:\n self.distances[name] = self.levenshtein(word, name)",
"def test_levenshteinDistance_cat_cat(self):\n distance = util.levenshteinDistance('cat', 'cat')\n self.assertEqual(distance, 0)",
"def test_levenshteinDistance_bar_cat(self):\n distance = util.levenshteinDistance('bar', 'cat')\n self.assertEqual(distance, 2)",
"def get_optimal_term(matches, key_index, sentences, trin):\n\n\t#Only keep values in the nearest sentence(s)\n\tclosest_val = None\n\tbest_term = None\n\tif matches:\n\t\tfor tpl in matches:\n\t\t\ttpl[1] = abs(key_index - tpl[1])\n\t\tmatches.sort(key=lambda tpl:tpl[1])\n\t\tclosest_val = matches[0][1]\n\t\tresult = []\n\t\tfor tpl in matches:\n\t\t\tif tpl[1] == closest_val:\n\t\t\t\tresult.append(tpl)\n\t\tmatches = result\n\t\tbest_term = matches[0]\n\t\t\n\t#Within the nearest sentences, figure out which one is closest\n\tif len(matches) > 1:\n\t\tagg_sentence = \"\"\n\t\t#Make all sentences one String for convenience\n\t\tfor si in range(key_index - closest_val, key_index + closest_val + 1):\n\t\t\tif si >= 0 and si < len(sentences):\n\t\t\t\tagg_sentence += sentences[si]\n\t\t#Everything is on the sentence at key_index\n\t\tif agg_sentence == \"\":\n\t\t\tagg_sentence = sentences[key_index]\n\t\tagg_sentence = re.sub(r\"\\s+\", ' ', agg_sentence)\n\n\t\t#Run distances through in order of longest tpl[0] to shortest\n\t\t#This is to prevent terms that are substrings of other terms\n\t\t#causing problems\n\t\tmatches.sort(key=lambda tpl:len(tpl[0]))\n\t\tmatches = list(reversed(matches))\n\n\t\tmin_distance = len(agg_sentence) + 1\n\t\tbest_term = None\n\t\tdist = 1000\n\t\tfor m in matches[:]:\n\t\t\tif not m[0]:\n\t\t\t\tmatches.remove(m)\n\t\tfor tpl in matches:\n\t\t\twhile tpl[0].casefold() in agg_sentence:\n\t\t\t\tdist = distance(agg_sentence.casefold(), tpl[0].casefold(), trin.casefold())\n\t\t\t\tagg_sentence = agg_sentence.replace(tpl[0].casefold(), '', 1)\n\t\t\tif dist <= min_distance:\n\t\t\t\tmin_distance = dist\n\t\t\t\tbest_term = tpl\n\n\tif best_term != None:\n\t\treturn best_term[0]\n\treturn None",
"def edit_levenshtein(c1, c2):\n return 0 if c1 == c2 else -1",
"def similarity(stringa,stringb):\n\n\treturn levenshtein_similarity(stringa,stringb)",
"def test_levenshteinDistance_bridgedb_doge(self):\n distance = util.levenshteinDistance('bridgedb', 'doge')\n self.assertEqual(distance, 6)",
"def levenshtein_distance(diffs):\n levenshtein = 0\n insertions = 0\n deletions = 0\n for (op, data) in diffs:\n if op == DIFF_INSERT:\n insertions += len(data)\n elif op == DIFF_DELETE:\n deletions += len(data)\n elif op == DIFF_EQUAL:\n # A deletion and an insertion is one substitution.\n levenshtein += max(insertions, deletions)\n insertions = 0\n deletions = 0\n levenshtein += max(insertions, deletions)\n return levenshtein",
"def levenshtein_distance(word, node_val):\n\n memo = {}\n\n # time for a closure!\n def memo_levenshtein_distance(word, i, node_val, j):\n if (word, i, node_val, j) in memo:\n return memo[(word, i, node_val, j)]\n\n if len(word) - i == 0:\n return len(node_val) - j\n if len(node_val) - j == 0:\n return len(word) - i\n if word[i] != node_val[j]:\n cost = 1\n else:\n cost = 0\n\n distance = min(memo_levenshtein_distance(word, i+1, node_val, j) + 1,\n memo_levenshtein_distance(word, i, node_val, j+1) + 1,\n memo_levenshtein_distance(word, i+1, node_val, j+1) + cost)\n\n memo[(word, i, node_val, j)] = distance\n return distance\n\n return memo_levenshtein_distance(word, 0, node_val, 0)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|